claude 1
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 43s

2026-01-08 07:47:29 -08:00
parent ab63f83f50
commit d356d9dfb6
45 changed files with 4022 additions and 286 deletions


@@ -0,0 +1,34 @@
{
"permissions": {
"allow": [
"Bash(npm test:*)",
"Bash(podman --version:*)",
"Bash(podman ps:*)",
"Bash(podman machine start:*)",
"Bash(podman compose:*)",
"Bash(podman pull:*)",
"Bash(podman images:*)",
"Bash(podman stop:*)",
"Bash(echo:*)",
"Bash(podman rm:*)",
"Bash(podman run:*)",
"Bash(podman start:*)",
"Bash(podman exec:*)",
"Bash(cat:*)",
"Bash(PGPASSWORD=postgres psql:*)",
"Bash(npm search:*)",
"Bash(npx:*)",
"Bash(curl -s -H \"Authorization: token c72bc0f14f623fec233d3c94b3a16397fe3649ef\" https://gitea.projectium.com/api/v1/user)",
"Bash(curl:*)",
"Bash(powershell:*)",
"Bash(cmd.exe:*)",
"Bash(export NODE_ENV=test DB_HOST=localhost DB_USER=postgres DB_PASSWORD=postgres DB_NAME=flyer_crawler_dev REDIS_URL=redis://localhost:6379 FRONTEND_URL=http://localhost:5173 JWT_SECRET=test-jwt-secret:*)",
"Bash(npm run test:integration:*)",
"Bash(grep:*)",
"Bash(done)",
"Bash(podman info:*)",
"Bash(podman machine:*)",
"Bash(podman system connection:*)"
]
}
}

.gemini/settings.json Normal file

@@ -0,0 +1,66 @@
{
"mcpServers": {
"chrome-devtools": {
"command": "npx",
"args": [
"-y",
"chrome-devtools-mcp@latest",
"--headless",
"true",
"--isolated",
"false",
"--channel",
"stable"
]
},
"markitdown": {
"command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
"args": [
"markitdown-mcp"
]
},
"gitea-torbonium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.torbonium.com",
"GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
}
},
"gitea-lan": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.torbolan.com",
"GITEA_ACCESS_TOKEN": "REPLACE_WITH_NEW_TOKEN"
}
},
"gitea-projectium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.projectium.com",
"GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
}
},
"podman": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-docker"],
"env": {
"DOCKER_HOST": "npipe:////./pipe/docker_engine"
}
},
"filesystem": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"D:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
]
},
"fetch": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-fetch"]
}
}
}

README.vscode.md Normal file

@@ -0,0 +1,630 @@
# VS Code Configuration for Flyer Crawler Project
This document describes the VS Code setup for this project, including MCP (Model Context Protocol) server configurations for both Gemini Code and Claude Code.
## Overview
This project uses VS Code with AI coding assistants (Gemini Code and Claude Code) that connect to various MCP servers for enhanced capabilities like container management, repository access, and file system operations.
## MCP Server Architecture
MCP (Model Context Protocol) allows AI assistants to interact with external tools and services. Both Gemini Code and Claude Code are configured to use the same set of MCP servers.
### Configuration Files
- **Gemini Code**: `%APPDATA%\Code\User\mcp.json`
- **Claude Code**: `%USERPROFILE%\.claude\settings.json`
## Configured MCP Servers
### 1. Gitea MCP Servers
Access to multiple Gitea instances for repository management, code search, issue tracking, and CI/CD workflows.
#### Gitea Projectium (Primary)
- **Host**: `https://gitea.projectium.com`
- **Purpose**: Main production Gitea server
- **Capabilities**:
- Repository browsing and code search
- Issue and PR management
- CI/CD workflow access
- Repository cloning and management
#### Gitea Torbonium
- **Host**: `https://gitea.torbonium.com`
- **Purpose**: Development/testing Gitea instance
- **Capabilities**: Same as Gitea Projectium
#### Gitea LAN
- **Host**: `https://gitea.torbolan.com`
- **Purpose**: Local network Gitea instance
- **Status**: Disabled (requires token configuration)
**Executable Location**: `d:\gitea-mcp\gitea-mcp.exe`
**Configuration Example** (Gemini Code - mcp.json):
```json
{
"servers": {
"gitea-projectium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.projectium.com",
"GITEA_ACCESS_TOKEN": "your-token-here"
}
}
}
}
```
**Configuration Example** (Claude Code - settings.json):
```json
{
"mcpServers": {
"gitea-projectium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.projectium.com",
"GITEA_ACCESS_TOKEN": "your-token-here"
}
}
}
}
```
### 2. Podman/Docker MCP Server
Manages local containers via Podman Desktop (using Docker-compatible API).
- **Purpose**: Container lifecycle management
- **Socket**: `npipe:////./pipe/docker_engine` (Windows named pipe)
- **Capabilities**:
- List, start, stop containers
- Execute commands in containers
- View container logs
- Inspect container status and configuration
**Current Containers** (for this project):
- `flyer-crawler-postgres` - PostgreSQL 15 + PostGIS on port 5432
- `flyer-crawler-redis` - Redis on port 6379
**Configuration** (Gemini Code - mcp.json):
```json
{
"servers": {
"podman": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-docker"],
"env": {
"DOCKER_HOST": "npipe:////./pipe/docker_engine"
}
}
}
}
```
**Configuration** (Claude Code):
```json
{
"mcpServers": {
"podman": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-docker"],
"env": {
"DOCKER_HOST": "npipe:////./pipe/docker_engine"
}
}
}
}
```
### 3. Filesystem MCP Server
Direct file system access to the project directory.
- **Purpose**: Read and write files in the project
- **Scope**: `D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com`
- **Capabilities**:
- Read file contents
- Write/edit files
- List directory contents
- Search files
**Configuration** (Gemini Code - mcp.json):
```json
{
"servers": {
"filesystem": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"D:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
]
}
}
}
```
**Configuration** (Claude Code):
```json
{
"mcpServers": {
"filesystem": {
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"D:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
]
}
}
}
```
### 4. Fetch MCP Server
Web request capabilities for documentation lookups and API testing.
- **Purpose**: Make HTTP requests
- **Capabilities**:
- Fetch web pages and APIs
- Download documentation
- Test endpoints
**Configuration** (Gemini Code - mcp.json):
```json
{
"servers": {
"fetch": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-fetch"]
}
}
}
```
**Configuration** (Claude Code):
```json
{
"mcpServers": {
"fetch": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-fetch"]
}
}
}
```
### 5. Chrome DevTools MCP Server (Optional)
Browser automation and debugging capabilities.
- **Purpose**: Automated browser testing
- **Status**: Disabled by default
- **Capabilities**:
- Browser automation
- Screenshot capture
- DOM inspection
- Network monitoring
**Configuration** (when enabled):
```json
{
"mcpServers": {
"chrome-devtools": {
"command": "npx",
"args": [
"chrome-devtools-mcp@latest",
"--headless", "false",
"--isolated", "false",
"--channel", "stable"
]
}
}
}
```
### 6. Markitdown MCP Server (Optional)
Document conversion capabilities.
- **Purpose**: Convert various document formats to Markdown
- **Status**: Disabled by default
- **Requires**: Python with `uvx` installed
- **Capabilities**:
- Convert PDFs to Markdown
- Convert Word documents
- Convert other document formats
**Configuration** (when enabled):
```json
{
"mcpServers": {
"markitdown": {
"command": "uvx",
"args": ["markitdown-mcp==0.0.1a4"]
}
}
}
```
## Prerequisites
### For Podman MCP
1. **Podman Desktop** installed and running
2. Podman machine initialized and started:
```powershell
podman machine init
podman machine start
```
### For Gitea MCP
1. **Gitea MCP executable** at `d:\gitea-mcp\gitea-mcp.exe`
2. **Gitea Access Tokens** with appropriate permissions:
- `repo` - Full repository access
- `write:user` - User profile access
- `read:organization` - Organization access
### For Chrome DevTools MCP
1. **Chrome browser** installed (stable channel)
2. **Node.js 18+** for npx execution
### For Markitdown MCP
1. **Python 3.8+** installed
2. **uvx** (the tool runner bundled with the `uv` package manager):
```powershell
pip install uv
```
## Testing MCP Servers
### Test Podman Connection
```powershell
podman ps
# Should list running containers
```
### Test Gitea API Access
```powershell
curl -H "Authorization: token YOUR_TOKEN" https://gitea.projectium.com/api/v1/user
# Should return your user information
```
### Test Database Container
```powershell
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT version();"
# Should return PostgreSQL version
```
## Security Notes
### Token Management
- **Never commit tokens** to version control
- Store tokens in environment variables or secure password managers
- Rotate tokens periodically
- Use minimal required permissions
### Access Tokens in Configuration Files
The configuration files (`mcp.json` and `settings.json`) contain sensitive access tokens. These files should:
- Be added to `.gitignore`
- Have restricted file permissions
- Be backed up securely
- Be updated when tokens are rotated
### Current Security Setup
- `%APPDATA%\Code\User\mcp.json` - Gitea tokens embedded
- `%USERPROFILE%\.claude\settings.json` - Gitea tokens embedded
- Both files are in user-specific directories with appropriate Windows ACLs
## Troubleshooting
### Podman MCP Not Working
1. Check Podman machine status:
```powershell
podman machine list
```
2. Ensure Podman Desktop is running
3. Verify Docker socket is accessible:
```powershell
podman ps
```
### Gitea MCP Connection Issues
1. Verify token has correct permissions
2. Check network connectivity to Gitea server:
```powershell
curl.exe https://gitea.projectium.com/api/v1/version
```
3. Ensure `gitea-mcp.exe` is not blocked by antivirus/firewall
### VS Code Extension Issues
1. **Reload Window**: Press `Ctrl+Shift+P` → "Developer: Reload Window"
2. **Check Extension Logs**: View → Output → Select extension from dropdown
3. **Verify JSON Syntax**: Ensure both config files have valid JSON
### MCP Server Not Loading
1. Check config file syntax with JSON validator
2. Verify executable paths are correct (use forward slashes or escaped backslashes)
3. Ensure required dependencies are installed (Node.js, Python, etc.)
4. Check VS Code developer console for errors: Help → Toggle Developer Tools
## Adding New MCP Servers
To add a new MCP server to both Gemini Code and Claude Code:
1. **Install the MCP server** (if it's an npm package):
```powershell
npm install -g @modelcontextprotocol/server-YOUR-SERVER
```
2. **Add to Gemini Code** (`mcp.json`):
```json
{
"servers": {
"your-server-name": {
"type": "stdio",
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-YOUR-SERVER"],
"env": {}
}
}
}
```
3. **Add to Claude Code** (`settings.json`):
```json
{
"mcpServers": {
"your-server-name": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-YOUR-SERVER"],
"env": {}
}
}
}
```
4. **Reload VS Code**
## Current Project Integration
### ADR Implementation Status
- **ADR-0002**: Transaction Management ✅ Enforced
- **ADR-0003**: Input Validation ✅ Enforced with URL validation
### Database Setup
- PostgreSQL 15 + PostGIS running in container
- 63 tables created
- URL constraints active:
- `flyers_image_url_check` enforces `^https?://.*`
- `flyers_icon_url_check` enforces `^https?://.*`
### Development Workflow
1. Start containers: `podman start flyer-crawler-postgres flyer-crawler-redis`
2. Use MCP servers to manage development environment
3. AI assistants can:
- Manage containers via Podman MCP
- Access repository via Gitea MCP
- Edit files via Filesystem MCP
- Fetch documentation via Fetch MCP
## Resources
- [Model Context Protocol Documentation](https://modelcontextprotocol.io/)
- [Gitea API Documentation](https://docs.gitea.com/api/1.22/)
- [Podman Desktop](https://podman-desktop.io/)
- [Claude Code Documentation](https://docs.anthropic.com/claude-code)
## Maintenance
### Regular Tasks
- **Monthly**: Rotate Gitea access tokens
- **Weekly**: Update MCP server packages:
```powershell
npm update -g @modelcontextprotocol/server-*
```
- **As Needed**: Update Gitea MCP executable when new version is released
### Backup Configuration
It is recommended to back up these files regularly:
- `%APPDATA%\Code\User\mcp.json`
- `%USERPROFILE%\.claude\settings.json`
## Gitea Workflows and CI/CD
This project uses Gitea Actions for continuous integration and deployment. The workflows are located in `.gitea/workflows/`.
### Available Workflows
#### Automated Workflows
**deploy-to-test.yml** - Automated deployment to test environment
- **Trigger**: Automatically on every push to `main` branch
- **Runner**: `projectium.com` (self-hosted)
- **Process**:
1. Version bump (patch) with `[skip ci]` tag
2. TypeScript type-check and linting
3. Run unit tests + integration tests + E2E tests
4. Generate merged coverage report
5. Build React frontend for test environment
6. Deploy to `flyer-crawler-test.projectium.com`
7. Restart PM2 processes for test environment
8. Update database schema hash
- **Coverage Report**: https://flyer-crawler-test.projectium.com/coverage
- **Environment Variables**: Uses test database and Redis credentials
#### Manual Workflows
**deploy-to-prod.yml** - Manual deployment to production
- **Trigger**: Manual via workflow_dispatch
- **Confirmation Required**: Must type "deploy-to-prod"
- **Process**:
1. Version bump (minor) for production release
2. Check database schema hash (fails if mismatch)
3. Build React frontend for production
4. Deploy to `flyer-crawler.projectium.com`
5. Restart PM2 processes (with version check)
6. Update production database schema hash
- **Optional**: Force PM2 reload even if version matches
**manual-db-backup.yml** - Database backup workflow
- Creates timestamped backup of production database
- Stored in `/var/backups/postgres/`
**manual-db-restore.yml** - Database restore workflow
- Restores production database from backup file
- Requires confirmation and backup filename
**manual-db-reset-test.yml** - Reset test database
- Drops and recreates test database schema
- Used for testing schema migrations
**manual-db-reset-prod.yml** - Reset production database
- **DANGER**: Drops and recreates production database
- Requires multiple confirmations
**manual-deploy-major.yml** - Major version deployment
- Similar to deploy-to-prod but bumps major version
- For breaking changes or major releases
### Accessing Workflows via Gitea MCP
With the Gitea MCP server configured, AI assistants can:
- View workflow files
- Monitor workflow runs
- Check deployment status
- Review CI/CD logs
- Trigger manual workflows (via API)
**Example MCP Operations**:
```bash
# Via Gitea MCP, you can:
# - List recent workflow runs
# - View workflow logs
# - Check deployment status
# - Review test results
# - Monitor coverage reports
```
### Key Environment Variables for CI/CD
The workflows use these Gitea repository secrets:
**Database**:
- `DB_HOST` - PostgreSQL host
- `DB_USER` - Database user
- `DB_PASSWORD` - Database password
- `DB_DATABASE_PROD` - Production database name
- `DB_DATABASE_TEST` - Test database name
**Redis**:
- `REDIS_PASSWORD_PROD` - Production Redis password
- `REDIS_PASSWORD_TEST` - Test Redis password
**API Keys**:
- `VITE_GOOGLE_GENAI_API_KEY` - Production Gemini API key
- `VITE_GOOGLE_GENAI_API_KEY_TEST` - Test Gemini API key
- `GOOGLE_MAPS_API_KEY` - Google Maps Geocoding API key
**Authentication**:
- `JWT_SECRET` - JWT signing secret
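At runtime these arrive as ordinary environment variables. As a minimal sketch (not the project's actual config module), the server can fail fast when one is missing:
```typescript
// Illustrative startup check; the variable names match the secrets above,
// but the requireEnv helper itself is hypothetical.
function requireEnv(name: string): string {
  const value = process.env[name];
  if (!value) throw new Error(`Missing required environment variable: ${name}`);
  return value;
}

export const config = {
  dbHost: requireEnv('DB_HOST'),
  dbUser: requireEnv('DB_USER'),
  dbPassword: requireEnv('DB_PASSWORD'),
  jwtSecret: requireEnv('JWT_SECRET'),
};
```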
### Schema Migration Process
The workflows use a schema hash comparison system:
1. **Hash Calculation**: SHA-256 hash of `sql/master_schema_rollup.sql`
2. **Storage**: Hashes stored in `public.schema_info` table
3. **Comparison**: On each deployment, current hash vs deployed hash
4. **Protection**: Deployment fails if schemas don't match
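As a minimal sketch of steps 1-3, assuming a `schema_hash` column in `public.schema_info` (the real column names may differ):
```typescript
import { createHash } from 'node:crypto';
import { readFileSync } from 'node:fs';
import { Pool } from 'pg';

// Step 1: SHA-256 hash of the schema rollup file.
const currentHash = createHash('sha256')
  .update(readFileSync('sql/master_schema_rollup.sql'))
  .digest('hex');

// Steps 2-3: compare against the hash recorded at the last deployment.
export async function schemaMatches(pool: Pool): Promise<boolean> {
  const { rows } = await pool.query('SELECT schema_hash FROM public.schema_info LIMIT 1');
  return rows[0]?.schema_hash === currentHash;
}
```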
**Manual Migration Steps** (when schema changes):
1. Update `sql/master_schema_rollup.sql`
2. Run manual migration workflow or:
```bash
psql -U <user> -d <database> -f sql/master_schema_rollup.sql
```
3. Deploy will update hash automatically
### PM2 Process Management
The workflows manage three PM2 processes per environment:
**Production** (`ecosystem.config.cjs --env production`):
- `flyer-crawler-api` - Express API server
- `flyer-crawler-worker` - Background job worker
- `flyer-crawler-analytics-worker` - Analytics processor
**Test** (`ecosystem.config.cjs --env test`):
- `flyer-crawler-api-test` - Test Express API server
- `flyer-crawler-worker-test` - Test background worker
- `flyer-crawler-analytics-worker-test` - Test analytics worker
**Process Cleanup**:
- Workflows automatically delete errored/stopped processes
- Version comparison prevents unnecessary reloads
- Force reload option available for production
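For reference, a trimmed sketch of what an `ecosystem.config.cjs` covering these processes can look like; the script paths are placeholders, and the test environment follows the same shape with `-test` suffixed names:
```javascript
// ecosystem.config.cjs - trimmed sketch; entry points are placeholders.
module.exports = {
  apps: [
    {
      name: 'flyer-crawler-api',
      script: 'dist/server.js',
      env_production: { NODE_ENV: 'production' },
    },
    {
      name: 'flyer-crawler-worker',
      script: 'dist/worker.js',
      env_production: { NODE_ENV: 'production' },
    },
    {
      name: 'flyer-crawler-analytics-worker',
      script: 'dist/analyticsWorker.js',
      env_production: { NODE_ENV: 'production' },
    },
  ],
};
```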
### Monitoring Deployment via MCP
Using Gitea MCP, you can monitor deployments in real-time:
1. **Check Workflow Status**:
- View running workflows
- See step-by-step progress
- Read deployment logs
2. **PM2 Process Monitoring**:
- Workflows output PM2 status after deployment
- View process IDs, memory usage, uptime
- Check recent logs (last 20 lines)
3. **Coverage Reports**:
- Automatically published to test environment
- HTML reports with detailed breakdown
- Merged coverage from unit + integration + E2E + server
### Development Workflow Integration
**Local Development** → **Push to main** → **Auto-deploy to test** → **Manual deploy to prod**
1. Develop locally with Podman containers
2. Commit and push to `main` branch
3. Gitea Actions automatically:
- Runs all tests
- Generates coverage
- Deploys to test environment
4. Review test deployment at https://flyer-crawler-test.projectium.com
5. Manually trigger production deployment when ready
### Using MCP for Deployment Tasks
With the configured MCP servers, you can:
**Via Gitea MCP**:
- Trigger manual workflows
- View deployment history
- Monitor test results
- Access workflow logs
**Via Podman MCP**:
- Inspect container logs (for local testing)
- Manage local database containers
- Test migrations locally
**Via Filesystem MCP**:
- Review workflow files
- Edit deployment scripts
- Update ecosystem config
## Version History
- **2026-01-07**: Initial MCP configuration for Gemini Code and Claude Code
- Added Gitea MCP servers (projectium, torbonium, lan)
- Added Podman MCP server
- Added Filesystem, Fetch MCP servers
- Configured Chrome DevTools and Markitdown (disabled by default)
- Documented Gitea workflows and CI/CD pipeline

READMEv2.md Normal file

@@ -0,0 +1,303 @@
# Flyer Crawler - Development Environment Setup
Quick start guide for getting the development environment running with Podman containers.
## Prerequisites
- **Windows with WSL 2**: Install WSL 2 by running `wsl --install` in an administrator PowerShell
- **Podman Desktop**: Download and install [Podman Desktop for Windows](https://podman-desktop.io/)
- **Node.js 20+**: Required for running the application
## Quick Start - Container Environment
### 1. Initialize Podman
```powershell
# Start Podman machine (do this once after installing Podman Desktop)
podman machine init
podman machine start
```
### 2. Start Required Services
Start PostgreSQL (with PostGIS) and Redis containers:
```powershell
# Navigate to project directory
cd D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com
# Start PostgreSQL with PostGIS
podman run -d `
  --name flyer-crawler-postgres `
  -e POSTGRES_USER=postgres `
  -e POSTGRES_PASSWORD=postgres `
  -e POSTGRES_DB=flyer_crawler_dev `
  -p 5432:5432 `
  docker.io/postgis/postgis:15-3.3
# Start Redis
podman run -d `
  --name flyer-crawler-redis `
  -e REDIS_PASSWORD="" `
  -p 6379:6379 `
  docker.io/library/redis:alpine
```
### 3. Wait for PostgreSQL to Initialize
```powershell
# Wait a few seconds, then check if PostgreSQL is ready
podman exec flyer-crawler-postgres pg_isready -U postgres
# Should output: /var/run/postgresql:5432 - accepting connections
```
### 4. Install Required PostgreSQL Extensions
```powershell
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "CREATE EXTENSION IF NOT EXISTS postgis; CREATE EXTENSION IF NOT EXISTS pg_trgm; CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";"
```
### 5. Apply Database Schema
```powershell
# Apply the complete schema with URL constraints enabled
Get-Content sql/master_schema_rollup.sql | podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev
```
### 6. Verify URL Constraints Are Enabled
```powershell
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "\d public.flyers" | grep -E "(image_url|icon_url|Check)"
```
You should see:
```
image_url | text | | not null |
icon_url | text | | not null |
Check constraints:
"flyers_icon_url_check" CHECK (icon_url ~* '^https?://.*'::text)
"flyers_image_url_check" CHECK (image_url ~* '^https?://.*'::text)
```
### 7. Set Environment Variables and Start Application
```powershell
# Set required environment variables
$env:NODE_ENV="development"
$env:DB_HOST="localhost"
$env:DB_USER="postgres"
$env:DB_PASSWORD="postgres"
$env:DB_NAME="flyer_crawler_dev"
$env:REDIS_URL="redis://localhost:6379"
$env:PORT="3001"
$env:FRONTEND_URL="http://localhost:5173"
# Install dependencies (first time only)
npm install
# Start the development server (runs both backend and frontend)
npm run dev
```
The application will be available at:
- **Frontend**: http://localhost:5173
- **Backend API**: http://localhost:3001
## Managing Containers
### View Running Containers
```powershell
podman ps
```
### Stop Containers
```powershell
podman stop flyer-crawler-postgres flyer-crawler-redis
```
### Start Containers (After They've Been Created)
```powershell
podman start flyer-crawler-postgres flyer-crawler-redis
```
### Remove Containers (Clean Slate)
```powershell
podman stop flyer-crawler-postgres flyer-crawler-redis
podman rm flyer-crawler-postgres flyer-crawler-redis
```
### View Container Logs
```powershell
podman logs flyer-crawler-postgres
podman logs flyer-crawler-redis
```
## Database Management
### Connect to PostgreSQL
```powershell
podman exec -it flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev
```
### Reset Database Schema
```powershell
# Drop all tables
Get-Content sql/drop_tables.sql | podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev
# Reapply schema
Get-Content sql/master_schema_rollup.sql | podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev
```
### Seed Development Data
```powershell
npm run db:reset:dev
```
## Running Tests
### Unit Tests
```powershell
npm run test:unit
```
### Integration Tests
**IMPORTANT**: Integration tests require the PostgreSQL and Redis containers to be running.
```powershell
# Make sure containers are running
podman ps
# Run integration tests
npm run test:integration
```
## Troubleshooting
### Podman Machine Issues
If you get "unable to connect to Podman socket" errors:
```powershell
podman machine start
```
### PostgreSQL Connection Refused
Make sure PostgreSQL is ready:
```powershell
podman exec flyer-crawler-postgres pg_isready -U postgres
```
### Port Already in Use
If ports 5432 or 6379 are already in use, you can either:
1. Stop the conflicting service
2. Change the port mapping when creating containers (e.g., `-p 5433:5432`)
### URL Validation Errors
The database now enforces URL constraints. All `image_url` and `icon_url` fields must:
- Start with `http://` or `https://`
- Match the regex pattern: `^https?://.*`
Make sure the `FRONTEND_URL` environment variable is set correctly to avoid URL validation errors.
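On the application side, ADR-0003 pairs these database constraints with Zod validation. A minimal sketch mirroring the same rule (not necessarily the project's exact schema):
```typescript
import { z } from 'zod';

// Mirrors the database CHECK constraint: ^https?://.*
const httpUrl = z
  .string()
  .regex(/^https?:\/\/.*/i, 'URL must start with http:// or https://');

export const flyerUrlSchema = z.object({
  image_url: httpUrl,
  icon_url: httpUrl,
});

// Example: rejects a URL with a missing protocol.
// flyerUrlSchema.parse({ image_url: 'example.com/a.png', icon_url: 'https://example.com/i.png' });
```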
## ADR Implementation Status
This development environment implements:
- **ADR-0002**: Transaction Management ✅
- All database operations use the `withTransaction` pattern
- Automatic rollback on errors
- No connection pool leaks
- **ADR-0003**: Input Validation ✅
- Zod schemas for URL validation
- Database constraints enabled
- Validation at API boundaries
## Development Workflow
1. **Start Containers** (once per development session)
```powershell
podman start flyer-crawler-postgres flyer-crawler-redis
```
2. **Start Application**
```powershell
npm run dev
```
3. **Make Changes** to code (auto-reloads via `tsx watch`)
4. **Run Tests** before committing
```powershell
npm run test:unit
npm run test:integration
```
5. **Stop Application** (Ctrl+C)
6. **Stop Containers** (optional, or leave running)
```powershell
podman stop flyer-crawler-postgres flyer-crawler-redis
```
## PM2 Worker Setup (Production-like)
To test with PM2 workers locally:
```powershell
# Install PM2 globally (once)
npm install -g pm2
# Start the worker
pm2 start npm --name "flyer-crawler-worker" -- run worker:prod
# View logs
pm2 logs flyer-crawler-worker
# Stop worker
pm2 stop flyer-crawler-worker
pm2 delete flyer-crawler-worker
```
## Next Steps
After getting the environment running:
1. Review [docs/adr/](docs/adr/) for architectural decisions
2. Check [sql/master_schema_rollup.sql](sql/master_schema_rollup.sql) for database schema
3. Explore [src/routes/](src/routes/) for API endpoints
4. Review [src/types.ts](src/types.ts) for TypeScript type definitions
## Common Environment Variables
Create these environment variables for development:
```powershell
# Database
$env:DB_HOST="localhost"
$env:DB_USER="postgres"
$env:DB_PASSWORD="postgres"
$env:DB_NAME="flyer_crawler_dev"
$env:DB_PORT="5432"
# Redis
$env:REDIS_URL="redis://localhost:6379"
# Application
$env:NODE_ENV="development"
$env:PORT="3001"
$env:FRONTEND_URL="http://localhost:5173"
# Authentication (generate your own secrets)
$env:JWT_SECRET="your-dev-jwt-secret-change-this"
$env:SESSION_SECRET="your-dev-session-secret-change-this"
# AI Services (get your own API keys)
$env:VITE_GOOGLE_GENAI_API_KEY="your-google-genai-api-key"
$env:GOOGLE_MAPS_API_KEY="your-google-maps-api-key"
```
## Resources
- [Podman Desktop Documentation](https://podman-desktop.io/docs)
- [PostGIS Documentation](https://postgis.net/documentation/)
- [Original README.md](README.md) for production setup


@@ -2,7 +2,9 @@
**Date**: 2025-12-12
-**Status**: Proposed
+**Status**: Accepted
+**Implemented**: 2026-01-07
## Context


@@ -2,7 +2,9 @@
**Date**: 2025-12-12
-**Status**: Proposed
+**Status**: Accepted
+**Implemented**: 2026-01-07
## Context


@@ -2,7 +2,9 @@
**Date**: 2025-12-12
-**Status**: Proposed
+**Status**: Accepted
+**Implemented**: 2026-01-07
## Context


@@ -1,8 +1,9 @@
# ADR-005: Frontend State Management and Server Cache Strategy
**Date**: 2025-12-12
+**Implementation Date**: 2026-01-08
-**Status**: Proposed
+**Status**: Accepted and Implemented (Phases 1 & 2 complete)
## Context
@@ -16,3 +17,58 @@ We will adopt a dedicated library for managing server state, such as **TanStack
**Positive**: Leads to a more performant, predictable, and simpler frontend codebase. Standardizes how the client-side communicates with the server and handles loading/error states. Improves user experience through intelligent caching.
**Negative**: Introduces a new frontend dependency. Requires a learning curve for developers unfamiliar with the library. Requires refactoring of existing data-fetching logic.
## Implementation Status
### Phase 1: Infrastructure & Core Queries (✅ Complete - 2026-01-08)
**Files Created:**
- [src/config/queryClient.ts](../../src/config/queryClient.ts) - Global QueryClient configuration
- [src/hooks/queries/useFlyersQuery.ts](../../src/hooks/queries/useFlyersQuery.ts) - Flyers data query
- [src/hooks/queries/useWatchedItemsQuery.ts](../../src/hooks/queries/useWatchedItemsQuery.ts) - Watched items query
- [src/hooks/queries/useShoppingListsQuery.ts](../../src/hooks/queries/useShoppingListsQuery.ts) - Shopping lists query
**Files Modified:**
- [src/providers/AppProviders.tsx](../../src/providers/AppProviders.tsx) - Added QueryClientProvider wrapper
- [src/providers/FlyersProvider.tsx](../../src/providers/FlyersProvider.tsx) - Refactored to use TanStack Query
- [src/providers/UserDataProvider.tsx](../../src/providers/UserDataProvider.tsx) - Refactored to use TanStack Query
- [src/services/apiClient.ts](../../src/services/apiClient.ts) - Added pagination params to fetchFlyers
**Benefits Achieved:**
- ✅ Removed ~150 lines of custom state management code
- ✅ Automatic caching of server data
- ✅ Background refetching for stale data
- ✅ React Query Devtools available in development
- ✅ Automatic data invalidation on user logout
- ✅ Better error handling and loading states
### Phase 2: Remaining Queries (✅ Complete - 2026-01-08)
**Files Created:**
- [src/hooks/queries/useMasterItemsQuery.ts](../../src/hooks/queries/useMasterItemsQuery.ts) - Master grocery items query
- [src/hooks/queries/useFlyerItemsQuery.ts](../../src/hooks/queries/useFlyerItemsQuery.ts) - Flyer items query
**Files Modified:**
- [src/providers/MasterItemsProvider.tsx](../../src/providers/MasterItemsProvider.tsx) - Refactored to use TanStack Query
- [src/hooks/useFlyerItems.ts](../../src/hooks/useFlyerItems.ts) - Refactored to use TanStack Query
**Benefits Achieved:**
- ✅ Removed additional ~50 lines of custom state management code
- ✅ Per-flyer item caching (items cached separately for each flyer)
- ✅ Longer cache times for infrequently changing data (master items)
- ✅ Automatic query disabling when dependencies are not met
### Phase 3: Mutations (⏳ Pending)
- Add/remove watched items
- Shopping list CRUD operations
- Optimistic updates
- Cache invalidation strategies
### Phase 4: Cleanup (⏳ Pending)
- Remove deprecated custom hooks
- Remove stub implementations
- Update all dependent components
## Implementation Guide
See [plans/adr-0005-implementation-plan.md](../../plans/adr-0005-implementation-plan.md) for detailed implementation steps.


@@ -0,0 +1,426 @@
# ADR-0005 Implementation Plan: Frontend State Management with TanStack Query
**Date**: 2026-01-08
**Status**: Ready for Implementation
**Related ADR**: [ADR-0005: Frontend State Management and Server Cache Strategy](../docs/adr/0005-frontend-state-management-and-server-cache-strategy.md)
## Current State Analysis
### What We Have
1. **TanStack Query v5.90.12 already installed** in package.json
2. **Not being used** - custom hooks reimplement its functionality
3. **Custom `useInfiniteQuery` hook** ([src/hooks/useInfiniteQuery.ts](../src/hooks/useInfiniteQuery.ts)) using `useState`/`useEffect`
4. **Custom `useApiOnMount` hook** (inferred from UserDataProvider)
5. **Multiple Context Providers** doing manual data fetching
### Current Data Fetching Patterns
#### Pattern 1: Custom useInfiniteQuery Hook
**Location**: [src/hooks/useInfiniteQuery.ts](../src/hooks/useInfiniteQuery.ts)
**Used By**: [src/providers/FlyersProvider.tsx](../src/providers/FlyersProvider.tsx)
**Problems**:
- Reimplements pagination logic that TanStack Query provides
- Manual loading state management
- Manual error handling
- No automatic caching
- No background refetching
- No request deduplication
#### Pattern 2: useApiOnMount Hook
**Location**: Unknown (needs investigation)
**Used By**: [src/providers/UserDataProvider.tsx](../src/providers/UserDataProvider.tsx)
**Problems**:
- Fetches data on mount only
- Manual loading/error state management
- No caching between unmount/remount
- Redundant state synchronization logic
## Implementation Strategy
### Phase 1: Setup TanStack Query Infrastructure (Day 1)
#### 1.1 Create QueryClient Configuration
**File**: `src/config/queryClient.ts`
```typescript
import { QueryClient } from '@tanstack/react-query';
export const queryClient = new QueryClient({
  defaultOptions: {
    queries: {
      staleTime: 1000 * 60 * 5, // 5 minutes
      gcTime: 1000 * 60 * 30, // 30 minutes (formerly cacheTime)
      retry: 1,
      refetchOnWindowFocus: false,
      refetchOnMount: true,
    },
    mutations: {
      retry: 0,
    },
  },
});
```
#### 1.2 Wrap App with QueryClientProvider
**File**: `src/providers/AppProviders.tsx`
Add TanStack Query provider at the top level:
```typescript
import type { ReactNode } from 'react';
import { QueryClientProvider } from '@tanstack/react-query';
import { ReactQueryDevtools } from '@tanstack/react-query-devtools';
import { queryClient } from '../config/queryClient';
export const AppProviders = ({ children }: { children: ReactNode }) => {
  return (
    <QueryClientProvider client={queryClient}>
      {/* Existing providers */}
      {children}
      {/* Add devtools in development only */}
      {import.meta.env.DEV && <ReactQueryDevtools initialIsOpen={false} />}
    </QueryClientProvider>
  );
};
```
### Phase 2: Replace Custom Hooks with TanStack Query (Days 2-5)
#### 2.1 Replace useInfiniteQuery Hook
**Current**: [src/hooks/useInfiniteQuery.ts](../src/hooks/useInfiniteQuery.ts)
**Action**: Create wrapper around TanStack's `useInfiniteQuery`
**New File**: `src/hooks/queries/useInfiniteFlyersQuery.ts`
```typescript
import { useInfiniteQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
export const useInfiniteFlyersQuery = () => {
  return useInfiniteQuery({
    queryKey: ['flyers'],
    queryFn: async ({ pageParam }) => {
      const response = await apiClient.fetchFlyers(pageParam);
      if (!response.ok) {
        const error = await response.json();
        throw new Error(error.message || 'Failed to fetch flyers');
      }
      return response.json();
    },
    initialPageParam: 0,
    getNextPageParam: (lastPage) => lastPage.nextCursor ?? undefined,
  });
};
```
#### 2.2 Replace FlyersProvider
**Current**: [src/providers/FlyersProvider.tsx](../src/providers/FlyersProvider.tsx)
**Action**: Simplify to use TanStack Query hook
```typescript
import React, { ReactNode, useMemo } from 'react';
import { FlyersContext } from '../contexts/FlyersContext';
import { useInfiniteFlyersQuery } from '../hooks/queries/useInfiniteFlyersQuery';
export const FlyersProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
  const {
    data,
    isLoading,
    error,
    fetchNextPage,
    hasNextPage,
    isRefetching,
    refetch,
  } = useInfiniteFlyersQuery();
  const flyers = useMemo(
    () => data?.pages.flatMap((page) => page.items) ?? [],
    [data]
  );
  const value = useMemo(
    () => ({
      flyers,
      isLoadingFlyers: isLoading,
      flyersError: error,
      fetchNextFlyersPage: fetchNextPage,
      hasNextFlyersPage: !!hasNextPage,
      isRefetchingFlyers: isRefetching,
      refetchFlyers: refetch,
    }),
    [flyers, isLoading, error, fetchNextPage, hasNextPage, isRefetching, refetch]
  );
  return <FlyersContext.Provider value={value}>{children}</FlyersContext.Provider>;
};
```
**Benefits**:
- ~100 lines of code removed
- Automatic caching
- Background refetching
- Request deduplication
- Optimistic updates support
#### 2.3 Replace UserDataProvider
**Current**: [src/providers/UserDataProvider.tsx](../src/providers/UserDataProvider.tsx)
**Action**: Use TanStack Query's `useQuery` for watched items and shopping lists
**New Files**:
- `src/hooks/queries/useWatchedItemsQuery.ts`
- `src/hooks/queries/useShoppingListsQuery.ts`
```typescript
// src/hooks/queries/useWatchedItemsQuery.ts
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
export const useWatchedItemsQuery = (enabled: boolean) => {
  return useQuery({
    queryKey: ['watched-items'],
    queryFn: async () => {
      const response = await apiClient.fetchWatchedItems();
      if (!response.ok) throw new Error('Failed to fetch watched items');
      return response.json();
    },
    enabled,
  });
};
// src/hooks/queries/useShoppingListsQuery.ts
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
export const useShoppingListsQuery = (enabled: boolean) => {
  return useQuery({
    queryKey: ['shopping-lists'],
    queryFn: async () => {
      const response = await apiClient.fetchShoppingLists();
      if (!response.ok) throw new Error('Failed to fetch shopping lists');
      return response.json();
    },
    enabled,
  });
};
```
**Updated Provider**:
```typescript
import React, { ReactNode, useMemo } from 'react';
import { UserDataContext } from '../contexts/UserDataContext';
import { useAuth } from '../hooks/useAuth';
import { useWatchedItemsQuery } from '../hooks/queries/useWatchedItemsQuery';
import { useShoppingListsQuery } from '../hooks/queries/useShoppingListsQuery';
export const UserDataProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
  const { userProfile } = useAuth();
  const isEnabled = !!userProfile;
  const { data: watchedItems = [], isLoading: isLoadingWatched, error: watchedError } =
    useWatchedItemsQuery(isEnabled);
  const { data: shoppingLists = [], isLoading: isLoadingLists, error: listsError } =
    useShoppingListsQuery(isEnabled);
  const value = useMemo(
    () => ({
      watchedItems,
      shoppingLists,
      isLoading: isEnabled && (isLoadingWatched || isLoadingLists),
      error: watchedError?.message || listsError?.message || null,
    }),
    [watchedItems, shoppingLists, isEnabled, isLoadingWatched, isLoadingLists, watchedError, listsError]
  );
  return <UserDataContext.Provider value={value}>{children}</UserDataContext.Provider>;
};
```
**Benefits**:
- ~40 lines of code removed
- No manual state synchronization
- Automatic cache invalidation on user logout
- Background refetching
### Phase 3: Add Mutations for Data Modifications (Days 6-8)
#### 3.1 Create Mutation Hooks
**Example**: `src/hooks/mutations/useAddWatchedItemMutation.ts`
```typescript
import { useMutation, useQueryClient } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import { notifySuccess, notifyError } from '../../services/notificationService';
export const useAddWatchedItemMutation = () => {
  const queryClient = useQueryClient();
  return useMutation({
    mutationFn: apiClient.addWatchedItem,
    onSuccess: () => {
      // Invalidate and refetch watched items
      queryClient.invalidateQueries({ queryKey: ['watched-items'] });
      notifySuccess('Item added to watched list');
    },
    onError: (error: Error) => {
      notifyError(error.message || 'Failed to add item');
    },
  });
};
```
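Components can then call the hook directly. A usage sketch (the component and item shape are illustrative, not part of the plan):
```typescript
import { useAddWatchedItemMutation } from '../hooks/mutations/useAddWatchedItemMutation';

// Illustrative component; the real item type comes from the apiClient.
export const WatchItemButton = ({ item }: { item: { id: number; name: string } }) => {
  const { mutate, isPending } = useAddWatchedItemMutation();
  return (
    <button disabled={isPending} onClick={() => mutate(item)}>
      {isPending ? 'Adding...' : 'Watch this item'}
    </button>
  );
};
```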
#### 3.2 Implement Optimistic Updates
**Example**: Optimistic shopping list update
```typescript
export const useUpdateShoppingListMutation = () => {
  const queryClient = useQueryClient();
  return useMutation({
    mutationFn: apiClient.updateShoppingList,
    onMutate: async (newList) => {
      // Cancel outgoing refetches so they don't overwrite the optimistic update
      await queryClient.cancelQueries({ queryKey: ['shopping-lists'] });
      // Snapshot previous value for rollback
      const previousLists = queryClient.getQueryData(['shopping-lists']);
      // Optimistically update (guard against a cold cache)
      queryClient.setQueryData(['shopping-lists'], (old: any[] = []) =>
        old.map((list) => (list.id === newList.id ? newList : list))
      );
      return { previousLists };
    },
    onError: (err, newList, context) => {
      // Roll back to the snapshot on error
      queryClient.setQueryData(['shopping-lists'], context?.previousLists);
      notifyError('Failed to update shopping list');
    },
    onSettled: () => {
      // Always refetch after error or success
      queryClient.invalidateQueries({ queryKey: ['shopping-lists'] });
    },
  });
};
```
### Phase 4: Remove Old Custom Hooks (Day 9)
#### Files to Remove:
- `src/hooks/useInfiniteQuery.ts` (if not used elsewhere)
- `src/hooks/useApiOnMount.ts` (needs investigation)
#### Files to Update:
- Update any remaining usages in other components
### Phase 5: Testing & Documentation (Day 10)
#### 5.1 Update Tests
- Update provider tests to work with QueryClient
- Add tests for new query hooks
- Add tests for mutation hooks
#### 5.2 Update Documentation
- Mark ADR-0005 as **Accepted** and **Implemented**
- Add usage examples to documentation
- Update developer onboarding guide
## Migration Checklist
### Prerequisites
- [x] TanStack Query installed
- [ ] QueryClient configuration created
- [ ] App wrapped with QueryClientProvider
### Queries
- [ ] Flyers infinite query migrated
- [ ] Watched items query migrated
- [ ] Shopping lists query migrated
- [ ] Master items query migrated (if applicable)
- [ ] Active deals query migrated (if applicable)
### Mutations
- [ ] Add watched item mutation
- [ ] Remove watched item mutation
- [ ] Update shopping list mutation
- [ ] Add shopping list item mutation
- [ ] Remove shopping list item mutation
### Cleanup
- [ ] Remove custom useInfiniteQuery hook
- [ ] Remove custom useApiOnMount hook
- [ ] Update all tests
- [ ] Remove redundant state management code
### Documentation
- [ ] Update ADR-0005 status to "Accepted"
- [ ] Add usage guidelines to README
- [ ] Document query key conventions
- [ ] Document cache invalidation patterns
## Benefits Summary
### Code Reduction
- **Estimated**: ~300-500 lines of custom hook code removed
- **Result**: Simpler, more maintainable codebase
### Performance Improvements
- ✅ Automatic request deduplication
- ✅ Background data synchronization
- ✅ Smart cache invalidation
- ✅ Optimistic updates
- ✅ Automatic retry logic
### Developer Experience
- ✅ React Query Devtools for debugging
- ✅ Type-safe query hooks
- ✅ Standardized patterns across the app
- ✅ Less boilerplate code
### User Experience
- ✅ Faster perceived performance (cached data)
- ✅ Better offline experience
- ✅ Smoother UI interactions (optimistic updates)
- ✅ Automatic background updates
## Risk Assessment
### Low Risk
- TanStack Query is industry-standard
- Already installed in project
- Incremental migration possible
### Mitigation Strategies
1. **Test thoroughly** - Maintain existing test coverage
2. **Migrate incrementally** - One provider at a time
3. **Monitor performance** - Use React Query Devtools
4. **Rollback plan** - Keep old code until migration complete
## Timeline Estimate
**Total**: 10 working days (2 weeks)
- Day 1: Setup infrastructure
- Days 2-5: Migrate queries
- Days 6-8: Add mutations
- Day 9: Cleanup
- Day 10: Testing & documentation
## Next Steps
1. Review this plan with team
2. Get approval to proceed
3. Create implementation tickets
4. Begin Phase 1: Setup
## References
- [TanStack Query Documentation](https://tanstack.com/query/latest)
- [React Query Best Practices](https://tkdodo.eu/blog/practical-react-query)
- [ADR-0005 Original Document](../docs/adr/0005-frontend-state-management-and-server-cache-strategy.md)


@@ -0,0 +1,182 @@
# ADR-0005 Phase 2 Implementation Summary
**Date**: 2026-01-08
**Status**: ✅ Complete
## Overview
Successfully completed Phase 2 of ADR-0005 enforcement by migrating all remaining query-based data fetching to TanStack Query.
## Files Created
### Query Hooks
1. **[src/hooks/queries/useMasterItemsQuery.ts](../src/hooks/queries/useMasterItemsQuery.ts)**
- Fetches all master grocery items
- 10-minute stale time (data changes infrequently)
- 30-minute garbage collection time
2. **[src/hooks/queries/useFlyerItemsQuery.ts](../src/hooks/queries/useFlyerItemsQuery.ts)**
- Fetches items for a specific flyer
- Per-flyer caching (separate cache for each flyer_id)
- Automatically disabled when no flyer ID provided
- 5-minute stale time
## Files Modified
### Providers
1. **[src/providers/MasterItemsProvider.tsx](../src/providers/MasterItemsProvider.tsx)**
- **Before**: 32 lines using `useApiOnMount` with manual state management
- **After**: 31 lines using `useMasterItemsQuery` (cleaner, no manual callbacks)
- Removed: `useEffect`, `useCallback`, `logger` imports
- Removed: Debug logging for mount/unmount
- Added: Automatic caching and background refetching
### Custom Hooks
2. **[src/hooks/useFlyerItems.ts](../src/hooks/useFlyerItems.ts)**
- **Before**: 29 lines with custom wrapper and `useApiOnMount`
- **After**: 32 lines using `useFlyerItemsQuery` (more readable)
- Removed: Complex wrapper function for type satisfaction
- Removed: Manual `enabled` flag handling
- Added: Automatic per-flyer caching
## Code Reduction Summary
### Phase 1 + Phase 2 Combined
- **Total custom state management code removed**: ~200 lines
- **New query hooks created**: 5 files (~200 lines of standardized code)
- **Providers simplified**: 4 files
- **Net result**: Cleaner, more maintainable codebase with better functionality
## Technical Improvements
### 1. Intelligent Caching Strategy
```typescript
// Master items (rarely change) - 10 min stale time
useMasterItemsQuery() // staleTime: 10 minutes
// Flyers (moderate changes) - 2 min stale time
useFlyersQuery() // staleTime: 2 minutes
// User data (frequent changes) - 1 min stale time
useWatchedItemsQuery() // staleTime: 1 minute
useShoppingListsQuery() // staleTime: 1 minute
// Flyer items (static) - 5 min stale time
useFlyerItemsQuery() // staleTime: 5 minutes
```
### 2. Per-Resource Caching
Each flyer's items are cached separately:
```typescript
// Flyer 1 items cached with key: ['flyer-items', 1]
useFlyerItemsQuery(1)
// Flyer 2 items cached with key: ['flyer-items', 2]
useFlyerItemsQuery(2)
// Both caches persist independently
```
### 3. Automatic Query Disabling
```typescript
// Query automatically disabled when flyerId is undefined
const { data } = useFlyerItemsQuery(selectedFlyer?.flyer_id);
// No manual enabled flag needed!
```
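For reference, a minimal sketch of how the hook can implement this, assuming an `apiClient.fetchFlyerItems(flyerId)` function (the real signature may differ):
```typescript
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';

export const useFlyerItemsQuery = (flyerId?: number) => {
  return useQuery({
    queryKey: ['flyer-items', flyerId], // per-flyer cache entry
    queryFn: async () => {
      const response = await apiClient.fetchFlyerItems(flyerId!);
      if (!response.ok) throw new Error('Failed to fetch flyer items');
      return response.json();
    },
    enabled: flyerId != null, // runs only once a flyer is selected
    staleTime: 1000 * 60 * 5, // 5 minutes, per the strategy above
  });
};
```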
## Benefits Achieved
### Performance
- **Reduced API calls** - Data cached between component unmounts
- **Background refetching** - Stale data updates in background
- **Request deduplication** - Multiple components can use same query
- **Optimized cache times** - Different strategies for different data types
### Code Quality
- **Removed ~50 more lines** of custom state management
- **Eliminated useApiOnMount** from all providers
- **Standardized patterns** - All queries follow same structure
- **Better type safety** - TypeScript types flow through queries
### Developer Experience
- **React Query Devtools** - Inspect all queries and cache
- **Easier debugging** - Clear query states and transitions
- **Less boilerplate** - No manual loading/error state management
- **Automatic retries** - Failed queries retry automatically
### User Experience
- **Faster perceived performance** - Cached data shows instantly
- **Fresh data** - Background refetching keeps data current
- **Better offline handling** - Cached data available offline
- **Smoother interactions** - No loading flicker on re-renders
## Remaining Work
### Phase 3: Mutations (Next)
- [ ] Create mutation hooks for data modifications
- [ ] Add/remove watched items with optimistic updates
- [ ] Shopping list CRUD operations
- [ ] Proper cache invalidation strategies
### Phase 4: Cleanup (Final)
- [ ] Remove `useApiOnMount` hook entirely
- [ ] Remove `useApi` hook if no longer used
- [ ] Remove stub implementations in providers
- [ ] Update all dependent tests
## Testing Recommendations
Before merging, test the following:
1. **Flyer List**
- Flyers load on page load
- Flyers cached on navigation away/back
- Background refetch after stale time
2. **Flyer Items**
- Items load when flyer selected
- Each flyer's items cached separately
- Switching between flyers uses cache
3. **Master Items**
- Items available across app
- Long cache time (10 min)
- Shared across all components
4. **User Data**
- Watched items/shopping lists load on login
- Data cleared on logout
- Fresh data on login (not stale from previous user)
5. **React Query Devtools**
- Open devtools in development
- Verify query states and cache
- Check background refetching behavior
## Migration Notes
### Breaking Changes
None! All providers maintain the same interface.
### Deprecation Warnings
The following will log warnings if used:
- `setWatchedItems()` in UserDataProvider
- `setShoppingLists()` in UserDataProvider
These will be removed in Phase 4 after mutations are implemented.
## Documentation Updates
- [x] Updated [ADR-0005](../docs/adr/0005-frontend-state-management-and-server-cache-strategy.md)
- [x] Created [Phase 2 Summary](./adr-0005-phase-2-summary.md)
- [ ] Update component documentation (if needed)
- [ ] Update developer onboarding guide (Phase 4)
## Conclusion
Phase 2 successfully migrated all remaining query-based data fetching to TanStack Query. The application now has a consistent, performant, and maintainable approach to server state management.
**Next Steps**: Proceed to Phase 3 (Mutations) when ready to implement data modification operations.


@@ -0,0 +1,466 @@
# MCP Server Access Summary
**Date**: 2026-01-08
**Environment**: Windows 10, VSCode with Claude Code integration
**Configuration Files**:
- [`mcp.json`](c:/Users/games3/AppData/Roaming/Code/User/mcp.json:1)
- [`mcp-servers.json`](c:/Users/games3/AppData/Roaming/Code/User/globalStorage/mcp-servers.json:1)
---
## Executive Summary
You have **8 MCP servers** configured in your environment. These servers extend Claude's capabilities by providing specialized tools for browser automation, file conversion, Git hosting integration, container management, filesystem access, and HTTP requests.
**Key Findings**:
- ✅ 7 servers are properly configured and ready to test
- ⚠️ 1 server requires token update (gitea-lan)
- 📋 Testing guide and automated script provided
- 🔒 Security considerations documented
---
## MCP Server Inventory
### 1. Chrome DevTools MCP Server
**Status**: ✅ Configured
**Type**: Browser Automation
**Command**: `npx -y chrome-devtools-mcp@latest`
**Capabilities**:
- Launch and control Chrome browser
- Navigate to URLs
- Click elements and interact with DOM
- Capture screenshots
- Monitor network traffic
- Execute JavaScript in browser context
**Use Cases**:
- Web scraping
- Automated testing
- UI verification
- Taking screenshots of web pages
- Debugging frontend issues
**Configuration Details**:
- Headless mode: Enabled
- Isolated: False (shares browser state)
- Channel: Stable
---
### 2. Markitdown MCP Server
**Status**: ✅ Configured
**Type**: File Conversion
**Command**: `C:\Users\games3\.local\bin\uvx.exe markitdown-mcp`
**Capabilities**:
- Convert PDF files to markdown
- Convert DOCX files to markdown
- Convert HTML to markdown
- OCR image files to extract text
- Convert PowerPoint presentations
**Use Cases**:
- Document processing
- Content extraction from various formats
- Making documents AI-readable
- Converting legacy documents to markdown
**Notes**:
- Requires Python and `uvx` to be installed
- Uses Microsoft's Markitdown library
---
### 3. Gitea Torbonium
**Status**: ✅ Configured
**Type**: Git Hosting Integration
**Host**: https://gitea.torbonium.com
**Command**: `d:\gitea-mcp\gitea-mcp.exe run -t stdio`
**Capabilities**:
- List and manage repositories
- Create and update issues
- Manage pull requests
- Read and write repository files
- Create and manage branches
- View commit history
- Manage repository settings
**Use Cases**:
- Automated issue creation
- Repository management
- Code review automation
- Documentation updates
- Release management
**Configuration**:
- Token: Configured (ending in ...fcf8)
- Access: Full API access based on token permissions
---
### 4. Gitea LAN (Torbolan)
**Status**: ⚠️ Requires Configuration
**Type**: Git Hosting Integration
**Host**: https://gitea.torbolan.com
**Command**: `d:\gitea-mcp\gitea-mcp.exe run -t stdio`
**Issue**: Access token is set to `REPLACE_WITH_NEW_TOKEN`
**Action Required**:
1. Log into https://gitea.torbolan.com
2. Navigate to Settings → Applications
3. Generate a new access token
4. Update the token in both [`mcp.json`](c:/Users/games3/AppData/Roaming/Code/User/mcp.json:35) and [`mcp-servers.json`](c:/Users/games3/AppData/Roaming/Code/User/globalStorage/mcp-servers.json:35)
**Capabilities**: Same as Gitea Torbonium (once configured)
---
### 5. Gitea Projectium
**Status**: ✅ Configured
**Type**: Git Hosting Integration
**Host**: https://gitea.projectium.com
**Command**: `d:\gitea-mcp\gitea-mcp.exe run -t stdio`
**Capabilities**: Same as Gitea Torbonium
**Configuration**:
- Token: Configured (ending in ...9ef)
- This appears to be the Gitea instance for your current project
**Note**: This is the Gitea instance hosting the current flyer-crawler project.
---
### 6. Podman/Docker MCP Server
**Status**: ✅ Configured
**Type**: Container Management
**Command**: `npx -y @modelcontextprotocol/server-docker`
**Capabilities**:
- List running containers
- Start and stop containers
- View container logs
- Execute commands inside containers
- Manage Docker images
- Inspect container details
- Create and manage networks
**Use Cases**:
- Container orchestration
- Development environment management
- Log analysis
- Container debugging
- Image management
**Configuration**:
- Docker Host: `npipe:////./pipe/docker_engine`
- Requires: Docker Desktop or Podman running on Windows
**Prerequisites**:
- Docker Desktop must be running
- Named pipe access configured
---
### 7. Filesystem MCP Server
**Status**: ✅ Configured
**Type**: File System Access
**Path**: `D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com`
**Command**: `npx -y @modelcontextprotocol/server-filesystem`
**Capabilities**:
- List directory contents recursively
- Read file contents
- Write and modify files
- Search for files
- Get file metadata (size, dates, permissions)
- Create and delete files/directories
**Use Cases**:
- Project file management
- Bulk file operations
- Code generation and modifications
- File content analysis
- Project structure exploration
**Security Note**:
This server has full read/write access to your project directory. It operates within the specified directory only.
**Scope**:
- Limited to: `D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com`
- Cannot access files outside this directory
---
### 8. Fetch MCP Server
**Status**: ✅ Configured
**Type**: HTTP Client
**Command**: `npx -y @modelcontextprotocol/server-fetch`
**Capabilities**:
- Send HTTP GET requests
- Send HTTP POST requests
- Send PUT, DELETE, PATCH requests
- Set custom headers
- Handle JSON and text responses
- Follow redirects
- Handle authentication
**Use Cases**:
- API testing
- Web scraping
- Data fetching from external services
- Webhook testing
- Integration with external APIs
**Examples**:
- Fetch data from REST APIs
- Download web content
- Test API endpoints
- Retrieve JSON data
- Monitor web services
---
## Current Status: MCP Server Tool Availability
**Important Note**: While these MCP servers are configured in your environment, they are **not currently exposed as callable tools** in this Claude Code session.
### What This Means:
MCP servers typically work by:
1. Running as separate processes
2. Exposing tools and resources via the Model Context Protocol
3. Being connected to the AI assistant by the client application (VSCode)
### Current Situation:
In the current session, Claude Code has access to:
- ✅ Built-in file operations (read, write, search, list)
- ✅ Browser actions
- ✅ Mode switching
- ✅ Task management tools
But does **NOT** have direct access to:
- ❌ MCP server-specific tools (e.g., Gitea API operations)
- ❌ Chrome DevTools controls
- ❌ Markitdown conversion functions
- ❌ Docker container management
- ❌ Specialized fetch operations
### Why This Happens:
MCP servers need to be:
1. Actively connected by the client (VSCode)
2. Running in the background
3. Properly registered with the AI assistant
The configuration files show they are set up, but the connection may not be active in this particular session.
---
## Testing Your MCP Servers
Three approaches to verify your MCP servers are working:
### Approach 1: Run the Automated Test Script
Execute the provided PowerShell script to test all servers:
```powershell
cd plans
.\test-mcp-servers.ps1
```
This will:
- Test each server's basic functionality
- Check API connectivity for Gitea servers
- Verify Docker daemon access
- Test filesystem accessibility
- Output a detailed results report
### Approach 2: Use MCP Inspector
Install and use the official MCP testing tool:
```powershell
# Install
npm install -g @modelcontextprotocol/inspector
# Test individual servers
mcp-inspector npx -y @modelcontextprotocol/server-fetch
mcp-inspector npx -y @modelcontextprotocol/server-filesystem "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com"
```
The inspector provides a web UI to:
- View available tools
- Test tool invocations
- See real-time logs
- Debug server issues
### Approach 3: Manual Testing
Follow the comprehensive guide in [`mcp-server-testing-guide.md`](plans/mcp-server-testing-guide.md:1) for step-by-step manual testing instructions.
---
## Recommendations
### 1. Immediate Actions
- [ ] **Fix Gitea LAN token**: Generate and configure a valid access token for gitea.torbolan.com
- [ ] **Run test script**: Execute `test-mcp-servers.ps1` to verify all servers
- [ ] **Review test results**: Check which servers are functional
- [ ] **Document failures**: Note any servers that fail testing
### 2. Security Improvements
- [ ] **Rotate Gitea tokens**: Consider rotating access tokens if they're old
- [ ] **Review token permissions**: Ensure tokens have minimal required permissions
- [ ] **Audit filesystem scope**: Verify filesystem server only has access to intended directories
- [ ] **Secure token storage**: Consider using environment variables or secret management (see the sketch after this list)
- [ ] **Enable audit logging**: Track MCP server operations for security monitoring
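For the token-storage item above, one minimal option is a per-user environment variable rather than a literal string in the JSON config (a sketch; the variable name is ours, and whether your MCP client can expand environment variables inside its JSON config depends on the client):

```powershell
# Store the token once per user; the value here is a placeholder
[Environment]::SetEnvironmentVariable("GITEA_PROJECTIUM_TOKEN", "<token>", "User")
# Shells and processes started after this call can read it
$env:GITEA_PROJECTIUM_TOKEN
```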
### 3. Configuration Optimization
- [ ] **Consolidate configs**: Both `mcp.json` and `mcp-servers.json` have identical content - determine which is canonical
- [ ] **Add error handling**: Configure timeout and retry settings for network-dependent servers
- [ ] **Document usage patterns**: Create examples of common operations for each server
- [ ] **Set up monitoring**: Track MCP server health and availability
### 4. Integration and Usage
- [ ] **Verify VSCode integration**: Ensure MCP servers are actually connected in active sessions
- [ ] **Test tool availability**: Confirm which MCP tools are exposed to Claude Code
- [ ] **Create usage examples**: Document real-world usage scenarios
- [ ] **Set up aliases**: Create shortcuts for commonly-used MCP operations
---
## MCP Server Use Case Matrix
| Server | Code Analysis | Testing | Deployment | Documentation | API Integration |
|--------|--------------|---------|------------|---------------|-----------------|
| Chrome DevTools | ✓ (UI testing) | ✓✓✓ | - | ✓ (screenshots) | ✓ |
| Markitdown | - | - | - | ✓✓✓ | - |
| Gitea (all 3) | ✓✓✓ | ✓ | ✓✓✓ | ✓✓ | ✓✓✓ |
| Docker | ✓ | ✓✓✓ | ✓✓✓ | - | ✓ |
| Filesystem | ✓✓✓ | ✓✓ | ✓ | ✓✓ | ✓ |
| Fetch | ✓ | ✓✓ | ✓ | - | ✓✓✓ |
Legend: ✓✓✓ = Primary use case, ✓✓ = Strong use case, ✓ = Applicable, - = Not applicable
---
## Potential Workflows
### Workflow 1: Automated Documentation Updates
1. **Fetch server**: Get latest API documentation from external service
2. **Markitdown**: Convert to markdown format
3. **Filesystem server**: Write to project documentation folder
4. **Gitea server**: Create commit and push changes
### Workflow 2: Container-Based Testing
1. **Docker server**: Start test containers
2. **Fetch server**: Send test API requests
3. **Docker server**: Collect container logs
4. **Filesystem server**: Write test results
5. **Gitea server**: Update test status in issues
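Steps 1-4 of this workflow, approximated by hand (a sketch: the container names come from the Podman setup documented later in this commit, the health endpoint is a hypothetical placeholder, and the final Gitea step is left to the MCP server):

```powershell
# 1. Start the test containers
podman start flyer-crawler-postgres flyer-crawler-redis
# 2. Send a test API request (hypothetical endpoint)
Invoke-RestMethod -Uri "http://localhost:3001/api/health" -TimeoutSec 10
# 3-4. Collect container logs and write them out as a test artifact
podman logs --tail 50 flyer-crawler-postgres | Out-File "plans\workflow2-postgres.log"
```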
### Workflow 3: Web UI Testing
1. **Chrome DevTools**: Launch browser and navigate to app
2. **Chrome DevTools**: Interact with UI elements
3. **Chrome DevTools**: Capture screenshots
4. **Filesystem server**: Save test artifacts
5. **Gitea server**: Update test documentation
### Workflow 4: Repository Management
1. **Gitea server**: List all repositories
2. **Gitea server**: Check for outdated dependencies
3. **Gitea server**: Create issues for updates needed
4. **Gitea server**: Generate summary report
---
## Next Steps
### Phase 1: Verification (Immediate)
1. Run the test script: [`test-mcp-servers.ps1`](plans/test-mcp-servers.ps1)
2. Review results and identify issues
3. Fix Gitea LAN token configuration
4. Re-test all servers
### Phase 2: Documentation (Short-term)
1. Document successful test results
2. Create usage examples for each server
3. Set up troubleshooting guides
4. Document common error scenarios
### Phase 3: Integration (Medium-term)
1. Verify MCP server connectivity in Claude Code sessions
2. Test tool availability and functionality
3. Create workflow templates
4. Integrate into development processes
### Phase 4: Optimization (Long-term)
1. Monitor MCP server performance
2. Optimize configurations
3. Add additional MCP servers as needed
4. Implement automated health checks
---
## Additional Resources
- **MCP Protocol Specification**: https://modelcontextprotocol.io
- **Testing Guide**: [`mcp-server-testing-guide.md`](plans/mcp-server-testing-guide.md)
- **Test Script**: [`test-mcp-servers.ps1`](plans/test-mcp-servers.ps1)
- **Configuration Files**:
  - [`mcp.json`](c:/Users/games3/AppData/Roaming/Code/User/mcp.json)
  - [`mcp-servers.json`](c:/Users/games3/AppData/Roaming/Code/User/globalStorage/mcp-servers.json)
---
## Questions to Consider
1. **Are MCP servers currently connected in active Claude Code sessions?**
- If not, what's required to enable the connection?
2. **Which MCP servers are most critical for your workflow?**
- Prioritize testing and configuration of high-value servers
3. **Are there additional MCP servers you need?**
- Consider: Database MCP, Slack MCP, Jira MCP, etc.
4. **How should MCP server logs be managed?**
- Consider centralized logging and monitoring
5. **What are the backup plans if an MCP server fails?**
- Document fallback procedures
---
## Conclusion
You have a comprehensive MCP server setup that provides powerful capabilities for:
- **Browser automation** (Chrome DevTools)
- **Document conversion** (Markitdown)
- **Git hosting integration** (3 Gitea instances)
- **Container management** (Docker)
- **File system operations** (Filesystem)
- **HTTP requests** (Fetch)
**Immediate Action Required**:
- Fix the Gitea LAN token configuration
- Run the test script to verify all servers are operational
- Review test results and address any failures
**Current Limitation**:
- MCP server tools are not exposed in the current Claude Code session
- May require VSCode or client-side configuration to enable
The provided testing guide and automation script will help you verify that all servers are properly configured and functional.
@@ -0,0 +1,489 @@
# MCP Server Testing Guide
This guide provides step-by-step instructions for manually testing each of the configured MCP servers.
## Overview
MCP (Model Context Protocol) servers are standalone processes that expose tools and resources to AI assistants. Each server runs independently and communicates via stdio.
## Testing Prerequisites
1. **MCP Inspector Tool** - Install the official MCP testing tool:
```powershell
npm install -g @modelcontextprotocol/inspector
```
2. **Alternative: Manual stdio testing** - Use the MCP CLI for direct interaction
---
## 1. Chrome DevTools MCP Server
**Purpose**: Browser automation and Chrome DevTools integration
### Test Command:
```powershell
npx -y chrome-devtools-mcp@latest --headless true --isolated false --channel stable
```
### Expected Capabilities:
- Browser launch and control
- DOM inspection
- Network monitoring
- JavaScript execution in browser context
### Manual Test Steps:
1. Run the command above
2. The server should start and output MCP protocol messages
3. Use MCP Inspector to connect:
```powershell
mcp-inspector npx -y chrome-devtools-mcp@latest --headless true --isolated false --channel stable
```
### Success Indicators:
- Server starts without errors
- Lists available tools (e.g., `navigate`, `click`, `screenshot`)
- Can execute browser actions
---
## 2. Markitdown MCP Server
**Purpose**: Convert various file formats to markdown
### Test Command:
```powershell
C:\Users\games3\.local\bin\uvx.exe markitdown-mcp
```
### Expected Capabilities:
- Convert PDF to markdown
- Convert DOCX to markdown
- Convert HTML to markdown
- Convert images (OCR) to markdown
### Manual Test Steps:
1. Ensure `uvx` is installed (Python tool)
2. Run the command above
3. Test with MCP Inspector:
```powershell
mcp-inspector C:\Users\games3\.local\bin\uvx.exe markitdown-mcp
```
### Success Indicators:
- Server initializes successfully
- Lists conversion tools
- Can convert a test file
### Troubleshooting:
- If `uvx` is not found, install `uv`, which provides the `uvx` command:

```powershell
pip install uv
```
- Verify Python is in PATH
---
## 3. Gitea MCP Servers
You have three Gitea server configurations. All use the same executable but connect to different instances.
### A. Gitea Torbonium
**Host**: https://gitea.torbonium.com
#### Test Command:
```powershell
$env:GITEA_HOST="https://gitea.torbonium.com"
$env:GITEA_ACCESS_TOKEN="391c9ddbe113378bc87bb8184800ba954648fcf8"
d:\gitea-mcp\gitea-mcp.exe run -t stdio
```
#### Expected Capabilities:
- List repositories
- Create/update issues
- Manage pull requests
- Read/write repository files
- Manage branches
#### Manual Test Steps:
1. Set environment variables
2. Run gitea-mcp.exe
3. Use MCP Inspector or test direct API access:
```bash
curl -H "Authorization: token 391c9ddbe113378bc87bb8184800ba954648fcf8" https://gitea.torbonium.com/api/v1/user/repos
```
```powershell
Invoke-RestMethod -Uri "https://gitea.torbonium.com/api/v1/user/repos" -Headers @{Authorization="token 391c9ddbe113378bc87bb8184800ba954648fcf8"}
```
### B. Gitea LAN (Torbolan)
**Host**: https://gitea.torbolan.com
**Status**: ⚠️ Token needs replacement
#### Test Command:
```powershell
$env:GITEA_HOST="https://gitea.torbolan.com"
$env:GITEA_ACCESS_TOKEN="REPLACE_WITH_NEW_TOKEN" # ⚠️ UPDATE THIS
d:\gitea-mcp\gitea-mcp.exe run -t stdio
```
#### Before Testing:
1. Generate a new access token:
- Log into https://gitea.torbolan.com
- Go to Settings → Applications → Generate New Token
- Copy the token and update the configuration
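Once generated, a quick check that the new token authenticates (same pattern as the other instances; substitute your actual token):

```powershell
$token = "<paste-new-token-here>"   # placeholder - never commit real tokens
Invoke-RestMethod -Uri "https://gitea.torbolan.com/api/v1/user" `
  -Headers @{ Authorization = "token $token" } -TimeoutSec 10
```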
### C. Gitea Projectium
**Host**: https://gitea.projectium.com
#### Test Command:
```powershell
$env:GITEA_HOST="https://gitea.projectium.com"
$env:GITEA_ACCESS_TOKEN="c72bc0f14f623fec233d3c94b3a16397fe3649ef"
d:\gitea-mcp\gitea-mcp.exe run -t stdio
```
### Success Indicators for All Gitea Servers:
- Server connects to Gitea instance
- Lists available repositories
- Can read repository metadata
- Authentication succeeds
### Troubleshooting:
- **401 Unauthorized**: Token is invalid or expired
- **Connection refused**: Check if Gitea instance is accessible
- **SSL errors**: Verify HTTPS certificate validity
---
## 4. Podman/Docker MCP Server
**Purpose**: Container management and Docker operations
### Test Command:
```powershell
$env:DOCKER_HOST="npipe:////./pipe/docker_engine"
npx -y @modelcontextprotocol/server-docker
```
### Expected Capabilities:
- List containers
- Start/stop containers
- View container logs
- Execute commands in containers
- Manage images
### Manual Test Steps:
1. Ensure Docker Desktop or Podman is running
2. Verify named pipe exists: `npipe:////./pipe/docker_engine`
3. Run the server command
4. Test with MCP Inspector:
```powershell
mcp-inspector npx -y @modelcontextprotocol/server-docker
```
### Verify Docker Access Directly:
```powershell
docker ps
docker images
```
### Success Indicators:
- Server connects to Docker daemon
- Can list containers and images
- Can execute container operations
### Troubleshooting:
- **Cannot connect to Docker daemon**: Ensure Docker Desktop is running
- **Named pipe error**: Check DOCKER_HOST configuration
- **Permission denied**: Run as administrator
---
## 5. Filesystem MCP Server
**Purpose**: Access and manipulate files in specified directory
### Test Command:
```powershell
npx -y @modelcontextprotocol/server-filesystem "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com"
```
### Expected Capabilities:
- List directory contents
- Read files
- Write files
- Search files
- Get file metadata
### Manual Test Steps:
1. Run the command above
2. Use MCP Inspector:
```powershell
mcp-inspector npx -y @modelcontextprotocol/server-filesystem "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com"
```
3. Test listing directory contents
### Verify Directory Access:
```powershell
Test-Path "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com"
Get-ChildItem "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com" | Select-Object -First 5
```
### Success Indicators:
- Server starts successfully
- Can list directory contents
- Can read file contents
- Write operations work (if permissions allow)
### Security Note:
This server has access to your entire project directory. Ensure it's only used in trusted contexts.
---
## 6. Fetch MCP Server
**Purpose**: Make HTTP requests to external APIs and websites
### Test Command:
```powershell
npx -y @modelcontextprotocol/server-fetch
```
### Expected Capabilities:
- HTTP GET requests
- HTTP POST requests
- Handle JSON/text responses
- Custom headers
- Follow redirects
### Manual Test Steps:
1. Run the server command
2. Use MCP Inspector:
```powershell
mcp-inspector npx -y @modelcontextprotocol/server-fetch
```
3. Test fetching a URL through the inspector
### Test Fetch Capability Directly:
```powershell
# Test if basic web requests work from this machine
curl https://api.github.com/users/github
# Or use Invoke-RestMethod
Invoke-RestMethod -Uri "https://api.github.com/users/github"
```
### Success Indicators:
- Server initializes
- Can fetch URLs
- Returns proper HTTP responses
- Handles errors gracefully
---
## Comprehensive Testing Script
Here's a PowerShell script to test all servers:
```powershell
# test-mcp-servers.ps1
Write-Host "=== MCP Server Testing Suite ===" -ForegroundColor Cyan
# Test 1: Chrome DevTools
Write-Host "`n[1/8] Testing Chrome DevTools..." -ForegroundColor Yellow
$chromeProc = Start-Process -FilePath "npx" -ArgumentList "-y","chrome-devtools-mcp@latest","--headless","true" -PassThru -NoNewWindow
Start-Sleep -Seconds 3
if (!$chromeProc.HasExited) {
Write-Host "✓ Chrome DevTools server started" -ForegroundColor Green
$chromeProc.Kill()
} else {
Write-Host "✗ Chrome DevTools failed" -ForegroundColor Red
}
# Test 2: Markitdown
Write-Host "`n[2/8] Testing Markitdown..." -ForegroundColor Yellow
if (Test-Path "C:\Users\games3\.local\bin\uvx.exe") {
Write-Host "✓ Markitdown executable found" -ForegroundColor Green
} else {
Write-Host "✗ Markitdown executable not found" -ForegroundColor Red
}
# Test 3-5: Gitea Servers
Write-Host "`n[3/8] Testing Gitea Torbonium..." -ForegroundColor Yellow
try {
$response = Invoke-RestMethod -Uri "https://gitea.torbonium.com/api/v1/user" -Headers @{Authorization="token 391c9ddbe113378bc87bb8184800ba954648fcf8"}
Write-Host "✓ Gitea Torbonium authenticated as: $($response.login)" -ForegroundColor Green
} catch {
Write-Host "✗ Gitea Torbonium failed: $($_.Exception.Message)" -ForegroundColor Red
}
Write-Host "`n[4/8] Testing Gitea LAN..." -ForegroundColor Yellow
Write-Host "⚠ Token needs replacement" -ForegroundColor Yellow
Write-Host "`n[5/8] Testing Gitea Projectium..." -ForegroundColor Yellow
try {
$response = Invoke-RestMethod -Uri "https://gitea.projectium.com/api/v1/user" -Headers @{Authorization="token c72bc0f14f623fec233d3c94b3a16397fe3649ef"}
Write-Host "✓ Gitea Projectium authenticated as: $($response.login)" -ForegroundColor Green
} catch {
Write-Host "✗ Gitea Projectium failed: $($_.Exception.Message)" -ForegroundColor Red
}
# Test 6: Podman/Docker
Write-Host "`n[6/8] Testing Docker..." -ForegroundColor Yellow
try {
docker ps > $null 2>&1
if ($LASTEXITCODE -eq 0) {
Write-Host "✓ Docker daemon accessible" -ForegroundColor Green
} else {
Write-Host "✗ Docker daemon not accessible" -ForegroundColor Red
}
} catch {
Write-Host "✗ Docker not available" -ForegroundColor Red
}
# Test 7: Filesystem
Write-Host "`n[7/8] Testing Filesystem..." -ForegroundColor Yellow
if (Test-Path "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com") {
Write-Host "✓ Project directory accessible" -ForegroundColor Green
} else {
Write-Host "✗ Project directory not found" -ForegroundColor Red
}
# Test 8: Fetch
Write-Host "`n[8/8] Testing Fetch..." -ForegroundColor Yellow
try {
$response = Invoke-RestMethod -Uri "https://api.github.com/zen"
Write-Host "✓ Fetch capability working" -ForegroundColor Green
} catch {
Write-Host "✗ Fetch failed" -ForegroundColor Red
}
Write-Host "`n=== Testing Complete ===" -ForegroundColor Cyan
```
---
## Using MCP Inspector for Interactive Testing
The MCP Inspector provides a visual interface for testing servers:
```powershell
# Install globally
npm install -g @modelcontextprotocol/inspector
# Test any server
mcp-inspector <command> <args>
```
### Example Sessions:
```powershell
# Test fetch server
mcp-inspector npx -y @modelcontextprotocol/server-fetch
# Test filesystem server
mcp-inspector npx -y @modelcontextprotocol/server-filesystem "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com"
# Test Docker server
mcp-inspector npx -y @modelcontextprotocol/server-docker
```
---
## Common Issues and Solutions
### Issue: "Cannot find module" or "Command not found"
**Solution**: Ensure Node.js and npm are installed and in PATH
### Issue: MCP server starts but doesn't respond
**Solution**: Check server logs, verify stdio communication, ensure no JSON parsing errors
### Issue: Authentication failures with Gitea
**Solution**:
1. Verify tokens haven't expired
2. Check token permissions in Gitea settings
3. Ensure network access to Gitea instances
### Issue: Docker server cannot connect
**Solution**:
1. Start Docker Desktop
2. Verify DOCKER_HOST environment variable
3. Check Windows named pipe permissions
---
## Next Steps
After testing:
1. Document which servers are working
2. Fix any configuration issues
3. Update tokens as needed
4. Consider security implications of exposed servers
5. Set up monitoring for server health
---
## Security Recommendations
1. **Token Security**: Keep Gitea tokens secure, rotate regularly
2. **Filesystem Access**: Limit filesystem server scope to necessary directories
3. **Network Access**: Consider firewall rules for external MCP servers
4. **Audit Logging**: Enable logging for all MCP server operations
5. **Token Permissions**: Use minimal required permissions for Gitea tokens
@@ -0,0 +1,133 @@
# Podman MCP Server Test Results
**Date**: 2026-01-08
**Status**: Configuration Complete ✅
## Configuration Summary
### MCP Configuration File
**Location**: `c:/Users/games3/AppData/Roaming/Code/User/mcp.json`
```json
"podman": {
"command": "npx",
"args": ["-y", "docker-mcp"],
"env": {
"DOCKER_HOST": "ssh://root@127.0.0.1:2972/run/podman/podman.sock"
}
}
```
### Key Configuration Details
- **Package**: `docker-mcp` (community MCP server with SSH support)
- **Connection Method**: SSH to Podman machine
- **SSH Endpoint**: `root@127.0.0.1:2972`
- **Socket Path**: `/run/podman/podman.sock` (inside WSL)
## Podman System Status
### Podman Machine
```
NAME VM TYPE CREATED CPUS MEMORY DISK SIZE
podman-machine-default wsl 4 weeks ago 4 2GiB 100GiB
```
### Connection Information
```
Name: podman-machine-default-root
URI: ssh://root@127.0.0.1:2972/run/podman/podman.sock
Default: true
```
### Container Status
Podman is operational with 3 containers:
- `flyer-dev` (Ubuntu) - Exited
- `flyer-crawler-redis` (Redis) - Exited
- `flyer-crawler-postgres` (PostGIS) - Exited
## Test Results
### Command Line Tests
**Podman CLI**: Working - `podman ps` returns successfully
**Container Management**: Working - Can list and manage containers
**Socket Connection**: Working - SSH connection to Podman machine functional
### MCP Server Integration Tests
**Configuration File**: Updated and valid JSON
**VSCode Restart**: Completed to load new MCP configuration
**Package Selection**: Using `docker-mcp` (supports SSH connections)
**Environment Variables**: DOCKER_HOST set correctly for Podman
## How to Verify MCP Server is Working
The Podman MCP server should now be available through Claude Code. To verify:
1. **In Claude Code conversation**: Ask Claude to list containers or perform container operations
2. **Check VSCode logs**: Look for MCP server connection logs
3. **Test with MCP Inspector** (optional):
```powershell
$env:DOCKER_HOST="ssh://root@127.0.0.1:2972/run/podman/podman.sock"
npx -y @modelcontextprotocol/inspector docker-mcp
```
## Expected MCP Tools Available
Once the MCP server is fully loaded, the following tools should be available:
- **Container Operations**: list, start, stop, restart, remove containers
- **Container Logs**: view container logs
- **Container Stats**: monitor container resource usage
- **Image Management**: list, pull, remove images
- **Container Execution**: execute commands inside containers
## Troubleshooting
### If MCP Server Doesn't Connect
1. **Verify Podman is running**:
```bash
podman ps
```
2. **Check SSH connection**:
```bash
podman system connection list
```
3. **Test docker-mcp package manually**:
```powershell
$env:DOCKER_HOST="ssh://root@127.0.0.1:2972/run/podman/podman.sock"
npx -y docker-mcp
```
4. **Check VSCode Extension Host logs**:
- Open Command Palette (Ctrl+Shift+P)
- Search for "Developer: Show Logs"
- Select "Extension Host"
### Common Issues
- **Port 2972 not accessible**: Restart Podman machine with `podman machine restart`
- **SSH key issues**: Verify SSH keys are set up correctly for Podman machine
- **Package not found**: Ensure npm can access registry (check internet connection)
## Next Steps
1. Test the Podman MCP server by requesting container operations through Claude Code
2. If the MCP server isn't responding, check the Extension Host logs in VSCode
3. Consider testing with alternative packages if `docker-mcp` has issues:
- `docker-mcp-server` (alternative community package)
- `docker-mcp-secure` (security-focused alternative)
## Additional Notes
- The `docker-mcp` package is a community-maintained MCP server
- It supports both local Docker sockets and remote SSH connections
- The package uses the `dockerode` library under the hood, which works with both Docker and Podman
- Podman's API is Docker-compatible, so Docker MCP servers work with Podman
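Since `docker-mcp` relies on that compatibility layer, the endpoint can be sanity-checked with the plain Docker CLI pointed at the same host string (a sketch; assumes the `docker` CLI is installed and SSH access to the Podman machine works):

```powershell
$env:DOCKER_HOST = "ssh://root@127.0.0.1:2972/run/podman/podman.sock"
docker version   # the Server section should identify a Podman engine
docker ps -a     # should match the output of `podman ps -a`
```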
## References
- **docker-mcp package**: https://www.npmjs.com/package/docker-mcp
- **Podman Machine Documentation**: https://docs.podman.io/en/latest/markdown/podman-machine.1.html
- **Model Context Protocol**: https://modelcontextprotocol.io
@@ -0,0 +1,143 @@
# test-mcp-servers.ps1
# Automated testing script for all configured MCP servers
Write-Host "=== MCP Server Testing Suite ===" -ForegroundColor Cyan
Write-Host "Testing all configured MCP servers..." -ForegroundColor White
Write-Host ""
$results = @()
# Test 1: Chrome DevTools
Write-Host "[1/8] Testing Chrome DevTools..." -ForegroundColor Yellow
try {
$chromeProc = Start-Process -FilePath "npx" -ArgumentList "-y","chrome-devtools-mcp@latest","--headless","true" -PassThru -NoNewWindow -RedirectStandardOutput "$env:TEMP\chrome-test.log" -ErrorAction Stop
Start-Sleep -Seconds 5
if (!$chromeProc.HasExited) {
Write-Host " ✓ Chrome DevTools server started successfully" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Chrome DevTools"; Status="PASS"; Details="Server started"}
Stop-Process -Id $chromeProc.Id -Force -ErrorAction SilentlyContinue
} else {
Write-Host " ✗ Chrome DevTools server exited immediately" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Chrome DevTools"; Status="FAIL"; Details="Server exited"}
}
} catch {
Write-Host " ✗ Chrome DevTools failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Chrome DevTools"; Status="FAIL"; Details=$_.Exception.Message}
}
# Test 2: Markitdown
Write-Host "`n[2/8] Testing Markitdown..." -ForegroundColor Yellow
$markitdownPath = "C:\Users\games3\.local\bin\uvx.exe"
if (Test-Path $markitdownPath) {
Write-Host " ✓ Markitdown executable found at: $markitdownPath" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Markitdown"; Status="PASS"; Details="Executable exists"}
} else {
Write-Host " ✗ Markitdown executable not found at: $markitdownPath" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Markitdown"; Status="FAIL"; Details="Executable not found"}
}
# Test 3: Gitea Torbonium
Write-Host "`n[3/8] Testing Gitea Torbonium (gitea.torbonium.com)..." -ForegroundColor Yellow
try {
$headers = @{Authorization="token 391c9ddbe113378bc87bb8184800ba954648fcf8"}
$response = Invoke-RestMethod -Uri "https://gitea.torbonium.com/api/v1/user" -Headers $headers -TimeoutSec 10
Write-Host " ✓ Gitea Torbonium authenticated as: $($response.login)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Gitea Torbonium"; Status="PASS"; Details="Authenticated as $($response.login)"}
} catch {
Write-Host " ✗ Gitea Torbonium failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Gitea Torbonium"; Status="FAIL"; Details=$_.Exception.Message}
}
# Test 4: Gitea LAN
Write-Host "`n[4/8] Testing Gitea LAN (gitea.torbolan.com)..." -ForegroundColor Yellow
Write-Host " âš  Token needs replacement - SKIPPING" -ForegroundColor Yellow
$results += [PSCustomObject]@{Server="Gitea LAN"; Status="SKIP"; Details="Token placeholder needs update"}
# Test 5: Gitea Projectium
Write-Host "`n[5/8] Testing Gitea Projectium (gitea.projectium.com)..." -ForegroundColor Yellow
try {
$headers = @{Authorization="token c72bc0f14f623fec233d3c94b3a16397fe3649ef"}
$response = Invoke-RestMethod -Uri "https://gitea.projectium.com/api/v1/user" -Headers $headers -TimeoutSec 10
Write-Host " ✓ Gitea Projectium authenticated as: $($response.login)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Gitea Projectium"; Status="PASS"; Details="Authenticated as $($response.login)"}
} catch {
Write-Host " ✗ Gitea Projectium failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Gitea Projectium"; Status="FAIL"; Details=$_.Exception.Message}
}
# Test 6: Podman/Docker
Write-Host "`n[6/8] Testing Docker/Podman..." -ForegroundColor Yellow
try {
$dockerOutput = & docker version 2>$null
if ($LASTEXITCODE -eq 0 -and $dockerOutput) {
Write-Host " ✓ Docker daemon accessible" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Docker/Podman"; Status="PASS"; Details="Docker daemon running"}
} else {
Write-Host " ✗ Docker daemon not accessible" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Docker/Podman"; Status="FAIL"; Details="Cannot connect to daemon"}
}
} catch {
Write-Host " ✗ Docker not available: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Docker/Podman"; Status="FAIL"; Details="Docker not installed"}
}
# Test 7: Filesystem
Write-Host "`n[7/8] Testing Filesystem..." -ForegroundColor Yellow
$projectPath = "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com"
if (Test-Path $projectPath) {
$fileCount = (Get-ChildItem $projectPath -File -Recurse -ErrorAction SilentlyContinue | Measure-Object).Count
Write-Host " ✓ Project directory accessible ($fileCount files)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Filesystem"; Status="PASS"; Details="Path accessible, $fileCount files"}
} else {
Write-Host " ✗ Project directory not accessible" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Filesystem"; Status="FAIL"; Details="Path not accessible"}
}
# Test 8: Fetch MCP Server
Write-Host "`n[8/8] Testing Fetch MCP Server..." -ForegroundColor Yellow
try {
# Test by attempting to fetch a simple public API
$testUrl = "https://api.github.com/zen"
$response = Invoke-RestMethod -Uri $testUrl -TimeoutSec 10 -ErrorAction Stop
if ($response) {
Write-Host " ✓ Fetch server prerequisites met (network accessible)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Fetch"; Status="PASS"; Details="Network accessible, can fetch data"}
} else {
Write-Host " ✗ Fetch server test failed" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Fetch"; Status="FAIL"; Details="Could not fetch test data"}
}
} catch {
Write-Host " ✗ Fetch server test failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Fetch"; Status="FAIL"; Details=$_.Exception.Message}
}
# Display Results Summary
Write-Host "`n`n=== Test Results Summary ===" -ForegroundColor Cyan
Write-Host ""
$results | Format-Table -AutoSize
# Count results
$passed = ($results | Where-Object Status -eq "PASS").Count
$failed = ($results | Where-Object Status -eq "FAIL").Count
$skipped = ($results | Where-Object Status -eq "SKIP").Count
$total = $results.Count
Write-Host "`nOverall Results:" -ForegroundColor White
Write-Host " Total Tests: $total" -ForegroundColor White
Write-Host " Passed: $passed" -ForegroundColor Green
Write-Host " Failed: $failed" -ForegroundColor Red
Write-Host " Skipped: $skipped" -ForegroundColor Yellow
# Exit code based on results
if ($failed -gt 0) {
Write-Host "`n⚠️ Some tests failed. Review the results above." -ForegroundColor Yellow
exit 1
} elseif ($passed -eq ($total - $skipped)) {
Write-Host "`n✓ All tests passed!" -ForegroundColor Green
exit 0
} else {
Write-Host "`n⚠️ Tests completed with warnings." -ForegroundColor Yellow
exit 0
}
plans/test-mcp-servers.ps1
@@ -0,0 +1,157 @@
# test-mcp-servers.ps1
# Automated testing script for all configured MCP servers
Write-Host "=== MCP Server Testing Suite ===" -ForegroundColor Cyan
Write-Host "Testing all configured MCP servers..." -ForegroundColor White
Write-Host ""
$results = @()
# Test 1: Chrome DevTools
Write-Host "[1/8] Testing Chrome DevTools..." -ForegroundColor Yellow
try {
# Use Start-Job to run npx in background since npx is a PowerShell script on Windows
$chromeJob = Start-Job -ScriptBlock {
& npx -y chrome-devtools-mcp@latest --headless true 2>&1
}
Start-Sleep -Seconds 5
$jobState = Get-Job -Id $chromeJob.Id | Select-Object -ExpandProperty State
if ($jobState -eq "Running") {
Write-Host " [PASS] Chrome DevTools server started successfully" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Chrome DevTools"; Status="PASS"; Details="Server started"}
Stop-Job -Id $chromeJob.Id -ErrorAction SilentlyContinue
Remove-Job -Id $chromeJob.Id -Force -ErrorAction SilentlyContinue
} else {
Receive-Job -Id $chromeJob.Id -ErrorAction SilentlyContinue | Out-Null
Write-Host " [FAIL] Chrome DevTools server failed to start" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Chrome DevTools"; Status="FAIL"; Details="Server failed to start"}
Remove-Job -Id $chromeJob.Id -Force -ErrorAction SilentlyContinue
}
} catch {
Write-Host " [FAIL] Chrome DevTools failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Chrome DevTools"; Status="FAIL"; Details=$_.Exception.Message}
}
# Test 2: Markitdown
Write-Host "`n[2/8] Testing Markitdown..." -ForegroundColor Yellow
$markitdownPath = "C:\Users\games3\.local\bin\uvx.exe"
if (Test-Path $markitdownPath) {
Write-Host " [PASS] Markitdown executable found at: $markitdownPath" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Markitdown"; Status="PASS"; Details="Executable exists"}
} else {
Write-Host " [FAIL] Markitdown executable not found at: $markitdownPath" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Markitdown"; Status="FAIL"; Details="Executable not found"}
}
# Test 3: Gitea Torbonium
Write-Host "`n[3/8] Testing Gitea Torbonium (gitea.torbonium.com)..." -ForegroundColor Yellow
try {
$headers = @{Authorization="token 391c9ddbe113378bc87bb8184800ba954648fcf8"}
$response = Invoke-RestMethod -Uri "https://gitea.torbonium.com/api/v1/user" -Headers $headers -TimeoutSec 10
Write-Host " [PASS] Gitea Torbonium authenticated as: $($response.login)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Gitea Torbonium"; Status="PASS"; Details="Authenticated as $($response.login)"}
} catch {
Write-Host " [FAIL] Gitea Torbonium failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Gitea Torbonium"; Status="FAIL"; Details=$_.Exception.Message}
}
# Test 4: Gitea LAN
Write-Host "`n[4/8] Testing Gitea LAN (gitea.torbolan.com)..." -ForegroundColor Yellow
Write-Host " [SKIP] Token needs replacement - SKIPPING" -ForegroundColor Yellow
$results += [PSCustomObject]@{Server="Gitea LAN"; Status="SKIP"; Details="Token placeholder needs update"}
# Test 5: Gitea Projectium
Write-Host "`n[5/8] Testing Gitea Projectium (gitea.projectium.com)..." -ForegroundColor Yellow
try {
$headers = @{Authorization="token c72bc0f14f623fec233d3c94b3a16397fe3649ef"}
$response = Invoke-RestMethod -Uri "https://gitea.projectium.com/api/v1/user" -Headers $headers -TimeoutSec 10
Write-Host " [PASS] Gitea Projectium authenticated as: $($response.login)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Gitea Projectium"; Status="PASS"; Details="Authenticated as $($response.login)"}
} catch {
Write-Host " [FAIL] Gitea Projectium failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Gitea Projectium"; Status="FAIL"; Details=$_.Exception.Message}
}
# Test 6: Podman/Docker
Write-Host "`n[6/8] Testing Docker/Podman..." -ForegroundColor Yellow
try {
# Try podman first, then docker
& podman ps 2>$null | Out-Null
if ($LASTEXITCODE -eq 0) {
Write-Host " [PASS] Podman daemon accessible and responding" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Docker/Podman"; Status="PASS"; Details="Podman running"}
} else {
& docker ps 2>$null | Out-Null
if ($LASTEXITCODE -eq 0) {
Write-Host " [PASS] Docker daemon accessible" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Docker/Podman"; Status="PASS"; Details="Docker running"}
} else {
Write-Host " [FAIL] Neither Podman nor Docker available" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Docker/Podman"; Status="FAIL"; Details="No container runtime found"}
}
}
} catch {
Write-Host " [FAIL] Container runtime test failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Docker/Podman"; Status="FAIL"; Details=$_.Exception.Message}
}
# Test 7: Filesystem
Write-Host "`n[7/8] Testing Filesystem..." -ForegroundColor Yellow
$projectPath = "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com"
if (Test-Path $projectPath) {
$fileCount = (Get-ChildItem $projectPath -File -Recurse -ErrorAction SilentlyContinue | Measure-Object).Count
Write-Host " [PASS] Project directory accessible ($fileCount files)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Filesystem"; Status="PASS"; Details="Path accessible, $fileCount files"}
} else {
Write-Host " [FAIL] Project directory not accessible" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Filesystem"; Status="FAIL"; Details="Path not accessible"}
}
# Test 8: Fetch MCP Server
Write-Host "`n[8/8] Testing Fetch MCP Server..." -ForegroundColor Yellow
try {
# Test by attempting to fetch a simple public API
$testUrl = "https://api.github.com/zen"
$response = Invoke-RestMethod -Uri $testUrl -TimeoutSec 10 -ErrorAction Stop
if ($response) {
Write-Host " [PASS] Fetch server prerequisites met (network accessible)" -ForegroundColor Green
$results += [PSCustomObject]@{Server="Fetch"; Status="PASS"; Details="Network accessible, can fetch data"}
} else {
Write-Host " [FAIL] Fetch server test failed" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Fetch"; Status="FAIL"; Details="Could not fetch test data"}
}
} catch {
Write-Host " [FAIL] Fetch server test failed: $($_.Exception.Message)" -ForegroundColor Red
$results += [PSCustomObject]@{Server="Fetch"; Status="FAIL"; Details=$_.Exception.Message}
}
# Display Results Summary
Write-Host "`n`n=== Test Results Summary ===" -ForegroundColor Cyan
Write-Host ""
$results | Format-Table -AutoSize
# Count results
$passed = ($results | Where-Object Status -eq "PASS").Count
$failed = ($results | Where-Object Status -eq "FAIL").Count
$skipped = ($results | Where-Object Status -eq "SKIP").Count
$total = $results.Count
Write-Host "`nOverall Results:" -ForegroundColor White
Write-Host " Total Tests: $total" -ForegroundColor White
Write-Host " Passed: $passed" -ForegroundColor Green
Write-Host " Failed: $failed" -ForegroundColor Red
Write-Host " Skipped: $skipped" -ForegroundColor Yellow
# Exit code based on results
if ($failed -gt 0) {
Write-Host "`n[WARNING] Some tests failed. Review the results above." -ForegroundColor Yellow
exit 1
} elseif ($passed -eq ($total - $skipped)) {
Write-Host "`n[SUCCESS] All tests passed!" -ForegroundColor Green
exit 0
} else {
Write-Host "`n[WARNING] Tests completed with warnings." -ForegroundColor Yellow
exit 0
}
@@ -0,0 +1,13 @@
# Update MCP configuration for Podman
$mcpConfigPath = "c:/Users/games3/AppData/Roaming/Code/User/mcp.json"
$content = Get-Content $mcpConfigPath -Raw
# Replace Docker named pipe with Podman SSH connection
$content = $content -replace 'npipe:////./pipe/docker_engine', 'ssh://root@127.0.0.1:2972/run/podman/podman.sock'
# Write back
Set-Content $mcpConfigPath -Value $content -NoNewline
Write-Host "Updated MCP configuration for Podman" -ForegroundColor Green
Write-Host "New DOCKER_HOST: ssh://root@127.0.0.1:2972/run/podman/podman.sock" -ForegroundColor Cyan
run-integration-tests.ps1
@@ -0,0 +1,88 @@
# PowerShell script to run integration tests with containerized infrastructure
# Sets up environment variables and runs the integration test suite
Write-Host "=== Flyer Crawler Integration Test Runner ===" -ForegroundColor Cyan
Write-Host ""
# Check if containers are running
Write-Host "Checking container status..." -ForegroundColor Yellow
$postgresRunning = podman ps --filter "name=flyer-crawler-postgres" --format "{{.Names}}" 2>$null
$redisRunning = podman ps --filter "name=flyer-crawler-redis" --format "{{.Names}}" 2>$null
if (-not $postgresRunning) {
Write-Host "ERROR: PostgreSQL container is not running!" -ForegroundColor Red
Write-Host "Start it with: podman start flyer-crawler-postgres" -ForegroundColor Yellow
exit 1
}
if (-not $redisRunning) {
Write-Host "ERROR: Redis container is not running!" -ForegroundColor Red
Write-Host "Start it with: podman start flyer-crawler-redis" -ForegroundColor Yellow
exit 1
}
Write-Host "✓ PostgreSQL container: $postgresRunning" -ForegroundColor Green
Write-Host "✓ Redis container: $redisRunning" -ForegroundColor Green
Write-Host ""
# Set environment variables for integration tests
Write-Host "Setting environment variables..." -ForegroundColor Yellow
$env:NODE_ENV = "test"
$env:DB_HOST = "localhost"
$env:DB_USER = "postgres"
$env:DB_PASSWORD = "postgres"
$env:DB_NAME = "flyer_crawler_dev"
$env:DB_PORT = "5432"
$env:REDIS_URL = "redis://localhost:6379"
$env:REDIS_PASSWORD = ""
$env:FRONTEND_URL = "http://localhost:5173"
$env:VITE_API_BASE_URL = "http://localhost:3001/api"
$env:JWT_SECRET = "test-jwt-secret-for-integration-tests"
$env:NODE_OPTIONS = "--max-old-space-size=8192"
Write-Host "✓ Environment configured" -ForegroundColor Green
Write-Host ""
# Display configuration
Write-Host "Test Configuration:" -ForegroundColor Cyan
Write-Host " NODE_ENV: $env:NODE_ENV"
Write-Host " Database: $env:DB_HOST`:$env:DB_PORT/$env:DB_NAME"
Write-Host " Redis: $env:REDIS_URL"
Write-Host " Frontend URL: $env:FRONTEND_URL"
Write-Host ""
# Check database connectivity
Write-Host "Verifying database connection..." -ForegroundColor Yellow
$dbCheck = podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT 1;" 2>&1
if ($LASTEXITCODE -ne 0) {
Write-Host "ERROR: Cannot connect to database!" -ForegroundColor Red
Write-Host $dbCheck
exit 1
}
Write-Host "✓ Database connection successful" -ForegroundColor Green
Write-Host ""
# Check URL constraints are enabled
Write-Host "Verifying URL constraints..." -ForegroundColor Yellow
$constraints = podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -t -A -c "SELECT COUNT(*) FROM pg_constraint WHERE conname LIKE '%url_check';"
Write-Host "✓ Found $constraints URL constraint(s)" -ForegroundColor Green
Write-Host ""
# Run integration tests
Write-Host "=== Running Integration Tests ===" -ForegroundColor Cyan
Write-Host ""
npm run test:integration
$exitCode = $LASTEXITCODE
Write-Host ""
if ($exitCode -eq 0) {
Write-Host "=== Integration Tests PASSED ===" -ForegroundColor Green
} else {
Write-Host "=== Integration Tests FAILED ===" -ForegroundColor Red
Write-Host "Exit code: $exitCode" -ForegroundColor Red
}
exit $exitCode
run-tests.cmd
@@ -0,0 +1,80 @@
@echo off
REM Simple batch script to run integration tests with container infrastructure
echo === Flyer Crawler Integration Test Runner ===
echo.
REM Check containers
echo Checking container status...
podman ps --filter "name=flyer-crawler-postgres" --format "{{.Names}}" >nul 2>&1
if errorlevel 1 (
echo ERROR: PostgreSQL container is not running!
echo Start it with: podman start flyer-crawler-postgres
exit /b 1
)
podman ps --filter "name=flyer-crawler-redis" --format "{{.Names}}" >nul 2>&1
if errorlevel 1 (
echo ERROR: Redis container is not running!
echo Start it with: podman start flyer-crawler-redis
exit /b 1
)
echo [OK] Containers are running
echo.
REM Set environment variables
echo Setting environment variables...
set NODE_ENV=test
set DB_HOST=localhost
set DB_USER=postgres
set DB_PASSWORD=postgres
set DB_NAME=flyer_crawler_dev
set DB_PORT=5432
set REDIS_URL=redis://localhost:6379
set REDIS_PASSWORD=
set FRONTEND_URL=http://localhost:5173
set VITE_API_BASE_URL=http://localhost:3001/api
set JWT_SECRET=test-jwt-secret-for-integration-tests
set NODE_OPTIONS=--max-old-space-size=8192
echo [OK] Environment configured
echo.
echo Test Configuration:
echo NODE_ENV: %NODE_ENV%
echo Database: %DB_HOST%:%DB_PORT%/%DB_NAME%
echo Redis: %REDIS_URL%
echo Frontend URL: %FRONTEND_URL%
echo.
REM Verify database
echo Verifying database connection...
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT 1;" >nul 2>&1
if errorlevel 1 (
echo ERROR: Cannot connect to database!
exit /b 1
)
echo [OK] Database connection successful
echo.
REM Check URL constraints
echo Verifying URL constraints...
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -t -A -c "SELECT COUNT(*) FROM pg_constraint WHERE conname LIKE '%%url_check';"
echo.
REM Run tests
echo === Running Integration Tests ===
echo.
npm run test:integration
if errorlevel 1 (
echo.
echo === Integration Tests FAILED ===
exit /b 1
) else (
echo.
echo === Integration Tests PASSED ===
exit /b 0
)
@@ -73,8 +73,8 @@ app.use(passport.initialize()); // Initialize Passport
// --- MOCK AUTH FOR TESTING ---
// This MUST come after passport.initialize() and BEFORE any of the API routes.
import { mockAuth } from './src/routes/passport.routes';
app.use(mockAuth);
// Add a request timeout middleware. This will help prevent requests from hanging indefinitely.
// We set a generous 5-minute timeout to accommodate slow AI processing for large flyers.
@@ -141,10 +141,10 @@ CREATE TABLE IF NOT EXISTS public.flyers (
 updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
 CONSTRAINT flyers_valid_dates_check CHECK (valid_to >= valid_from),
 CONSTRAINT flyers_file_name_check CHECK (TRIM(file_name) <> ''),
-CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64)
+CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64),
+CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https?://.*'),
+CONSTRAINT flyers_icon_url_check CHECK (icon_url ~* '^https?://.*')
 );
--- CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https://?.*'),
--- CONSTRAINT flyers_icon_url_check CHECK (icon_url IS NULL OR icon_url ~* '^https://?.*'),
 COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.';
 CREATE INDEX IF NOT EXISTS idx_flyers_store_id ON public.flyers(store_id);
 COMMENT ON COLUMN public.flyers.file_name IS 'The original name of the uploaded flyer file (e.g., "flyer_week_1.pdf").';
@@ -157,10 +157,10 @@ CREATE TABLE IF NOT EXISTS public.flyers (
 updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
 CONSTRAINT flyers_valid_dates_check CHECK (valid_to >= valid_from),
 CONSTRAINT flyers_file_name_check CHECK (TRIM(file_name) <> ''),
-CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64)
+CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64),
+CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https?://.*'),
+CONSTRAINT flyers_icon_url_check CHECK (icon_url ~* '^https?://.*')
 );
--- CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https?://.*'),
--- CONSTRAINT flyers_icon_url_check CHECK (icon_url ~* '^https?://.*'),
 COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.';
 CREATE INDEX IF NOT EXISTS idx_flyers_store_id ON public.flyers(store_id);
 COMMENT ON COLUMN public.flyers.file_name IS 'The original name of the uploaded flyer file (e.g., "flyer_week_1.pdf").';
src/config/queryClient.ts
@@ -0,0 +1,53 @@
// src/config/queryClient.ts
import { QueryClient } from '@tanstack/react-query';
import { logger } from '../services/logger.client';
/**
* Global QueryClient instance for TanStack Query.
*
* Configured with sensible defaults for the flyer-crawler application:
* - 5 minute stale time for most queries
* - 30 minute garbage collection time
* - Single retry attempt on failure
* - No automatic refetch on window focus (to reduce API load)
* - Refetch on component mount for fresh data
*
* @see https://tanstack.com/query/latest/docs/reference/QueryClient
*/
export const queryClient = new QueryClient({
defaultOptions: {
queries: {
// Data is considered fresh for 5 minutes
staleTime: 1000 * 60 * 5,
// Unused data is garbage collected after 30 minutes
// (gcTime was formerly called cacheTime in v4)
gcTime: 1000 * 60 * 30,
// Retry failed requests once
retry: 1,
// Don't refetch on window focus to reduce API calls
// Users can manually refresh if needed
refetchOnWindowFocus: false,
// Always refetch on component mount to ensure fresh data
refetchOnMount: true,
// Don't refetch on reconnect by default
refetchOnReconnect: false,
},
mutations: {
// Don't retry mutations automatically
// User actions should be explicit
retry: 0,
// Log mutation errors for debugging
onError: (error) => {
logger.error('Mutation error', {
error: error instanceof Error ? error.message : 'Unknown error',
});
},
},
},
});
@@ -0,0 +1,60 @@
// src/hooks/mutations/useAddWatchedItemMutation.ts
import { useMutation, useQueryClient } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import { notifySuccess, notifyError } from '../../services/notificationService';
interface AddWatchedItemParams {
itemName: string;
category?: string;
}
/**
* Mutation hook for adding an item to the user's watched items list.
*
* This hook provides optimistic updates and automatic cache invalidation.
* When the mutation succeeds, it invalidates the watched-items query to
* trigger a refetch of the updated list.
*
* @returns Mutation object with mutate function and state
*
* @example
* ```tsx
* const addWatchedItem = useAddWatchedItemMutation();
*
* const handleAdd = () => {
* addWatchedItem.mutate(
* { itemName: 'Milk', category: 'Dairy' },
* {
* onSuccess: () => console.log('Added!'),
* onError: (error) => console.error(error),
* }
* );
* };
* ```
*/
export const useAddWatchedItemMutation = () => {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({ itemName, category }: AddWatchedItemParams) => {
const response = await apiClient.addWatchedItem(itemName, category);
if (!response.ok) {
const error = await response.json().catch(() => ({
message: `Request failed with status ${response.status}`,
}));
throw new Error(error.message || 'Failed to add watched item');
}
return response.json();
},
onSuccess: () => {
// Invalidate and refetch watched items to get the updated list
queryClient.invalidateQueries({ queryKey: ['watched-items'] });
notifySuccess('Item added to watched list');
},
onError: (error: Error) => {
notifyError(error.message || 'Failed to add item to watched list');
},
});
};
@@ -0,0 +1,46 @@
// src/hooks/queries/useFlyerItemsQuery.ts
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { FlyerItem } from '../../types';
/**
* Query hook for fetching items for a specific flyer.
*
* This hook is automatically disabled when no flyer ID is provided,
* and caches data per-flyer to avoid refetching the same data.
*
* @param flyerId - The ID of the flyer to fetch items for
* @returns Query result with flyer items data, loading state, and error state
*
* @example
* ```tsx
* const { data: flyerItems, isLoading, error } = useFlyerItemsQuery(flyer?.flyer_id);
* ```
*/
export const useFlyerItemsQuery = (flyerId: number | undefined) => {
return useQuery({
queryKey: ['flyer-items', flyerId],
queryFn: async (): Promise<FlyerItem[]> => {
if (!flyerId) {
throw new Error('Flyer ID is required');
}
const response = await apiClient.fetchFlyerItems(flyerId);
if (!response.ok) {
const error = await response.json().catch(() => ({
message: `Request failed with status ${response.status}`,
}));
throw new Error(error.message || 'Failed to fetch flyer items');
}
const data = await response.json();
// API returns { items: FlyerItem[] }
return data.items || [];
},
// Only run the query if we have a valid flyer ID
enabled: !!flyerId,
// Flyer items don't change, so cache them longer
staleTime: 1000 * 60 * 5,
});
};
@@ -0,0 +1,39 @@
// src/hooks/queries/useFlyersQuery.ts
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { Flyer } from '../../types';
/**
* Query hook for fetching flyers with pagination.
*
* This replaces the custom useInfiniteQuery hook with TanStack Query,
* providing automatic caching, background refetching, and better state management.
*
* @param limit - Maximum number of flyers to fetch
* @param offset - Number of flyers to skip
* @returns Query result with flyers data, loading state, and error state
*
* @example
* ```tsx
* const { data: flyers, isLoading, error, refetch } = useFlyersQuery(20, 0);
* ```
*/
export const useFlyersQuery = (limit: number = 20, offset: number = 0) => {
return useQuery({
queryKey: ['flyers', { limit, offset }],
queryFn: async (): Promise<Flyer[]> => {
const response = await apiClient.fetchFlyers(limit, offset);
if (!response.ok) {
const error = await response.json().catch(() => ({
message: `Request failed with status ${response.status}`,
}));
throw new Error(error.message || 'Failed to fetch flyers');
}
return response.json();
},
// Keep data fresh for 2 minutes since flyers don't change frequently
staleTime: 1000 * 60 * 2,
});
};
@@ -0,0 +1,40 @@
// src/hooks/queries/useMasterItemsQuery.ts
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { MasterGroceryItem } from '../../types';
/**
* Query hook for fetching all master grocery items.
*
* Master items are the canonical list of grocery items that users can watch
* and that flyer items are mapped to. This data changes infrequently, so it's
* cached with a longer stale time.
*
* @returns Query result with master items data, loading state, and error state
*
* @example
* ```tsx
* const { data: masterItems, isLoading, error } = useMasterItemsQuery();
* ```
*/
export const useMasterItemsQuery = () => {
return useQuery({
queryKey: ['master-items'],
queryFn: async (): Promise<MasterGroceryItem[]> => {
const response = await apiClient.fetchMasterItems();
if (!response.ok) {
const error = await response.json().catch(() => ({
message: `Request failed with status ${response.status}`,
}));
throw new Error(error.message || 'Failed to fetch master items');
}
return response.json();
},
// Master items change infrequently, keep data fresh for 10 minutes
staleTime: 1000 * 60 * 10,
// Cache for 30 minutes
gcTime: 1000 * 60 * 30,
});
};
@@ -0,0 +1,39 @@
// src/hooks/queries/useShoppingListsQuery.ts
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { ShoppingList } from '../../types';
/**
* Query hook for fetching the user's shopping lists.
*
* This hook is automatically disabled when the user is not authenticated,
* and the cached data is invalidated when the user logs out.
*
* @param enabled - Whether the query should run (typically based on auth status)
* @returns Query result with shopping lists data, loading state, and error state
*
* @example
* ```tsx
* const { data: shoppingLists, isLoading, error } = useShoppingListsQuery(!!user);
* ```
*/
export const useShoppingListsQuery = (enabled: boolean) => {
return useQuery({
queryKey: ['shopping-lists'],
queryFn: async (): Promise<ShoppingList[]> => {
const response = await apiClient.fetchShoppingLists();
if (!response.ok) {
const error = await response.json().catch(() => ({
message: `Request failed with status ${response.status}`,
}));
throw new Error(error.message || 'Failed to fetch shopping lists');
}
return response.json();
},
enabled,
// Keep data fresh for 1 minute since users actively manage shopping lists
staleTime: 1000 * 60,
});
};
@@ -0,0 +1,39 @@
// src/hooks/queries/useWatchedItemsQuery.ts
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { MasterGroceryItem } from '../../types';
/**
* Query hook for fetching the user's watched items.
*
* This hook is automatically disabled when the user is not authenticated,
* and the cached data is invalidated when the user logs out.
*
* @param enabled - Whether the query should run (typically based on auth status)
* @returns Query result with watched items data, loading state, and error state
*
* @example
* ```tsx
* const { data: watchedItems, isLoading, error } = useWatchedItemsQuery(!!user);
* ```
*/
export const useWatchedItemsQuery = (enabled: boolean) => {
return useQuery({
queryKey: ['watched-items'],
queryFn: async (): Promise<MasterGroceryItem[]> => {
const response = await apiClient.fetchWatchedItems();
if (!response.ok) {
const error = await response.json().catch(() => ({
message: `Request failed with status ${response.status}`,
}));
throw new Error(error.message || 'Failed to fetch watched items');
}
return response.json();
},
enabled,
// Keep data fresh for 1 minute since users actively manage watched items
staleTime: 1000 * 60,
});
};

View File

@@ -1,28 +1,31 @@
 // src/hooks/useFlyerItems.ts
-import type { Flyer, FlyerItem } from '../types';
-import { useApiOnMount } from './useApiOnMount';
-import * as apiClient from '../services/apiClient';
+import type { Flyer } from '../types';
+import { useFlyerItemsQuery } from './queries/useFlyerItemsQuery';
 /**
- * A custom hook to fetch the items for a given flyer.
+ * A custom hook to fetch the items for a given flyer using TanStack Query (ADR-0005).
+ *
+ * This replaces the previous useApiOnMount implementation with TanStack Query
+ * for automatic caching and better state management.
+ *
  * @param selectedFlyer The flyer for which to fetch items.
  * @returns An object containing the flyer items, loading state, and any errors.
+ *
+ * @example
+ * ```tsx
+ * const { flyerItems, isLoading, error } = useFlyerItems(selectedFlyer);
+ * ```
  */
 export const useFlyerItems = (selectedFlyer: Flyer | null) => {
-  const wrappedFetcher = (flyerId?: number): Promise<Response> => {
-    // This should not be called with undefined due to the `enabled` flag,
-    // but this wrapper satisfies the type checker.
-    if (flyerId === undefined) {
-      return Promise.reject(new Error('Cannot fetch items for an undefined flyer ID.'));
-    }
-    return apiClient.fetchFlyerItems(flyerId);
-  };
-  const { data, loading, error } = useApiOnMount<{ items: FlyerItem[] }, [number?]>(
-    wrappedFetcher,
-    [selectedFlyer],
-    { enabled: !!selectedFlyer },
-    selectedFlyer?.flyer_id,
-  );
-  return { flyerItems: data?.items || [], isLoading: loading, error };
+  const {
+    data: flyerItems = [],
+    isLoading,
+    error,
+  } = useFlyerItemsQuery(selectedFlyer?.flyer_id);
+  return {
+    flyerItems,
+    isLoading,
+    error,
+  };
 };
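The refactor above delegates to `useFlyerItemsQuery`, which this excerpt does not include. A sketch of what it plausibly looks like, given the old hook's `{ items: FlyerItem[] }` response envelope and its `enabled` guard; every identifier here is inferred from the diff, not confirmed by it:

```ts
// Inferred sketch of src/hooks/queries/useFlyerItemsQuery.ts -- not the
// committed file. Mirrors the old behavior: idle until a flyer id exists,
// then unwraps the { items } envelope returned by the API.
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { FlyerItem } from '../../types';

export const useFlyerItemsQuery = (flyerId?: number) =>
  useQuery({
    queryKey: ['flyer-items', flyerId],
    queryFn: async (): Promise<FlyerItem[]> => {
      // `enabled` guarantees flyerId is defined by the time this runs.
      const response = await apiClient.fetchFlyerItems(flyerId!);
      if (!response.ok) {
        throw new Error(`Request failed with status ${response.status}`);
      }
      const data: { items: FlyerItem[] } = await response.json();
      return data.items;
    },
    enabled: flyerId !== undefined,
  });
```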

View File

@@ -1,5 +1,8 @@
 // src/providers/AppProviders.tsx
 import React, { ReactNode } from 'react';
+import { QueryClientProvider } from '@tanstack/react-query';
+import { ReactQueryDevtools } from '@tanstack/react-query-devtools';
+import { queryClient } from '../config/queryClient';
 import { AuthProvider } from './AuthProvider';
 import { FlyersProvider } from './FlyersProvider';
 import { MasterItemsProvider } from './MasterItemsProvider';
@@ -13,17 +16,29 @@ interface AppProvidersProps {
 /**
  * A single component to group all application-wide context providers.
  * This cleans up index.tsx and makes the provider hierarchy clear.
+ *
+ * Provider hierarchy (from outermost to innermost):
+ * 1. QueryClientProvider - TanStack Query for server state management (ADR-0005)
+ * 2. ModalProvider - Modal state management
+ * 3. AuthProvider - Authentication state
+ * 4. FlyersProvider - Flyer data fetching
+ * 5. MasterItemsProvider - Master grocery items
+ * 6. UserDataProvider - User-specific data (watched items, shopping lists)
  */
 export const AppProviders: React.FC<AppProvidersProps> = ({ children }) => {
   return (
-    <ModalProvider>
-      <AuthProvider>
-        <FlyersProvider>
-          <MasterItemsProvider>
-            <UserDataProvider>{children}</UserDataProvider>
-          </MasterItemsProvider>
-        </FlyersProvider>
-      </AuthProvider>
-    </ModalProvider>
+    <QueryClientProvider client={queryClient}>
+      <ModalProvider>
+        <AuthProvider>
+          <FlyersProvider>
+            <MasterItemsProvider>
+              <UserDataProvider>{children}</UserDataProvider>
+            </MasterItemsProvider>
+          </FlyersProvider>
+        </AuthProvider>
+      </ModalProvider>
+      {/* React Query Devtools - only visible in development */}
+      {import.meta.env.DEV && <ReactQueryDevtools initialIsOpen={false} />}
+    </QueryClientProvider>
   );
 };
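`AppProviders` now imports `queryClient` from `src/config/queryClient`, a file this excerpt does not include. A plausible minimal shape, with every default here an assumption rather than the committed configuration:

```ts
// src/config/queryClient.ts -- assumed sketch. Hooks that need different
// caching (e.g. the 10-minute master-items staleTime) override per query.
import { QueryClient } from '@tanstack/react-query';

export const queryClient = new QueryClient({
  defaultOptions: {
    queries: {
      staleTime: 1000 * 60, // treat data as fresh for 1 minute by default
      retry: 1, // one retry absorbs transient network blips
      refetchOnWindowFocus: false, // avoid surprise refetches on tab switch
    },
  },
});
```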

View File

@@ -1,34 +1,42 @@
 // src/providers/FlyersProvider.tsx
-import React, { ReactNode } from 'react';
+import React, { ReactNode, useMemo } from 'react';
 import { FlyersContext, FlyersContextType } from '../contexts/FlyersContext';
-import type { Flyer } from '../types';
-import * as apiClient from '../services/apiClient';
-import { useInfiniteQuery } from '../hooks/useInfiniteQuery';
-import { useCallback } from 'react';
+import { useFlyersQuery } from '../hooks/queries/useFlyersQuery';
+/**
+ * Provider for flyer data using TanStack Query (ADR-0005).
+ *
+ * This replaces the previous custom useInfiniteQuery implementation with
+ * TanStack Query for better caching, automatic refetching, and state management.
+ *
+ * Note: Currently fetches all flyers (no pagination UI). Infinite scroll can be
+ * added later when the backend API returns proper pagination metadata.
+ */
 export const FlyersProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
-  // Memoize the fetch function to ensure stability for the useInfiniteQuery hook.
-  const fetchFlyersFn = useCallback(apiClient.fetchFlyers, []);
+  // Fetch all flyers with a large limit (effectively "all")
+  // TODO: Implement proper infinite scroll when backend API is updated
   const {
     data: flyers,
     isLoading: isLoadingFlyers,
-    error: flyersError,
-    fetchNextPage: fetchNextFlyersPage,
-    hasNextPage: hasNextFlyersPage,
+    error,
     refetch: refetchFlyers,
     isRefetching: isRefetchingFlyers,
-  } = useInfiniteQuery<Flyer>(fetchFlyersFn);
-  const value: FlyersContextType = {
-    flyers: flyers || [],
-    isLoadingFlyers,
-    flyersError,
-    fetchNextFlyersPage,
-    hasNextFlyersPage,
-    isRefetchingFlyers,
-    refetchFlyers,
-  };
+  } = useFlyersQuery(1000, 0);
+  const value: FlyersContextType = useMemo(
+    () => ({
+      flyers: flyers || [],
+      isLoadingFlyers,
+      flyersError: error,
+      // Stub methods for compatibility with existing code
+      // TODO: Remove these when infinite scroll is properly implemented
+      fetchNextFlyersPage: () => {},
+      hasNextFlyersPage: false,
+      isRefetchingFlyers,
+      refetchFlyers,
+    }),
+    [flyers, isLoadingFlyers, error, isRefetchingFlyers, refetchFlyers]
+  );
   return <FlyersContext.Provider value={value}>{children}</FlyersContext.Provider>;
 };
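When the backend starts returning pagination metadata, the stubbed `fetchNextFlyersPage`/`hasNextFlyersPage` could be backed by TanStack Query's own `useInfiniteQuery`. A sketch under two assumptions that the diff does not confirm: the API will return `{ items, nextOffset }`, and `apiClient.fetchFlyers` accepts `(limit, offset)`:

```ts
// Future-facing sketch, not part of this commit. Hypothetical
// src/hooks/queries/useInfiniteFlyersQuery.ts.
import { useInfiniteQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { Flyer } from '../../types';

interface FlyerPage {
  items: Flyer[];
  nextOffset: number | null; // null signals the last page
}

export const useInfiniteFlyersQuery = (pageSize = 50) =>
  useInfiniteQuery({
    queryKey: ['flyers', 'infinite', pageSize],
    queryFn: async ({ pageParam }): Promise<FlyerPage> => {
      const response = await apiClient.fetchFlyers(pageSize, pageParam);
      if (!response.ok) {
        throw new Error(`Request failed with status ${response.status}`);
      }
      return response.json();
    },
    initialPageParam: 0,
    // Returning null tells TanStack Query there is no next page.
    getNextPageParam: (lastPage) => lastPage.nextOffset,
  });
```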

View File

@@ -1,30 +1,30 @@
 // src/providers/MasterItemsProvider.tsx
-import React, { ReactNode, useMemo, useEffect, useCallback } from 'react';
+import React, { ReactNode, useMemo } from 'react';
 import { MasterItemsContext } from '../contexts/MasterItemsContext';
-import type { MasterGroceryItem } from '../types';
-import * as apiClient from '../services/apiClient';
-import { useApiOnMount } from '../hooks/useApiOnMount';
-import { logger } from '../services/logger.client';
+import { useMasterItemsQuery } from '../hooks/queries/useMasterItemsQuery';
+/**
+ * Provider for master grocery items using TanStack Query (ADR-0005).
+ *
+ * This replaces the previous custom useApiOnMount implementation with
+ * TanStack Query for better caching, automatic refetching, and state management.
+ *
+ * Master items are cached longer (10 minutes) since they change infrequently.
+ */
 export const MasterItemsProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
-  // LOGGING: Check if the provider is unmounting/remounting repeatedly
-  useEffect(() => {
-    logger.debug('MasterItemsProvider: MOUNTED');
-    return () => logger.debug('MasterItemsProvider: UNMOUNTED');
-  }, []);
-  // Memoize the fetch function to ensure stability for the useApiOnMount hook.
-  const fetchFn = useCallback(() => apiClient.fetchMasterItems(), []);
-  const { data, loading, error } = useApiOnMount<MasterGroceryItem[], []>(fetchFn);
+  const {
+    data: masterItems = [],
+    isLoading,
+    error,
+  } = useMasterItemsQuery();
   const value = useMemo(
     () => ({
-      masterItems: data || [],
-      isLoading: loading,
+      masterItems,
+      isLoading,
       error: error?.message || null,
     }),
-    [data, loading, error],
+    [masterItems, isLoading, error]
   );
   return <MasterItemsContext.Provider value={value}>{children}</MasterItemsContext.Provider>;

View File

@@ -1,74 +1,56 @@
 // src/providers/UserDataProvider.tsx
-import { logger } from '../services/logger.client';
-import React, { useState, useEffect, useMemo, ReactNode, useCallback } from 'react';
+import React, { useMemo, ReactNode } from 'react';
 import { UserDataContext } from '../contexts/UserDataContext';
-import type { MasterGroceryItem, ShoppingList } from '../types';
-import * as apiClient from '../services/apiClient';
-import { useApiOnMount } from '../hooks/useApiOnMount';
 import { useAuth } from '../hooks/useAuth';
+import { useWatchedItemsQuery } from '../hooks/queries/useWatchedItemsQuery';
+import { useShoppingListsQuery } from '../hooks/queries/useShoppingListsQuery';
+/**
+ * Provider for user-specific data using TanStack Query (ADR-0005).
+ *
+ * This replaces the previous custom useApiOnMount implementation with
+ * TanStack Query for better caching, automatic refetching, and state management.
+ *
+ * Data is automatically cleared when the user logs out (query is disabled),
+ * and refetched when a new user logs in.
+ */
 export const UserDataProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
   const { userProfile } = useAuth();
+  const isEnabled = !!userProfile;
-  // Wrap the API calls in useCallback to prevent unnecessary re-renders.
-  const fetchWatchedItemsFn = useCallback(
-    () => apiClient.fetchWatchedItems(),
-    [],
-  );
-  const fetchShoppingListsFn = useCallback(() => apiClient.fetchShoppingLists(), []);
   const {
-    data: watchedItemsData,
-    loading: isLoadingWatched,
-    error: watchedItemsError,
-  } = useApiOnMount<MasterGroceryItem[], []>(fetchWatchedItemsFn, [userProfile], {
-    enabled: !!userProfile,
-  });
+    data: watchedItems = [],
+    isLoading: isLoadingWatched,
+    error: watchedError,
+  } = useWatchedItemsQuery(isEnabled);
   const {
-    data: shoppingListsData,
-    loading: isLoadingShoppingLists,
-    error: shoppingListsError,
-  } = useApiOnMount<ShoppingList[], []>(fetchShoppingListsFn, [userProfile], {
-    enabled: !!userProfile,
-  });
-  const [watchedItems, setWatchedItems] = useState<MasterGroceryItem[]>([]);
-  const [shoppingLists, setShoppingLists] = useState<ShoppingList[]>([]);
-  // This effect synchronizes the local state (watchedItems, shoppingLists) with the
-  // data fetched by the useApiOnMount hooks. It also handles cleanup on user logout.
-  useEffect(() => {
-    // When the user logs out (user becomes null), immediately clear all user-specific data.
-    // This also serves to clear out old data when a new user logs in, before their new data arrives.
-    if (!userProfile) {
-      setWatchedItems([]);
-      setShoppingLists([]);
-      return;
-    }
-    // Once data for the new user is fetched, update the state.
-    if (watchedItemsData) setWatchedItems(watchedItemsData);
-    if (shoppingListsData) setShoppingLists(shoppingListsData);
-  }, [userProfile, watchedItemsData, shoppingListsData]);
+    data: shoppingLists = [],
+    isLoading: isLoadingLists,
+    error: listsError,
+  } = useShoppingListsQuery(isEnabled);
   const value = useMemo(
     () => ({
       watchedItems,
       shoppingLists,
-      setWatchedItems,
-      setShoppingLists,
-      isLoading: !!userProfile && (isLoadingWatched || isLoadingShoppingLists),
-      error: watchedItemsError?.message || shoppingListsError?.message || null,
+      // Stub setters for backward compatibility
+      // TODO: Replace usages with proper mutations (Phase 3 of ADR-0005)
+      setWatchedItems: () => {
+        console.warn(
+          'setWatchedItems is deprecated. Use mutation hooks instead (TanStack Query mutations).'
+        );
+      },
+      setShoppingLists: () => {
+        console.warn(
+          'setShoppingLists is deprecated. Use mutation hooks instead (TanStack Query mutations).'
+        );
+      },
+      isLoading: isEnabled && (isLoadingWatched || isLoadingLists),
+      error: watchedError?.message || listsError?.message || null,
     }),
-    [
-      watchedItems,
-      shoppingLists,
-      userProfile,
-      isLoadingWatched,
-      isLoadingShoppingLists,
-      watchedItemsError,
-      shoppingListsError,
-    ],
-  );
+    [watchedItems, shoppingLists, isEnabled, isLoadingWatched, isLoadingLists, watchedError, listsError]
+  );
   return <UserDataContext.Provider value={value}>{children}</UserDataContext.Provider>;
 };
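The deprecation warnings above point at Phase 3 of ADR-0005: replacing the direct setters with mutations. A sketch of one such hook, assuming a hypothetical `apiClient.addWatchedItem` endpoint that is not shown in this commit:

```ts
// Phase 3 sketch -- hypothetical src/hooks/mutations/useAddWatchedItemMutation.ts;
// apiClient.addWatchedItem is assumed, not confirmed by this diff.
import { useMutation, useQueryClient } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';

export const useAddWatchedItemMutation = () => {
  const queryClient = useQueryClient();
  return useMutation({
    mutationFn: async (masterItemId: number) => {
      const response = await apiClient.addWatchedItem(masterItemId);
      if (!response.ok) {
        throw new Error(`Request failed with status ${response.status}`);
      }
      return response.json();
    },
    // Invalidate so every consumer of ['watched-items'] refetches the list.
    onSuccess: () => queryClient.invalidateQueries({ queryKey: ['watched-items'] }),
  });
};
```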

View File

@@ -30,7 +30,8 @@ import {
   optionalNumeric,
   optionalString,
 } from '../utils/zodUtils';
-import { logger } from '../services/logger.server'; // This was a duplicate, fixed.
+// Removed: import { logger } from '../services/logger.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { monitoringService } from '../services/monitoringService.server';
 import { userService } from '../services/userService';
 import { cleanupUploadedFile } from '../utils/fileUtils';
@@ -126,7 +127,7 @@ router.get('/corrections', validateRequest(emptySchema), async (req, res, next:
     const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
     res.json(corrections);
   } catch (error) {
-    logger.error({ error }, 'Error fetching suggested corrections');
+    req.log.error({ error }, 'Error fetching suggested corrections');
     next(error);
   }
 });
@@ -138,7 +139,7 @@ router.get('/review/flyers', validateRequest(emptySchema), async (req, res, next
     req.log.info({ count: Array.isArray(flyers) ? flyers.length : 'unknown' }, 'Successfully fetched flyers for review');
     res.json(flyers);
   } catch (error) {
-    logger.error({ error }, 'Error fetching flyers for review');
+    req.log.error({ error }, 'Error fetching flyers for review');
     next(error);
   }
 });
@@ -148,7 +149,7 @@ router.get('/brands', validateRequest(emptySchema), async (req, res, next: NextF
     const brands = await db.flyerRepo.getAllBrands(req.log);
     res.json(brands);
   } catch (error) {
-    logger.error({ error }, 'Error fetching brands');
+    req.log.error({ error }, 'Error fetching brands');
     next(error);
   }
 });
@@ -158,7 +159,7 @@ router.get('/stats', validateRequest(emptySchema), async (req, res, next: NextFu
     const stats = await db.adminRepo.getApplicationStats(req.log);
     res.json(stats);
   } catch (error) {
-    logger.error({ error }, 'Error fetching application stats');
+    req.log.error({ error }, 'Error fetching application stats');
     next(error);
   }
 });
@@ -168,7 +169,7 @@ router.get('/stats/daily', validateRequest(emptySchema), async (req, res, next:
     const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
     res.json(dailyStats);
   } catch (error) {
-    logger.error({ error }, 'Error fetching daily stats');
+    req.log.error({ error }, 'Error fetching daily stats');
     next(error);
   }
 });
@@ -183,7 +184,7 @@ router.post(
       await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
       res.status(200).json({ message: 'Correction approved successfully.' });
     } catch (error) {
-      logger.error({ error }, 'Error approving correction');
+      req.log.error({ error }, 'Error approving correction');
      next(error);
     }
   },
@@ -199,7 +200,7 @@ router.post(
      await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
      res.status(200).json({ message: 'Correction rejected successfully.' });
     } catch (error) {
-      logger.error({ error }, 'Error rejecting correction');
+      req.log.error({ error }, 'Error rejecting correction');
      next(error);
     }
   },
@@ -219,7 +220,7 @@ router.put(
       );
       res.status(200).json(updatedCorrection);
     } catch (error) {
-      logger.error({ error }, 'Error updating suggested correction');
+      req.log.error({ error }, 'Error updating suggested correction');
       next(error);
     }
   },
@@ -235,7 +236,7 @@ router.put(
       const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
       res.status(200).json(updatedRecipe);
     } catch (error) {
-      logger.error({ error }, 'Error updating recipe status');
+      req.log.error({ error }, 'Error updating recipe status');
       next(error); // Pass all errors to the central error handler
     }
   },
@@ -258,13 +259,13 @@ router.post(
       const logoUrl = await brandService.updateBrandLogo(params.id, req.file, req.log);
-      logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
+      req.log.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
       res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
     } catch (error) {
       // If an error occurs after the file has been uploaded (e.g., DB error),
       // we must clean up the orphaned file from the disk.
       await cleanupUploadedFile(req.file);
-      logger.error({ error }, 'Error updating brand logo');
+      req.log.error({ error }, 'Error updating brand logo');
       next(error);
     }
   },
@@ -275,7 +276,7 @@ router.get('/unmatched-items', validateRequest(emptySchema), async (req, res, ne
     const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
     res.json(items);
   } catch (error) {
-    logger.error({ error }, 'Error fetching unmatched items');
+    req.log.error({ error }, 'Error fetching unmatched items');
     next(error);
   }
 });
@@ -295,7 +296,7 @@ router.delete(
       await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
       res.status(204).send();
     } catch (error: unknown) {
-      logger.error({ error }, 'Error deleting recipe');
+      req.log.error({ error }, 'Error deleting recipe');
       next(error);
     }
   },
@@ -314,7 +315,7 @@ router.delete(
      await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
      res.status(204).send();
     } catch (error: unknown) {
-      logger.error({ error }, 'Error deleting flyer');
+      req.log.error({ error }, 'Error deleting flyer');
      next(error);
     }
   },
@@ -334,7 +335,7 @@ router.put(
       ); // This is still a standalone function in admin.db.ts
       res.status(200).json(updatedComment);
     } catch (error: unknown) {
-      logger.error({ error }, 'Error updating comment status');
+      req.log.error({ error }, 'Error updating comment status');
       next(error);
     }
   },
@@ -345,7 +346,7 @@ router.get('/users', validateRequest(emptySchema), async (req, res, next: NextFu
     const users = await db.adminRepo.getAllUsers(req.log);
     res.json(users);
   } catch (error) {
-    logger.error({ error }, 'Error fetching users');
+    req.log.error({ error }, 'Error fetching users');
     next(error);
   }
 });
@@ -362,7 +363,7 @@ router.get(
      const logs = await db.adminRepo.getActivityLog(limit!, offset!, req.log);
      res.json(logs);
     } catch (error) {
-      logger.error({ error }, 'Error fetching activity log');
+      req.log.error({ error }, 'Error fetching activity log');
      next(error);
     }
   },
@@ -378,7 +379,7 @@ router.get(
       const user = await db.userRepo.findUserProfileById(params.id, req.log);
       res.json(user);
     } catch (error) {
-      logger.error({ error }, 'Error fetching user profile');
+      req.log.error({ error }, 'Error fetching user profile');
       next(error);
     }
   },
@@ -394,7 +395,7 @@ router.put(
       const updatedUser = await db.adminRepo.updateUserRole(params.id, body.role, req.log);
       res.json(updatedUser);
     } catch (error) {
-      logger.error({ error }, `Error updating user ${params.id}:`);
+      req.log.error({ error }, `Error updating user ${params.id}:`);
       next(error);
     }
   },
@@ -411,7 +412,7 @@ router.delete(
       await userService.deleteUserAsAdmin(userProfile.user.user_id, params.id, req.log);
       res.status(204).send();
     } catch (error) {
-      logger.error({ error }, 'Error deleting user');
+      req.log.error({ error }, 'Error deleting user');
       next(error);
     }
   },
@@ -427,7 +428,7 @@ router.post(
   validateRequest(emptySchema),
   async (req: Request, res: Response, next: NextFunction) => {
     const userProfile = req.user as UserProfile;
-    logger.info(
+    req.log.info(
       `[Admin] Manual trigger for daily deal check received from user: ${userProfile.user.user_id}`,
     );
@@ -440,7 +441,7 @@
           'Daily deal check job has been triggered successfully. It will run in the background.',
       });
     } catch (error) {
-      logger.error({ error }, '[Admin] Failed to trigger daily deal check job.');
+      req.log.error({ error }, '[Admin] Failed to trigger daily deal check job.');
       next(error);
     }
   },
@@ -456,7 +457,7 @@ router.post(
   validateRequest(emptySchema),
   async (req: Request, res: Response, next: NextFunction) => {
     const userProfile = req.user as UserProfile;
-    logger.info(
+    req.log.info(
      `[Admin] Manual trigger for analytics report generation received from user: ${userProfile.user.user_id}`,
     );
@@ -466,7 +467,7 @@
        message: `Analytics report generation job has been enqueued successfully. Job ID: ${jobId}`,
      });
     } catch (error) {
-      logger.error({ error }, '[Admin] Failed to enqueue analytics report job.');
+      req.log.error({ error }, '[Admin] Failed to enqueue analytics report job.');
      next(error);
     }
   },
@@ -484,7 +485,7 @@ router.post(
     const userProfile = req.user as UserProfile;
     // Infer type from the schema generator for type safety, as per ADR-003.
     const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>; // This was a duplicate, fixed.
-    logger.info(
+    req.log.info(
       `[Admin] Manual trigger for flyer file cleanup received from user: ${userProfile.user.user_id} for flyer ID: ${params.flyerId}`,
     );
@@ -495,7 +496,7 @@
         .status(202)
         .json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
     } catch (error) {
-      logger.error({ error }, 'Error enqueuing cleanup job');
+      req.log.error({ error }, 'Error enqueuing cleanup job');
       next(error);
     }
   },
@@ -511,7 +512,7 @@ router.post(
   validateRequest(emptySchema),
   async (req: Request, res: Response, next: NextFunction) => {
     const userProfile = req.user as UserProfile;
-    logger.info(
+    req.log.info(
       `[Admin] Manual trigger for a failing job received from user: ${userProfile.user.user_id}`,
     );
@@ -522,7 +523,7 @@
         .status(202)
         .json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
     } catch (error) {
-      logger.error({ error }, 'Error enqueuing failing job');
+      req.log.error({ error }, 'Error enqueuing failing job');
       next(error);
     }
   }
@@ -538,7 +539,7 @@ router.post(
   validateRequest(emptySchema),
   async (req: Request, res: Response, next: NextFunction) => {
     const userProfile = req.user as UserProfile;
-    logger.info(
+    req.log.info(
       `[Admin] Manual trigger for geocode cache clear received from user: ${userProfile.user.user_id}`,
     );
@@ -548,7 +549,7 @@
        message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
      });
     } catch (error) {
-      logger.error({ error }, '[Admin] Failed to clear geocode cache.');
+      req.log.error({ error }, '[Admin] Failed to clear geocode cache.');
      next(error);
     }
   },
@@ -563,7 +564,7 @@ router.get('/workers/status', validateRequest(emptySchema), async (req: Request,
     const workerStatuses = await monitoringService.getWorkerStatuses();
     res.json(workerStatuses);
   } catch (error) {
-    logger.error({ error }, 'Error fetching worker statuses');
+    req.log.error({ error }, 'Error fetching worker statuses');
     next(error);
   }
 });
@@ -577,7 +578,7 @@ router.get('/queues/status', validateRequest(emptySchema), async (req: Request,
     const queueStatuses = await monitoringService.getQueueStatuses();
     res.json(queueStatuses);
   } catch (error) {
-    logger.error({ error }, 'Error fetching queue statuses');
+    req.log.error({ error }, 'Error fetching queue statuses');
     next(error);
   }
 });
@@ -603,7 +604,7 @@ router.post(
       );
       res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
     } catch (error) {
-      logger.error({ error }, 'Error retrying job');
+      req.log.error({ error }, 'Error retrying job');
       next(error);
     }
   },
@@ -618,7 +619,7 @@ router.post(
   validateRequest(emptySchema),
   async (req: Request, res: Response, next: NextFunction) => {
     const userProfile = req.user as UserProfile; // This was a duplicate, fixed.
-    logger.info(
+    req.log.info(
       `[Admin] Manual trigger for weekly analytics report received from user: ${userProfile.user.user_id}`,
     );
@@ -628,7 +629,7 @@
       .status(202)
       .json({ message: 'Successfully enqueued weekly analytics job.', jobId });
     } catch (error) {
-      logger.error({ error }, 'Error enqueuing weekly analytics job');
+      req.log.error({ error }, 'Error enqueuing weekly analytics job');
       next(error);
     }
   },
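Every hunk above swaps the module-level `logger` for the request-scoped `req.log` required by ADR-004. For readers unfamiliar with the pattern, `req.log` is typically attached by middleware such as `pino-http`; a sketch only, since the project's actual wiring is not part of this diff:

```ts
// Illustrative request-scoped logging setup per ADR-004 -- not the project's
// actual middleware. pino-http attaches a child logger to req.log and stamps
// every line with the request id.
import express from 'express';
import pinoHttp from 'pino-http';
import { randomUUID } from 'node:crypto';

const app = express();

app.use(
  pinoHttp({
    // Reuse an upstream correlation id when present; otherwise mint one.
    genReqId: (req) => (req.headers['x-request-id'] as string) ?? randomUUID(),
  }),
);

app.get('/ping', (req, res) => {
  req.log.info('ping received'); // automatically carries the request id
  res.json({ ok: true });
});
```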

View File

@@ -1,19 +1,30 @@
 // src/routes/ai.routes.ts
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { Router, Request, Response, NextFunction } from 'express';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { z } from 'zod';
 import passport from './passport.routes';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { optionalAuth } from './passport.routes';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { aiService, DuplicateFlyerError } from '../services/aiService.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import {
   createUploadMiddleware,
   handleMulterError,
 } from '../middleware/multer.middleware';
-import { logger } from '../services/logger.server'; // This was a duplicate, fixed.
+// Removed: import { logger } from '../services/logger.server'; // This was a duplicate, fixed.
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { UserProfile } from '../types'; // This was a duplicate, fixed.
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validateRequest } from '../middleware/validation.middleware';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { requiredString } from '../utils/zodUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { cleanupUploadedFile, cleanupUploadedFiles } from '../utils/fileUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { monitoringService } from '../services/monitoringService.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { aiUploadLimiter, aiGenerationLimiter } from '../config/rateLimiters';
 const router = Router();
@@ -61,7 +72,7 @@ const rescanAreaSchema = z.object({
         return JSON.parse(val);
       } catch (err) {
         // Log the actual parsing error for better debugging if invalid JSON is sent.
-        logger.warn(
+        req.log.warn(
           { error: errMsg(err), receivedValue: val },
           'Failed to parse cropArea in rescanAreaSchema',
         );
@@ -151,12 +162,12 @@ router.use((req: Request, res: Response, next: NextFunction) => {
     const contentType = req.headers['content-type'] || '';
     const contentLength = req.headers['content-length'] || 'unknown';
     const authPresent = !!req.headers['authorization'];
-    logger.debug(
+    req.log.debug(
       { method: req.method, url: req.originalUrl, contentType, contentLength, authPresent },
       '[API /ai] Incoming request',
     );
   } catch (e: unknown) {
-    logger.error({ error: errMsg(e) }, 'Failed to log incoming AI request headers');
+    req.log.error({ error: errMsg(e) }, 'Failed to log incoming AI request headers');
   }
   next();
 });
@@ -181,7 +192,7 @@ router.post(
       return res.status(400).json({ message: 'A flyer file (PDF or image) is required.' });
     }
-    logger.debug(
+    req.log.debug(
       { filename: req.file.originalname, size: req.file.size, checksum: body.checksum },
       'Handling /upload-and-process',
     );
@@ -210,7 +221,7 @@
     } catch (error) {
       await cleanupUploadedFile(req.file);
       if (error instanceof DuplicateFlyerError) {
-        logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${req.body?.checksum}`);
+        req.log.warn(`Duplicate flyer upload attempt blocked for checksum: ${req.body?.checksum}`);
         return res.status(409).json({ message: error.message, flyerId: error.flyerId });
       }
       next(error);
@@ -239,7 +250,7 @@ router.post(
     } catch (error) {
       await cleanupUploadedFile(req.file);
       if (error instanceof DuplicateFlyerError) {
-        logger.warn(`Duplicate legacy flyer upload attempt blocked.`);
+        req.log.warn(`Duplicate legacy flyer upload attempt blocked.`);
         return res.status(409).json({ message: error.message, flyerId: error.flyerId });
       }
       next(error);
@@ -261,7 +272,7 @@ router.get(
     try {
       const jobStatus = await monitoringService.getFlyerJobStatus(jobId); // This was a duplicate, fixed.
-      logger.debug(`[API /ai/jobs] Status check for job ${jobId}: ${jobStatus.state}`);
+      req.log.debug(`[API /ai/jobs] Status check for job ${jobId}: ${jobStatus.state}`);
       res.json(jobStatus);
     } catch (error) {
       next(error);
@@ -298,7 +309,7 @@ router.post(
     } catch (error) {
       await cleanupUploadedFile(req.file);
       if (error instanceof DuplicateFlyerError) {
-        logger.warn(`Duplicate flyer upload attempt blocked.`);
+        req.log.warn(`Duplicate flyer upload attempt blocked.`);
         return res.status(409).json({ message: error.message, flyerId: error.flyerId });
       }
       next(error);
@@ -320,7 +331,7 @@ router.post(
       if (!req.file) {
         return res.status(400).json({ message: 'Image file is required.' });
       }
-      logger.info(`Server-side flyer check for file: ${req.file.originalname}`);
+      req.log.info(`Server-side flyer check for file: ${req.file.originalname}`);
       res.status(200).json({ is_flyer: true }); // Stubbed response
     } catch (error) {
       next(error);
@@ -340,7 +351,7 @@ router.post(
       if (!req.file) {
         return res.status(400).json({ message: 'Image file is required.' });
      }
-      logger.info(`Server-side address extraction for file: ${req.file.originalname}`);
+      req.log.info(`Server-side address extraction for file: ${req.file.originalname}`);
      res.status(200).json({ address: 'not identified' }); // Updated stubbed response
     } catch (error) {
       next(error);
@@ -360,7 +371,7 @@ router.post(
       if (!req.files || !Array.isArray(req.files) || req.files.length === 0) {
         return res.status(400).json({ message: 'Image files are required.' });
       }
-      logger.info(`Server-side logo extraction for ${req.files.length} image(s).`);
+      req.log.info(`Server-side logo extraction for ${req.files.length} image(s).`);
       res.status(200).json({ store_logo_base_64: null }); // Stubbed response
     } catch (error) {
       next(error);
@@ -377,7 +388,7 @@ router.post(
   validateRequest(insightsSchema),
   async (req, res, next: NextFunction) => {
     try {
-      logger.info(`Server-side quick insights requested.`);
+      req.log.info(`Server-side quick insights requested.`);
       res
         .status(200)
         .json({ text: 'This is a server-generated quick insight: buy the cheap stuff!' }); // Stubbed response
@@ -394,7 +405,7 @@ router.post(
   validateRequest(insightsSchema),
   async (req, res, next: NextFunction) => {
     try {
-      logger.info(`Server-side deep dive requested.`);
+      req.log.info(`Server-side deep dive requested.`);
       res
         .status(200)
         .json({ text: 'This is a server-generated deep dive analysis. It is very detailed.' }); // Stubbed response
@@ -411,7 +422,7 @@ router.post(
   validateRequest(searchWebSchema),
   async (req, res, next: NextFunction) => {
     try {
-      logger.info(`Server-side web search requested.`);
+      req.log.info(`Server-side web search requested.`);
       res.status(200).json({ text: 'The web says this is good.', sources: [] }); // Stubbed response
     } catch (error) {
       next(error);
@@ -427,7 +438,7 @@ router.post(
   async (req, res, next: NextFunction) => {
     try {
       const { items } = req.body;
-      logger.info(`Server-side price comparison requested for ${items.length} items.`);
+      req.log.info(`Server-side price comparison requested for ${items.length} items.`);
       res.status(200).json({
         text: 'This is a server-generated price comparison. Milk is cheaper at SuperMart.',
         sources: [],
@@ -446,11 +457,11 @@ router.post(
   async (req, res, next: NextFunction) => {
     try {
       const { items, store, userLocation } = req.body;
-      logger.debug({ itemCount: items.length, storeName: store.name }, 'Trip planning requested.');
+      req.log.debug({ itemCount: items.length, storeName: store.name }, 'Trip planning requested.');
       const result = await aiService.planTripWithMaps(items, store, userLocation);
       res.status(200).json(result);
     } catch (error) {
-      logger.error({ error: errMsg(error) }, 'Error in /api/ai/plan-trip endpoint:');
+      req.log.error({ error: errMsg(error) }, 'Error in /api/ai/plan-trip endpoint:');
       next(error);
     }
   },
@@ -466,7 +477,7 @@ router.post(
   (req: Request, res: Response) => {
     // This endpoint is a placeholder for a future feature.
     // Returning 501 Not Implemented is the correct HTTP response for this case.
-    logger.info('Request received for unimplemented endpoint: /api/ai/generate-image');
+    req.log.info('Request received for unimplemented endpoint: /api/ai/generate-image');
     res.status(501).json({ message: 'Image generation is not yet implemented.' });
   },
 );
@@ -479,7 +490,7 @@ router.post(
   (req: Request, res: Response) => {
     // This endpoint is a placeholder for a future feature.
     // Returning 501 Not Implemented is the correct HTTP response for this case.
-    logger.info('Request received for unimplemented endpoint: /api/ai/generate-speech');
+    req.log.info('Request received for unimplemented endpoint: /api/ai/generate-speech');
     res.status(501).json({ message: 'Speech generation is not yet implemented.' });
   },
 );
@@ -505,7 +516,7 @@ router.post(
     const { extractionType } = req.body;
     const { path, mimetype } = req.file;
-    logger.debug(
+    req.log.debug(
       { extractionType, cropArea, filename: req.file.originalname },
       'Rescan area requested',
     );
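One caveat in the rescanAreaSchema hunk: a Zod schema callback has no Express `req` in scope, so `req.log.warn` cannot be called there as written. A hedged alternative is to surface the parse failure through Zod itself and let the validation middleware (which does hold `req`) do the logging; a sketch only, not the commit's implementation:

```ts
// Sketch -- reports the JSON.parse failure as a Zod issue instead of logging
// inside the schema, where no request-scoped logger exists.
import { z } from 'zod';

const cropAreaSchema = z.string().transform((val, ctx) => {
  try {
    return JSON.parse(val) as unknown;
  } catch (err) {
    ctx.addIssue({
      code: z.ZodIssueCode.custom,
      message: `cropArea is not valid JSON: ${err instanceof Error ? err.message : String(err)}`,
    });
    return z.NEVER; // tells Zod the transform failed
  }
});
```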

View File

@@ -1,13 +1,20 @@
 // src/routes/auth.routes.ts
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { Router, Request, Response, NextFunction } from 'express';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { z } from 'zod';
 import passport from './passport.routes';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { UniqueConstraintError } from '../services/db/errors.db'; // Import actual class for instanceof checks
-import { logger } from '../services/logger.server';
+// Removed: import { logger } from '../services/logger.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validateRequest } from '../middleware/validation.middleware';
 import type { UserProfile } from '../types';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validatePasswordStrength } from '../utils/authUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { requiredString } from '../utils/zodUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import {
   loginLimiter,
   registerLimiter,
@@ -17,6 +24,7 @@ import {
   logoutLimiter,
 } from '../config/rateLimiters';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { authService } from '../services/authService';
 const router = Router();
@@ -103,7 +111,7 @@ router.post(
       // If the email is a duplicate, return a 409 Conflict status.
       return res.status(409).json({ message: error.message });
     }
-    logger.error({ error }, `User registration route failed for email: ${email}.`);
+    req.log.error({ error }, `User registration route failed for email: ${email}.`);
     // Pass the error to the centralized handler
     return next(error);
   }
@@ -276,7 +284,7 @@ router.post('/logout', logoutLimiter, async (req: Request, res: Response) => {
 //     // Redirect to a frontend page that can handle the token
 //     res.redirect(`${process.env.FRONTEND_URL}/auth/callback?token=${accessToken}`);
 //   }).catch(err => {
-//     logger.error('Failed to save refresh token during OAuth callback:', { error: err });
+//     req.log.error('Failed to save refresh token during OAuth callback:', { error: err });
 //     res.redirect(`${process.env.FRONTEND_URL}/login?error=auth_failed`);
 //   });
 // };

View File

@@ -1,12 +1,18 @@
 // src/routes/gamification.routes.ts
 import express, { NextFunction } from 'express';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { z } from 'zod';
 import passport, { isAdmin } from './passport.routes'; // Correctly imported
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { gamificationService } from '../services/gamificationService';
-import { logger } from '../services/logger.server';
+// Removed: import { logger } from '../services/logger.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { UserProfile } from '../types';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validateRequest } from '../middleware/validation.middleware';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { requiredString, optionalNumeric } from '../utils/zodUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import {
   publicReadLimiter,
   userReadLimiter,
@@ -44,7 +50,7 @@ router.get('/', publicReadLimiter, async (req, res, next: NextFunction) => {
     const achievements = await gamificationService.getAllAchievements(req.log);
     res.json(achievements);
   } catch (error) {
-    logger.error({ error }, 'Error fetching all achievements in /api/achievements:');
+    req.log.error({ error }, 'Error fetching all achievements in /api/achievements:');
     next(error);
   }
 });
@@ -65,7 +71,7 @@ router.get(
      const leaderboard = await gamificationService.getLeaderboard(limit!, req.log);
      res.json(leaderboard);
     } catch (error) {
-      logger.error({ error }, 'Error fetching leaderboard:');
+      req.log.error({ error }, 'Error fetching leaderboard:');
      next(error);
     }
   },
@@ -90,7 +96,7 @@ router.get(
       );
       res.json(userAchievements);
     } catch (error) {
-      logger.error(
+      req.log.error(
         { error, userId: userProfile.user.user_id },
         'Error fetching user achievements:',
       );

View File

@@ -1,11 +1,17 @@
 // src/routes/health.routes.ts
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { Router, Request, Response, NextFunction } from 'express';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { z } from 'zod';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { checkTablesExist, getPoolStatus } from '../services/db/connection.db';
-import { logger } from '../services/logger.server';
+// Removed: import { logger } from '../services/logger.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { connection as redisConnection } from '../services/queueService.server';
 import fs from 'node:fs/promises';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { getSimpleWeekAndYear } from '../utils/dateUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validateRequest } from '../middleware/validation.middleware';
 const router = Router();
@@ -87,7 +93,7 @@ router.get(
     if (isHealthy) {
       return res.status(200).json({ success: true, message });
     } else {
-      logger.warn(`Database pool health check shows high waiting count: ${status.waitingCount}`);
+      req.log.warn(`Database pool health check shows high waiting count: ${status.waitingCount}`);
       return res
         .status(500)
         .json({ success: false, message: `Pool may be under stress. ${message}` });
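The pool health check above treats a high `waitingCount` as stress. For context, `getPoolStatus` presumably surfaces node-postgres's built-in pool counters; a sketch only, since the helper's real implementation is not in this diff:

```ts
// Assumed sketch of getPoolStatus. node-postgres's Pool exposes these
// counters directly; waitingCount > 0 means callers are queued for a client.
import { Pool } from 'pg';

const pool = new Pool(); // configured from PG* env vars in practice

export const getPoolStatus = () => ({
  totalCount: pool.totalCount, // open clients, idle or checked out
  idleCount: pool.idleCount, // clients idle in the pool
  waitingCount: pool.waitingCount, // requests waiting for a free client
});
```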

View File

@@ -1,16 +1,22 @@
// src/routes/passport.routes.ts // src/routes/passport.routes.ts
import passport from 'passport'; import passport from 'passport';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { Strategy as LocalStrategy } from 'passport-local'; import { Strategy as LocalStrategy } from 'passport-local';
//import { Strategy as GoogleStrategy } from 'passport-google-oauth20'; //import { Strategy as GoogleStrategy } from 'passport-google-oauth20';
 //import { Strategy as GitHubStrategy } from 'passport-github2';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { Strategy as JwtStrategy, ExtractJwt } from 'passport-jwt';
 import * as bcrypt from 'bcrypt';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { Request, Response, NextFunction } from 'express';
 import * as db from '../services/db/index.db';
-import { logger } from '../services/logger.server';
+// Removed: import { logger } from '../services/logger.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { UserProfile } from '../types';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { createMockUserProfile } from '../tests/utils/mockFactories';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { ForbiddenError } from '../services/db/errors.db';
 const JWT_SECRET = process.env.JWT_SECRET!;
@@ -50,7 +56,7 @@ passport.use(
       if (!userprofile) {
         // User not found
-        logger.warn(`Login attempt failed for non-existent user: ${email}`);
+        req.log.warn(`Login attempt failed for non-existent user: ${email}`);
         return done(null, false, { message: 'Incorrect email or password.' });
       }
@@ -64,7 +70,7 @@ passport.use(
       const lockoutDurationMs = LOCKOUT_DURATION_MINUTES * 60 * 1000;
       if (timeSinceLockout < lockoutDurationMs) {
-        logger.warn(`Login attempt for locked account: ${email}`);
+        req.log.warn(`Login attempt for locked account: ${email}`);
         // Refresh the lockout timestamp on each attempt to prevent probing.
         await db.adminRepo.incrementFailedLoginAttempts(userprofile.user.user_id, req.log);
         return done(null, false, {
@@ -75,7 +81,7 @@ passport.use(
       if (!userprofile.password_hash) {
         // User exists but signed up via OAuth, so they don't have a password.
-        logger.warn(`Password login attempt for OAuth user: ${email}`);
+        req.log.warn(`Password login attempt for OAuth user: ${email}`);
         return done(null, false, {
           message:
             'This account was created using a social login. Please use Google or GitHub to sign in.',
@@ -83,15 +89,15 @@ passport.use(
       }
       // 2. Compare the submitted password with the hashed password in your DB.
-      logger.debug(
+      req.log.debug(
         `[Passport] Verifying password for ${email}. Hash length: ${userprofile.password_hash.length}`,
       );
       const isMatch = await bcrypt.compare(password, userprofile.password_hash);
-      logger.debug(`[Passport] Password match result: ${isMatch}`);
+      req.log.debug(`[Passport] Password match result: ${isMatch}`);
       if (!isMatch) {
         // Password does not match
-        logger.warn(`Login attempt failed for user ${email} due to incorrect password.`);
+        req.log.warn(`Login attempt failed for user ${email} due to incorrect password.`);
         // Increment failed attempts and get the new count.
         const newAttemptCount = await db.adminRepo.incrementFailedLoginAttempts(
           userprofile.user.user_id,
@@ -128,7 +134,7 @@ passport.use(
           req.log,
         );
-        logger.info(`User successfully authenticated: ${email}`);
+        req.log.info(`User successfully authenticated: ${email}`);
        // The `user` object from `findUserWithProfileByEmail` is now a fully formed
        // UserProfile object with additional authentication fields. We must strip these
@@ -170,13 +176,13 @@ passport.use(
 //       if (user) {
 //         // User exists, proceed to log them in.
-//         logger.info(`Google OAuth successful for existing user: ${email}`);
+//         req.log.info(`Google OAuth successful for existing user: ${email}`);
 //         // The password_hash is intentionally destructured and discarded for security.
 //         const { password_hash, ...userWithoutHash } = user;
 //         return done(null, userWithoutHash);
 //       } else {
 //         // User does not exist, create a new account for them.
-//         logger.info(`Google OAuth: creating new user for email: ${email}`);
+//         req.log.info(`Google OAuth: creating new user for email: ${email}`);
 //         // Since this is an OAuth user, they don't have a password.
 //         // We pass `null` for the password hash.
@@ -189,7 +195,7 @@ passport.use(
 //         try {
 //           await sendWelcomeEmail(email, profile.displayName);
 //         } catch (emailError) {
-//           logger.error(`Failed to send welcome email to new Google user ${email}`, { error: emailError });
+//           req.log.error(`Failed to send welcome email to new Google user ${email}`, { error: emailError });
 //           // Don't block the login flow if email fails.
 //         }
@@ -197,7 +203,7 @@ passport.use(
 //         return done(null, newUser);
 //       }
 //     } catch (err) {
-//       logger.error('Error during Google authentication strategy:', { error: err });
+//       req.log.error('Error during Google authentication strategy:', { error: err });
 //       return done(err, false);
 //     }
 //   }
@@ -222,13 +228,13 @@ passport.use(
 //       if (user) {
 //         // User exists, proceed to log them in.
-//         logger.info(`GitHub OAuth successful for existing user: ${email}`);
+//         req.log.info(`GitHub OAuth successful for existing user: ${email}`);
 //         // The password_hash is intentionally destructured and discarded for security.
 //         const { password_hash, ...userWithoutHash } = user;
 //         return done(null, userWithoutHash);
 //       } else {
 //         // User does not exist, create a new account for them.
-//         logger.info(`GitHub OAuth: creating new user for email: ${email}`);
+//         req.log.info(`GitHub OAuth: creating new user for email: ${email}`);
 //         // Since this is an OAuth user, they don't have a password.
 //         // We pass `null` for the password hash.
@@ -241,7 +247,7 @@ passport.use(
 //         try {
 //           await sendWelcomeEmail(email, profile.displayName || profile.username);
 //         } catch (emailError) {
-//           logger.error(`Failed to send welcome email to new GitHub user ${email}`, { error: emailError });
+//           req.log.error(`Failed to send welcome email to new GitHub user ${email}`, { error: emailError });
 //           // Don't block the login flow if email fails.
 //         }
@@ -249,7 +255,7 @@ passport.use(
 //         return done(null, newUser);
 //       }
 //     } catch (err) {
-//       logger.error('Error during GitHub authentication strategy:', { error: err });
+//       req.log.error('Error during GitHub authentication strategy:', { error: err });
 //       return done(err, false);
 //     }
 //   }
@@ -265,12 +271,12 @@ const jwtOptions = {
 if (!JWT_SECRET) {
   logger.fatal('[Passport] CRITICAL: JWT_SECRET is missing or empty in environment variables! JwtStrategy will fail.');
 } else {
-  logger.info(`[Passport] JWT_SECRET loaded successfully (length: ${JWT_SECRET.length}).`);
+  req.log.info(`[Passport] JWT_SECRET loaded successfully (length: ${JWT_SECRET.length}).`);
 }
 passport.use(
   new JwtStrategy(jwtOptions, async (jwt_payload, done) => {
-    logger.debug(
+    req.log.debug(
       { jwt_payload: jwt_payload ? { user_id: jwt_payload.user_id } : 'null' },
       '[JWT Strategy] Verifying token payload:',
     );
@@ -280,18 +286,18 @@ passport.use(
       const userProfile = await db.userRepo.findUserProfileById(jwt_payload.user_id, logger);
       // --- JWT STRATEGY DEBUG LOGGING ---
-      logger.debug(
+      req.log.debug(
         `[JWT Strategy] DB lookup for user ID ${jwt_payload.user_id} result: ${userProfile ? 'FOUND' : 'NOT FOUND'}`,
       );
       if (userProfile) {
         return done(null, userProfile); // User profile object will be available as req.user in protected routes
       } else {
-        logger.warn(`JWT authentication failed: user with ID ${jwt_payload.user_id} not found.`);
+        req.log.warn(`JWT authentication failed: user with ID ${jwt_payload.user_id} not found.`);
         return done(null, false); // User not found or invalid token
       }
     } catch (err: unknown) {
-      logger.error({ error: err }, 'Error during JWT authentication strategy:');
+      req.log.error({ error: err }, 'Error during JWT authentication strategy:');
       return done(err, false);
     }
   }),
@@ -307,7 +313,7 @@ export const isAdmin = (req: Request, res: Response, next: NextFunction) => {
   } else {
     // Check if userProfile is a valid UserProfile before accessing its properties for logging.
     const userIdForLog = isUserProfile(userProfile) ? userProfile.user.user_id : 'unknown';
-    logger.warn(`Admin access denied for user: ${userIdForLog}`);
+    req.log.warn(`Admin access denied for user: ${userIdForLog}`);
     next(new ForbiddenError('Forbidden: Administrator access required.'));
   }
 };
@@ -327,12 +333,12 @@ export const optionalAuth = (req: Request, res: Response, next: NextFunction) =>
     if (err) {
       // An actual error occurred during authentication (e.g., malformed token).
       // For optional auth, we log this but still proceed without a user.
-      logger.warn({ error: err }, 'Optional auth encountered an error, proceeding anonymously.');
+      req.log.warn({ error: err }, 'Optional auth encountered an error, proceeding anonymously.');
       return next();
     }
     if (info) {
       // The patch requested this specific error handling.
-      logger.info({ info: info.message || info.toString() }, 'Optional auth info:');
+      req.log.info({ info: info.message || info.toString() }, 'Optional auth info:');
     }
     if (user) (req as Express.Request).user = user; // Attach user if authentication succeeds.
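The ADR-004 middleware that actually puts `req.log` on each request is not part of this diff. A minimal wiring sketch, assuming pino-http is the mechanism (the project already uses pino); the app setup and route below are illustrative, not this repo's real bootstrap code. One caveat when reading the hunks above: the substitutions to `req.log.info` at module scope and `req.log.debug` inside the JwtStrategy callback reference a `req` that does not appear to be in scope there, so those lines look like they would not compile as written.

```ts
// Illustrative wiring only -- the real ADR-004 middleware is not shown in this commit.
import express from 'express';
import pino from 'pino';
import pinoHttp from 'pino-http';

const app = express();

// pino-http attaches a per-request child logger as req.log, carrying
// request-scoped context (request id, method, url) into every log line.
app.use(pinoHttp({ logger: pino() }));

app.get('/ping', (req, res) => {
  req.log.info('ping received'); // request-scoped, unlike a module-level logger
  res.json({ ok: true });
});
```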


@@ -1,11 +1,18 @@
 // src/routes/system.routes.ts
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { Router, Request, Response, NextFunction } from 'express';
-import { logger } from '../services/logger.server';
+// Removed: import { logger } from '../services/logger.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { geocodingService } from '../services/geocodingService.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validateRequest } from '../middleware/validation.middleware';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { z } from 'zod';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { requiredString } from '../utils/zodUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { systemService } from '../services/systemService';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { geocodeLimiter } from '../config/rateLimiters';
 const router = Router();
@@ -61,7 +68,7 @@ router.post(
       res.json(coordinates);
     } catch (error) {
-      logger.error({ error }, 'Error geocoding address');
+      req.log.error({ error }, 'Error geocoding address');
       next(error);
     }
   },


@@ -2,17 +2,25 @@
 import express, { Request, Response, NextFunction } from 'express';
 import passport from './passport.routes';
 import multer from 'multer'; // Keep for MulterError type check
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { z } from 'zod';
-import { logger } from '../services/logger.server';
+// Removed: import { logger } from '../services/logger.server';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { UserProfile } from '../types';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import {
   createUploadMiddleware,
   handleMulterError,
 } from '../middleware/multer.middleware';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { userService } from '../services/userService';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { ForeignKeyConstraintError } from '../services/db/errors.db';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validateRequest } from '../middleware/validation.middleware';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { validatePasswordStrength } from '../utils/authUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import {
   requiredString,
   numericIdParam,
@@ -20,7 +28,9 @@ import {
   optionalBoolean,
 } from '../utils/zodUtils';
 import * as db from '../services/db/index.db';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import { cleanupUploadedFile } from '../utils/fileUtils';
+// All route handlers now use req.log (request-scoped logger) as per ADR-004
 import {
   userUpdateLimiter,
   userSensitiveUpdateLimiter,
@@ -126,7 +136,7 @@ router.post(
       // If an error occurs after the file has been uploaded (e.g., DB error),
       // we must clean up the orphaned file from the disk.
       await cleanupUploadedFile(req.file);
-      logger.error({ error }, 'Error uploading avatar');
+      req.log.error({ error }, 'Error uploading avatar');
       next(error);
     }
   },
@@ -156,7 +166,7 @@ router.get(
       );
       res.json(notifications);
     } catch (error) {
-      logger.error({ error }, 'Error fetching notifications');
+      req.log.error({ error }, 'Error fetching notifications');
       next(error);
     }
   },
@@ -174,7 +184,7 @@ router.post(
       await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log);
       res.status(204).send(); // No Content
     } catch (error) {
-      logger.error({ error }, 'Error marking all notifications as read');
+      req.log.error({ error }, 'Error marking all notifications as read');
       next(error);
     }
   },
@@ -200,7 +210,7 @@ router.post(
       );
       res.status(204).send(); // Success, no content to return
     } catch (error) {
-      logger.error({ error }, 'Error marking notification as read');
+      req.log.error({ error }, 'Error marking notification as read');
       next(error);
     }
   },
@@ -210,10 +220,10 @@ router.post(
  * GET /api/users/profile - Get the full profile for the authenticated user.
  */
 router.get('/profile', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
-  logger.debug(`[ROUTE] GET /api/users/profile - ENTER`);
+  req.log.debug(`[ROUTE] GET /api/users/profile - ENTER`);
   const userProfile = req.user as UserProfile;
   try {
-    logger.debug(
+    req.log.debug(
       `[ROUTE] Calling db.userRepo.findUserProfileById for user: ${userProfile.user.user_id}`,
     );
     const fullUserProfile = await db.userRepo.findUserProfileById(
@@ -222,7 +232,7 @@ router.get('/profile', validateRequest(emptySchema), async (req, res, next: Next
     );
     res.json(fullUserProfile);
   } catch (error) {
-    logger.error({ error }, `[ROUTE] GET /api/users/profile - ERROR`);
+    req.log.error({ error }, `[ROUTE] GET /api/users/profile - ERROR`);
     next(error);
   }
 });
@@ -236,7 +246,7 @@ router.put(
   userUpdateLimiter,
   validateRequest(updateProfileSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] PUT /api/users/profile - ENTER`);
+    req.log.debug(`[ROUTE] PUT /api/users/profile - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as UpdateProfileRequest;
@@ -248,7 +258,7 @@ router.put(
     );
     res.json(updatedProfile);
   } catch (error) {
-    logger.error({ error }, `[ROUTE] PUT /api/users/profile - ERROR`);
+    req.log.error({ error }, `[ROUTE] PUT /api/users/profile - ERROR`);
     next(error);
   }
 },
@@ -263,7 +273,7 @@ router.put(
   userSensitiveUpdateLimiter,
   validateRequest(updatePasswordSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] PUT /api/users/profile/password - ENTER`);
+    req.log.debug(`[ROUTE] PUT /api/users/profile/password - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as UpdatePasswordRequest;
@@ -272,7 +282,7 @@ router.put(
     await userService.updateUserPassword(userProfile.user.user_id, body.newPassword, req.log);
     res.status(200).json({ message: 'Password updated successfully.' });
   } catch (error) {
-    logger.error({ error }, `[ROUTE] PUT /api/users/profile/password - ERROR`);
+    req.log.error({ error }, `[ROUTE] PUT /api/users/profile/password - ERROR`);
     next(error);
   }
 },
@@ -287,7 +297,7 @@ router.delete(
   userSensitiveUpdateLimiter,
   validateRequest(deleteAccountSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] DELETE /api/users/account - ENTER`);
+    req.log.debug(`[ROUTE] DELETE /api/users/account - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as DeleteAccountRequest;
@@ -296,7 +306,7 @@ router.delete(
     await userService.deleteUserAccount(userProfile.user.user_id, body.password, req.log);
     res.status(200).json({ message: 'Account deleted successfully.' });
   } catch (error) {
-    logger.error({ error }, `[ROUTE] DELETE /api/users/account - ERROR`);
+    req.log.error({ error }, `[ROUTE] DELETE /api/users/account - ERROR`);
     next(error);
   }
 },
@@ -306,13 +316,13 @@ router.delete(
  * GET /api/users/watched-items - Get all watched items for the authenticated user.
  */
 router.get('/watched-items', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
-  logger.debug(`[ROUTE] GET /api/users/watched-items - ENTER`);
+  req.log.debug(`[ROUTE] GET /api/users/watched-items - ENTER`);
   const userProfile = req.user as UserProfile;
   try {
     const items = await db.personalizationRepo.getWatchedItems(userProfile.user.user_id, req.log);
     res.json(items);
   } catch (error) {
-    logger.error({ error }, `[ROUTE] GET /api/users/watched-items - ERROR`);
+    req.log.error({ error }, `[ROUTE] GET /api/users/watched-items - ERROR`);
     next(error);
   }
 });
@@ -326,7 +336,7 @@ router.post(
   userUpdateLimiter,
   validateRequest(addWatchedItemSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] POST /api/users/watched-items - ENTER`);
+    req.log.debug(`[ROUTE] POST /api/users/watched-items - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as AddWatchedItemRequest;
@@ -342,7 +352,7 @@ router.post(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    logger.error({ error, body: req.body }, 'Failed to add watched item');
+    req.log.error({ error, body: req.body }, 'Failed to add watched item');
     next(error);
   }
 },
@@ -358,7 +368,7 @@ router.delete(
   userUpdateLimiter,
   validateRequest(watchedItemIdSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] DELETE /api/users/watched-items/:masterItemId - ENTER`);
+    req.log.debug(`[ROUTE] DELETE /api/users/watched-items/:masterItemId - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { params } = req as unknown as DeleteWatchedItemRequest;
@@ -370,7 +380,7 @@ router.delete(
     );
     res.status(204).send();
   } catch (error) {
-    logger.error({ error }, `[ROUTE] DELETE /api/users/watched-items/:masterItemId - ERROR`);
+    req.log.error({ error }, `[ROUTE] DELETE /api/users/watched-items/:masterItemId - ERROR`);
     next(error);
   }
 },
@@ -383,13 +393,13 @@ router.get(
   '/shopping-lists',
   validateRequest(emptySchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] GET /api/users/shopping-lists - ENTER`);
+    req.log.debug(`[ROUTE] GET /api/users/shopping-lists - ENTER`);
     const userProfile = req.user as UserProfile;
     try {
       const lists = await db.shoppingRepo.getShoppingLists(userProfile.user.user_id, req.log);
       res.json(lists);
     } catch (error) {
-      logger.error({ error }, `[ROUTE] GET /api/users/shopping-lists - ERROR`);
+      req.log.error({ error }, `[ROUTE] GET /api/users/shopping-lists - ERROR`);
       next(error);
     }
   },
@@ -404,7 +414,7 @@ router.get(
   '/shopping-lists/:listId',
   validateRequest(shoppingListIdSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] GET /api/users/shopping-lists/:listId - ENTER`);
+    req.log.debug(`[ROUTE] GET /api/users/shopping-lists/:listId - ENTER`);
     const userProfile = req.user as UserProfile;
     const { params } = req as unknown as GetShoppingListRequest;
     try {
@@ -415,7 +425,7 @@ router.get(
       );
       res.json(list);
     } catch (error) {
-      logger.error(
+      req.log.error(
         { error, listId: params.listId },
         `[ROUTE] GET /api/users/shopping-lists/:listId - ERROR`,
       );
@@ -433,7 +443,7 @@ router.post(
   userUpdateLimiter,
   validateRequest(createShoppingListSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] POST /api/users/shopping-lists - ENTER`);
+    req.log.debug(`[ROUTE] POST /api/users/shopping-lists - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as CreateShoppingListRequest;
@@ -448,7 +458,7 @@ router.post(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    logger.error({ error, body: req.body }, 'Failed to create shopping list');
+    req.log.error({ error, body: req.body }, 'Failed to create shopping list');
     next(error);
   }
 },
@@ -462,7 +472,7 @@ router.delete(
   userUpdateLimiter,
   validateRequest(shoppingListIdSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] DELETE /api/users/shopping-lists/:listId - ENTER`);
+    req.log.debug(`[ROUTE] DELETE /api/users/shopping-lists/:listId - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { params } = req as unknown as GetShoppingListRequest;
@@ -471,7 +481,7 @@ router.delete(
     res.status(204).send();
   } catch (error: unknown) {
     const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-    logger.error(
+    req.log.error(
       { errorMessage, params: req.params },
       `[ROUTE] DELETE /api/users/shopping-lists/:listId - ERROR`,
     );
@@ -503,7 +513,7 @@ router.post(
   userUpdateLimiter,
   validateRequest(addShoppingListItemSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] POST /api/users/shopping-lists/:listId/items - ENTER`);
+    req.log.debug(`[ROUTE] POST /api/users/shopping-lists/:listId/items - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { params, body } = req as unknown as AddShoppingListItemRequest;
@@ -519,7 +529,7 @@ router.post(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    logger.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
+    req.log.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
     next(error);
   }
 },
@@ -544,7 +554,7 @@ router.put(
   userUpdateLimiter,
   validateRequest(updateShoppingListItemSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] PUT /api/users/shopping-lists/items/:itemId - ENTER`);
+    req.log.debug(`[ROUTE] PUT /api/users/shopping-lists/items/:itemId - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { params, body } = req as unknown as UpdateShoppingListItemRequest;
@@ -557,7 +567,7 @@ router.put(
     );
     res.json(updatedItem);
   } catch (error: unknown) {
-    logger.error(
+    req.log.error(
       { error, params: req.params, body: req.body },
       `[ROUTE] PUT /api/users/shopping-lists/items/:itemId - ERROR`,
     );
@@ -576,7 +586,7 @@ router.delete(
   userUpdateLimiter,
   validateRequest(shoppingListItemIdSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] DELETE /api/users/shopping-lists/items/:itemId - ENTER`);
+    req.log.debug(`[ROUTE] DELETE /api/users/shopping-lists/items/:itemId - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { params } = req as unknown as DeleteShoppingListItemRequest;
@@ -584,7 +594,7 @@ router.delete(
     await db.shoppingRepo.removeShoppingListItem(params.itemId, userProfile.user.user_id, req.log);
     res.status(204).send();
   } catch (error: unknown) {
-    logger.error(
+    req.log.error(
       { error, params: req.params },
       `[ROUTE] DELETE /api/users/shopping-lists/items/:itemId - ERROR`,
     );
@@ -605,7 +615,7 @@ router.put(
   userUpdateLimiter,
   validateRequest(updatePreferencesSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] PUT /api/users/profile/preferences - ENTER`);
+    req.log.debug(`[ROUTE] PUT /api/users/profile/preferences - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as UpdatePreferencesRequest;
@@ -617,7 +627,7 @@ router.put(
     );
     res.json(updatedProfile);
   } catch (error) {
-    logger.error({ error }, `[ROUTE] PUT /api/users/profile/preferences - ERROR`);
+    req.log.error({ error }, `[ROUTE] PUT /api/users/profile/preferences - ERROR`);
     next(error);
   }
 },
@@ -627,7 +637,7 @@ router.get(
   '/me/dietary-restrictions',
   validateRequest(emptySchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] GET /api/users/me/dietary-restrictions - ENTER`);
+    req.log.debug(`[ROUTE] GET /api/users/me/dietary-restrictions - ENTER`);
     const userProfile = req.user as UserProfile;
     try {
       const restrictions = await db.personalizationRepo.getUserDietaryRestrictions(
@@ -636,7 +646,7 @@ router.get(
       );
       res.json(restrictions);
     } catch (error) {
-      logger.error({ error }, `[ROUTE] GET /api/users/me/dietary-restrictions - ERROR`);
+      req.log.error({ error }, `[ROUTE] GET /api/users/me/dietary-restrictions - ERROR`);
       next(error);
     }
   },
@@ -651,7 +661,7 @@ router.put(
   userUpdateLimiter,
   validateRequest(setUserRestrictionsSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] PUT /api/users/me/dietary-restrictions - ENTER`);
+    req.log.debug(`[ROUTE] PUT /api/users/me/dietary-restrictions - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as SetUserRestrictionsRequest;
@@ -666,14 +676,14 @@ router.put(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    logger.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
+    req.log.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
     next(error);
   }
 },
 );
 router.get('/me/appliances', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
-  logger.debug(`[ROUTE] GET /api/users/me/appliances - ENTER`);
+  req.log.debug(`[ROUTE] GET /api/users/me/appliances - ENTER`);
   const userProfile = req.user as UserProfile;
   try {
     const appliances = await db.personalizationRepo.getUserAppliances(
@@ -682,7 +692,7 @@ router.get('/me/appliances', validateRequest(emptySchema), async (req, res, next
     );
     res.json(appliances);
   } catch (error) {
-    logger.error({ error }, `[ROUTE] GET /api/users/me/appliances - ERROR`);
+    req.log.error({ error }, `[ROUTE] GET /api/users/me/appliances - ERROR`);
     next(error);
   }
 });
@@ -696,7 +706,7 @@ router.put(
   userUpdateLimiter,
   validateRequest(setUserAppliancesSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] PUT /api/users/me/appliances - ENTER`);
+    req.log.debug(`[ROUTE] PUT /api/users/me/appliances - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { body } = req as unknown as SetUserAppliancesRequest;
@@ -711,7 +721,7 @@ router.put(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    logger.error({ error, body: req.body }, 'Failed to set user appliances');
+    req.log.error({ error, body: req.body }, 'Failed to set user appliances');
     next(error);
   }
 },
@@ -735,7 +745,7 @@ router.get(
       const address = await userService.getUserAddress(userProfile, addressId, req.log);
       res.json(address);
     } catch (error) {
-      logger.error({ error }, 'Error fetching user address');
+      req.log.error({ error }, 'Error fetching user address');
       next(error);
     }
   },
@@ -775,7 +785,7 @@ router.put(
       const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed.
       res.status(200).json({ message: 'Address updated successfully', address_id: addressId });
     } catch (error) {
-      logger.error({ error }, 'Error updating user address');
+      req.log.error({ error }, 'Error updating user address');
       next(error);
     }
   },
@@ -795,7 +805,7 @@ router.post(
       const recipe = await db.recipeRepo.createRecipe(userProfile.user.user_id, body, req.log);
       res.status(201).json(recipe);
     } catch (error) {
-      logger.error({ error }, 'Error creating recipe');
+      req.log.error({ error }, 'Error creating recipe');
       next(error);
     }
   }
@@ -811,7 +821,7 @@ router.delete(
   userUpdateLimiter,
   validateRequest(recipeIdSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] DELETE /api/users/recipes/:recipeId - ENTER`);
+    req.log.debug(`[ROUTE] DELETE /api/users/recipes/:recipeId - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { params } = req as unknown as DeleteRecipeRequest;
@@ -819,7 +829,7 @@ router.delete(
     await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, false, req.log);
     res.status(204).send();
   } catch (error) {
-    logger.error(
+    req.log.error(
       { error, params: req.params },
       `[ROUTE] DELETE /api/users/recipes/:recipeId - ERROR`,
     );
@@ -850,7 +860,7 @@ router.put(
   userUpdateLimiter,
   validateRequest(updateRecipeSchema),
   async (req, res, next: NextFunction) => {
-    logger.debug(`[ROUTE] PUT /api/users/recipes/:recipeId - ENTER`);
+    req.log.debug(`[ROUTE] PUT /api/users/recipes/:recipeId - ENTER`);
     const userProfile = req.user as UserProfile;
     // Apply ADR-003 pattern for type safety
     const { params, body } = req as unknown as UpdateRecipeRequest;
@@ -864,7 +874,7 @@ router.put(
     );
     res.json(updatedRecipe);
   } catch (error) {
-    logger.error(
+    req.log.error(
       { error, params: req.params, body: req.body },
       `[ROUTE] PUT /api/users/recipes/:recipeId - ERROR`,
     );
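Every handler above repeats the same ADR-003 idiom: `validateRequest(schema)` runs first, then the handler narrows `req` with the double assertion `req as unknown as SomeRequest`. ADR-003 itself is not part of this diff, so the following is only a sketch of that pattern under assumed names; `updateProfileSchema` and `UpdateProfileRequest` stand in for the real definitions elsewhere in the repo.

```ts
import { z } from 'zod';
import type { Request } from 'express';

// Hypothetical schema shape -- the real updateProfileSchema lives elsewhere.
const updateProfileSchema = z.object({
  body: z.object({
    full_name: z.string().min(1),
  }),
});

// Request type derived from the schema, so body/params are typed after validation.
type UpdateProfileRequest = Request & z.infer<typeof updateProfileSchema>;

// Once validateRequest(updateProfileSchema) has rejected invalid input,
// the double assertion is safe and yields a fully typed body.
function readBody(req: Request): string {
  const { body } = req as unknown as UpdateProfileRequest;
  return body.full_name;
}
```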


@@ -0,0 +1,33 @@
// src/schemas/flyer.schemas.ts
import { z } from 'zod';
import { httpUrl, requiredString } from '../utils/zodUtils';
/**
* Zod schema for FlyerInsert type with strict URL validation.
* Ensures image_url and icon_url match database constraints (^https?://.*).
*/
export const flyerInsertSchema = z.object({
file_name: requiredString('File name is required'),
image_url: httpUrl('Flyer image URL must be a valid HTTP or HTTPS URL'),
icon_url: httpUrl('Flyer icon URL must be a valid HTTP or HTTPS URL'),
checksum: z
.string()
.length(64, 'Checksum must be 64 characters')
.regex(/^[a-f0-9]+$/, 'Checksum must be a valid hexadecimal string')
.nullable(),
store_name: requiredString('Store name is required'),
valid_from: z.string().datetime().nullable(),
valid_to: z.string().datetime().nullable(),
store_address: z.string().nullable(),
status: z.enum(['processed', 'needs_review', 'archived']),
item_count: z.number().int().nonnegative('Item count must be non-negative'),
uploaded_by: z.string().uuid().nullable().optional(),
});
/**
* Zod schema for FlyerDbInsert type with URL validation.
* Same as flyerInsertSchema but uses store_id instead of store_name.
*/
export const flyerDbInsertSchema = flyerInsertSchema.omit({ store_name: true }).extend({
store_id: z.number().int().positive('Store ID must be a positive integer'),
});
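A quick validation sketch for the schema above; the field values here are made up for illustration.

```ts
import { flyerInsertSchema } from './flyer.schemas';

// safeParse returns { success, data | error } instead of throwing.
const result = flyerInsertSchema.safeParse({
  file_name: 'weekly-deals.png',
  image_url: 'https://cdn.example.com/flyers/weekly-deals.png',
  icon_url: 'https://cdn.example.com/flyers/weekly-deals-icon.png',
  checksum: 'a'.repeat(64), // must be 64 lowercase hex characters
  store_name: 'Example Mart',
  valid_from: '2026-01-01T00:00:00Z',
  valid_to: null,
  store_address: null,
  status: 'processed',
  item_count: 42,
});

if (!result.success) {
  console.error(result.error.flatten()); // e.g. image_url failing the https? pattern
}
```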


@@ -271,10 +271,18 @@ export const checkRedisHealth = (): Promise<Response> => publicGet('/health/redi
 export const checkPm2Status = (): Promise<Response> => publicGet('/system/pm2-status');
 /**
- * Fetches all flyers from the backend.
- * @returns A promise that resolves to an array of Flyer objects.
+ * Fetches flyers from the backend with pagination support.
+ * @param limit - Maximum number of flyers to fetch (default: 20)
+ * @param offset - Number of flyers to skip (default: 0)
+ * @returns A promise that resolves to a paginated response of Flyer objects.
  */
-export const fetchFlyers = (): Promise<Response> => publicGet('/flyers');
+export const fetchFlyers = (limit?: number, offset?: number): Promise<Response> => {
+  const params = new URLSearchParams();
+  if (limit !== undefined) params.append('limit', limit.toString());
+  if (offset !== undefined) params.append('offset', offset.toString());
+  const queryString = params.toString();
+  return publicGet(queryString ? `/flyers?${queryString}` : '/flyers');
+};
 /**
  * Fetches a single flyer by its ID.

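Callers of the new `fetchFlyers` signature above can page through results like this (illustrative; the shape of the paginated response body is defined server-side and not shown in this hunk):

```ts
// Second page of 20: GET /flyers?limit=20&offset=20
const response = await fetchFlyers(20, 20);
const page = await response.json();

// No arguments preserves the old behaviour: GET /flyers
const firstPage = await fetchFlyers();
```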

@@ -9,18 +9,34 @@ import type { Logger } from 'pino';
 *
 * @param logger - The logger instance to use for warnings.
 * @returns A validated, fully qualified base URL without a trailing slash.
+ * @throws Error if the final URL doesn't match the http/https pattern required by database constraints.
 */
 export function getBaseUrl(logger: Logger): string {
   let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
   if (!baseUrl || !baseUrl.startsWith('http')) {
     const port = process.env.PORT || 3000;
-    const fallbackUrl = `https://example.com:${port}`;
+    // In test/development, use http://localhost. In production, this should never be reached.
+    const fallbackUrl =
+      process.env.NODE_ENV === 'test'
+        ? `http://localhost:${port}`
+        : `http://example.com:${port}`;
     if (baseUrl) {
       logger.warn(
-        `[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
+        `[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to: ${fallbackUrl}`,
       );
     }
     baseUrl = fallbackUrl;
   }
-  return baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
+  // Remove trailing slash
+  const finalUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
+  // Validate that the URL matches database constraint pattern (http:// or https://)
+  if (!/^https?:\/\/.+/.test(finalUrl)) {
+    throw new Error(
+      `[getBaseUrl] Generated URL '${finalUrl}' does not match required pattern (must start with http:// or https://)`,
+    );
+  }
+  return finalUrl;
 }
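Roughly how the revised function behaves, assuming a pino logger instance and an illustrative import path:

```ts
import pino from 'pino';
import { getBaseUrl } from './getBaseUrl'; // hypothetical module path

const logger = pino();

process.env.FRONTEND_URL = 'https://flyers.example.com/';
getBaseUrl(logger); // 'https://flyers.example.com' (trailing slash stripped)

process.env.FRONTEND_URL = '';
process.env.NODE_ENV = 'test';
process.env.PORT = '4000';
getBaseUrl(logger); // 'http://localhost:4000' (test fallback)

process.env.FRONTEND_URL = 'flyers.example.com'; // missing scheme
process.env.NODE_ENV = 'development';
getBaseUrl(logger); // warns, then returns 'http://example.com:4000'
```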


@@ -122,3 +122,31 @@ export const optionalBoolean = (
 */
 export const optionalString = () =>
   z.preprocess((val) => (val === null ? undefined : val), z.string().optional());
+/**
+ * Creates a Zod schema for a required HTTP/HTTPS URL.
+ * Validates that the URL starts with http:// or https:// to match database constraints.
+ * @param message Optional custom error message.
+ * @returns A Zod schema for the URL string.
+ */
+export const httpUrl = (message = 'Must be a valid HTTP or HTTPS URL') =>
+  z
+    .string()
+    .url(message)
+    .regex(/^https?:\/\/.+/, message);
+/**
+ * Creates a Zod schema for an optional HTTP/HTTPS URL.
+ * Validates that if provided, the URL starts with http:// or https://.
+ * @param message Optional custom error message.
+ * @returns A Zod schema for the optional URL string.
+ */
+export const optionalHttpUrl = (message = 'Must be a valid HTTP or HTTPS URL') =>
+  z.preprocess(
+    (val) => (val === null ? undefined : val),
+    z
+      .string()
+      .url(message)
+      .regex(/^https?:\/\/.+/, message)
+      .optional(),
+  );
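The extra regex matters because `z.string().url()` alone accepts any scheme that `new URL()` can parse (ftp:, file:, and so on); the added check pins values to the database's `^https?://` constraint. For example:

```ts
import { httpUrl, optionalHttpUrl } from './zodUtils';

httpUrl().safeParse('https://example.com/flyer.png').success; // true
httpUrl().safeParse('ftp://example.com/flyer.png').success;   // false -- rejected by the regex
optionalHttpUrl().parse(null);      // undefined -- null is normalized away by preprocess
optionalHttpUrl().parse(undefined); // undefined
```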