Compare commits: v0.9.60...a14816c8ee

118 commits:

a14816c8ee, 08b220e29c, d41a3f1887, 1f6cdc62d7, 978c63bacd, 544eb7ae3c, f6839f6e14, 3fac29436a,
56f45c9301, 83460abce4, 1b084b2ba4, 0ea034bdc8, fc9e27078a, fb8cbe8007, f49f786c23, dd31141d4e,
8073094760, 33a1e146ab, 4f8216db77, 42d605d19f, 749350df7f, ac085100fe, ce4ecd1268, a57cfc396b,
987badbf8d, d38fcd21c1, 6e36cc3b07, 62a8a8bf4b, 96038cfcf4, 981214fdd0, 92b0138108, 27f0255240,
4e06dde9e1, b9a0e5b82c, bb7fe8dc2c, 81f1f2250b, c6c90bb615, 60489a626b, 3c63e1ecbb, acbcb39cbe,
a87a0b6af1, abdc3cb6db, 7a1bd50119, 87d75d0571, faf2900c28, 5258efc179, 2a5cc5bb51, 8eaee2844f,
440a19c3a7, 4ae6d84240, 5870e5c614, 2e7ebbd9ed, dc3fa21359, 11aeac5edd, f6c0c082bc, 4e22213cd1,
9815eb3686, 2bf4a7c1e6, 5eed3f51f4, d250932c05, 7d1f964574, 3b69e58de3, 5211aadd22, a997d1d0b0,
cf5f77c58e, cf0f5bb820, 503e7084da, d8aa19ac40, dcd9452b8c, 6d468544e2, 2913c7aa09, 77f9cb6081,
2f1d73ca12, 402e2617ca, e14c19c112, ea46f66c7a, a42ee5a461, 71710c8316, 1480a73ab0, b3efa3c756,
fb8fd57bb6, 0a90d9d590, 6ab473f5f0, c46efe1474, 25d6b76f6d, 9ffcc9d65d, 1285702210, d38b751b40,
e122d55ced, af9992f773, 3912139273, b5f7f5e4d1, 5173059621, ebceb0e2e3, e75054b1ab, 639313485a,
4a04e478c4, 1814469eb4, b777430ff7, 23830c0d4e, ef42fee982, 65cb54500c, 664ad291be, ff912b9055,
ec32027bd4, 59f773639b, dd2be5eecf, a94bfbd3e9, 338bbc9440, 60aad04642, 7f2aff9a24, 689320e7d2,
e457bbf046, 68cdbb6066, cea6be7145, 74a5ca6331, 62470e7661, 2b517683fd
.claude/hooks.json (new file, 16 lines)
@@ -0,0 +1,16 @@
{
  "$schema": "https://claude.ai/schemas/hooks.json",
  "hooks": {
    "PreToolUse": [
      {
        "matcher": "Bash",
        "hooks": [
          {
            "type": "command",
            "command": "node -e \"const cmd = process.argv[1] || ''; const isTest = /\\b(npm\\s+(run\\s+)?test|vitest|jest)\\b/i.test(cmd); const isWindows = process.platform === 'win32'; const inContainer = process.env.REMOTE_CONTAINERS === 'true' || process.env.DEVCONTAINER === 'true'; if (isTest && isWindows && !inContainer) { console.error('BLOCKED: Tests must run on Linux. Use Dev Container (Reopen in Container) or WSL.'); process.exit(1); }\" -- \"$CLAUDE_TOOL_INPUT\""
          }
        ]
      }
    ]
  }
}
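The interesting part of this hook is the one-line `node -e` guard embedded in `command`. For reference, here is the same check pulled out into a standalone TypeScript sketch so the matcher can be exercised directly; the file name and `shouldBlock` helper are illustrative and not part of the repository:

```typescript
// guard-check.ts - illustrative re-expression of the PreToolUse guard above.
// Blocks test commands when running on native Windows outside a dev container.
const TEST_COMMAND = /\b(npm\s+(run\s+)?test|vitest|jest)\b/i;

function shouldBlock(
  cmd: string,
  platform: NodeJS.Platform = process.platform,
  env: NodeJS.ProcessEnv = process.env,
): boolean {
  const isTest = TEST_COMMAND.test(cmd);
  const isWindows = platform === 'win32';
  const inContainer = env.REMOTE_CONTAINERS === 'true' || env.DEVCONTAINER === 'true';
  return isTest && isWindows && !inContainer;
}

// A few spot checks of the matcher's behaviour:
console.log(shouldBlock('npm run test:unit', 'win32', {}));                       // true  (blocked)
console.log(shouldBlock('npm run test:unit', 'win32', { DEVCONTAINER: 'true' })); // false (inside container)
console.log(shouldBlock('vitest run', 'linux', {}));                              // false (not Windows)
console.log(shouldBlock('npm run build', 'win32', {}));                           // false (not a test command)
```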
@@ -18,11 +18,9 @@
      "Bash(PGPASSWORD=postgres psql:*)",
      "Bash(npm search:*)",
      "Bash(npx:*)",
      "Bash(curl -s -H \"Authorization: token c72bc0f14f623fec233d3c94b3a16397fe3649ef\" https://gitea.projectium.com/api/v1/user)",
      "Bash(curl:*)",
      "Bash(powershell:*)",
      "Bash(cmd.exe:*)",
      "Bash(export NODE_ENV=test DB_HOST=localhost DB_USER=postgres DB_PASSWORD=postgres DB_NAME=flyer_crawler_dev REDIS_URL=redis://localhost:6379 FRONTEND_URL=http://localhost:5173 JWT_SECRET=test-jwt-secret:*)",
      "Bash(npm run test:integration:*)",
      "Bash(grep:*)",
      "Bash(done)",

@@ -45,7 +43,62 @@
      "Bash(powershell.exe -Command \"echo ''List all podman containers'' | claude --print\")",
      "Bash(powershell.exe -Command \"echo ''List my repositories on gitea.projectium.com using gitea-projectium MCP'' | claude --print\")",
      "Bash(powershell.exe -Command \"echo ''List my repositories on gitea.projectium.com using gitea-projectium MCP'' | claude --print --allowedTools ''mcp__gitea-projectium__*''\")",
      "Bash(powershell.exe -Command \"echo ''Fetch the homepage of https://gitea.projectium.com and summarize it'' | claude --print --allowedTools ''mcp__fetch__*''\")"
      "Bash(powershell.exe -Command \"echo ''Fetch the homepage of https://gitea.projectium.com and summarize it'' | claude --print --allowedTools ''mcp__fetch__*''\")",
      "Bash(dir \"C:\\\\Users\\\\games3\\\\.claude\")",
      "Bash(dir:*)",
      "Bash(D:nodejsnpx.cmd -y @modelcontextprotocol/server-fetch --help)",
      "Bash(cmd /c \"dir /o-d C:\\\\Users\\\\games3\\\\.claude\\\\debug 2>nul | head -10\")",
      "mcp__memory__read_graph",
      "mcp__memory__create_entities",
      "mcp__memory__search_nodes",
      "mcp__memory__delete_entities",
      "mcp__sequential-thinking__sequentialthinking",
      "mcp__filesystem__list_directory",
      "mcp__filesystem__read_multiple_files",
      "mcp__filesystem__directory_tree",
      "mcp__filesystem__read_text_file",
      "Bash(wc:*)",
      "Bash(npm install:*)",
      "Bash(git grep:*)",
      "Bash(findstr:*)",
      "Bash(git add:*)",
      "mcp__filesystem__write_file",
      "mcp__podman__container_list",
      "Bash(podman cp:*)",
      "mcp__podman__container_inspect",
      "mcp__podman__network_list",
      "Bash(podman network connect:*)",
      "Bash(npm run build:*)",
      "Bash(set NODE_ENV=test)",
      "Bash(podman-compose:*)",
      "Bash(timeout 60 podman machine start:*)",
      "Bash(podman build:*)",
      "Bash(podman network rm:*)",
      "Bash(npm run lint)",
      "Bash(npm run typecheck:*)",
      "Bash(npm run type-check:*)",
      "Bash(npm run test:unit:*)",
      "mcp__filesystem__move_file",
      "Bash(git checkout:*)",
      "Bash(podman image inspect:*)",
      "Bash(node -e:*)",
      "Bash(xargs -I {} sh -c 'if ! grep -q \"\"vi.mock.*apiClient\"\" \"\"{}\"\"; then echo \"\"{}\"\"; fi')",
      "Bash(MSYS_NO_PATHCONV=1 podman exec:*)",
      "Bash(docker ps:*)",
      "Bash(find:*)",
      "Bash(\"/c/Users/games3/.local/bin/uvx.exe\" markitdown-mcp --help)",
      "Bash(git stash:*)",
      "Bash(ping:*)",
      "Bash(tee:*)",
      "Bash(timeout 1800 podman exec flyer-crawler-dev npm run test:unit:*)",
      "mcp__filesystem__edit_file",
      "Bash(timeout 300 tail:*)",
      "mcp__filesystem__list_allowed_directories",
      "mcp__memory__add_observations",
      "Bash(ssh:*)",
      "mcp__redis__list",
      "Read(//d/gitea/bugsink-mcp/**)",
      "Bash(d:/nodejs/npm.cmd install)"
    ]
  }
}
@@ -1,18 +1,96 @@
{
  // ============================================================================
  // VS CODE DEV CONTAINER CONFIGURATION
  // ============================================================================
  // This file configures VS Code's Dev Containers extension to provide a
  // consistent, fully-configured development environment.
  //
  // Features:
  // - Automatic PostgreSQL + Redis startup with healthchecks
  // - Automatic npm install
  // - Automatic database schema initialization and seeding
  // - Pre-configured VS Code extensions (ESLint, Prettier)
  // - Podman support for Windows users
  //
  // Usage:
  // 1. Install the "Dev Containers" extension in VS Code
  // 2. Open this project folder
  // 3. Click "Reopen in Container" when prompted (or use Command Palette)
  // 4. Wait for container build and initialization
  // 5. Development server starts automatically
  // ============================================================================

  "name": "Flyer Crawler Dev (Ubuntu 22.04)",

  // Use Docker Compose for multi-container setup
  "dockerComposeFile": ["../compose.dev.yml"],
  "service": "app",
  "workspaceFolder": "/app",

  // VS Code customizations
  "customizations": {
    "vscode": {
      "extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
      "extensions": [
        // Code quality
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode",
        // TypeScript
        "ms-vscode.vscode-typescript-next",
        // Database
        "mtxr.sqltools",
        "mtxr.sqltools-driver-pg",
        // Utilities
        "eamodio.gitlens",
        "streetsidesoftware.code-spell-checker"
      ],
      "settings": {
        "editor.formatOnSave": true,
        "editor.defaultFormatter": "esbenp.prettier-vscode",
        "typescript.preferences.importModuleSpecifier": "relative"
      }
    }
  },

  // Run as root (required for npm global installs)
  "remoteUser": "root",
  // Automatically install dependencies when the container is created.
  // This runs inside the container, populating the isolated node_modules volume.
  "postCreateCommand": "npm install",

  // ============================================================================
  // Lifecycle Commands
  // ============================================================================

  // initializeCommand: Runs on the HOST before the container is created.
  // Starts Podman machine on Windows (no-op if already running or using Docker).
  "initializeCommand": "powershell -Command \"podman machine start; exit 0\"",

  // postCreateCommand: Runs ONCE when the container is first created.
  // This is where we do full initialization: npm install + database setup.
  "postCreateCommand": "chmod +x scripts/docker-init.sh && ./scripts/docker-init.sh",

  // postAttachCommand: Runs EVERY TIME VS Code attaches to the container.
  // Starts the development server automatically.
  "postAttachCommand": "npm run dev:container",
  // Try to start podman machine, but exit with success (0) even if it's already running
  "initializeCommand": "powershell -Command \"podman machine start; exit 0\""

  // ============================================================================
  // Port Forwarding
  // ============================================================================
  // Automatically forward these ports from the container to the host
  "forwardPorts": [3000, 3001],

  // Labels for forwarded ports in VS Code's Ports panel
  "portsAttributes": {
    "3000": {
      "label": "Frontend (Vite)",
      "onAutoForward": "notify"
    },
    "3001": {
      "label": "Backend API",
      "onAutoForward": "notify"
    }
  },

  // ============================================================================
  // Features
  // ============================================================================
  // Additional dev container features (optional)
  "features": {}
}
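Neither `compose.dev.yml` nor `scripts/docker-init.sh` appears in this diff, so the following is only a hypothetical sketch of the kind of readiness gate such a `postCreateCommand` init script needs before it can initialize and seed the database; every name and connection value below is an assumption taken from the `.env.example` defaults shown later in this diff:

```typescript
// wait-for-services.ts - HYPOTHETICAL sketch; the real work is done by
// scripts/docker-init.sh, which is not shown here. Waits for the compose
// services ("postgres" and "redis") before any schema init/seeding step.
import { Client } from 'pg';
import Redis from 'ioredis';

async function waitFor(check: () => Promise<void>, label: string, attempts = 30): Promise<void> {
  for (let i = 1; i <= attempts; i++) {
    try {
      await check();
      console.log(`${label} is ready`);
      return;
    } catch {
      console.log(`waiting for ${label} (${i}/${attempts})...`);
      await new Promise((resolve) => setTimeout(resolve, 2000));
    }
  }
  throw new Error(`${label} never became ready`);
}

async function main(): Promise<void> {
  await waitFor(async () => {
    // Defaults mirror .env.example (service name "postgres").
    const client = new Client({
      host: process.env.DB_HOST ?? 'postgres',
      user: process.env.DB_USER ?? 'postgres',
      password: process.env.DB_PASSWORD ?? 'postgres',
      database: process.env.DB_NAME ?? 'flyer_crawler_dev',
    });
    await client.connect();
    await client.end();
  }, 'PostgreSQL');

  await waitFor(async () => {
    const redis = new Redis(process.env.REDIS_URL ?? 'redis://redis:6379', { lazyConnect: true });
    await redis.connect();
    await redis.quit();
  }, 'Redis');

  // Schema initialization and seeding would follow here (handled by docker-init.sh in the repo).
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```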
.env.example (new file, 104 lines)
@@ -0,0 +1,104 @@
# .env.example
# ============================================================================
# ENVIRONMENT VARIABLES TEMPLATE
# ============================================================================
# Copy this file to .env and fill in your values.
# For local development with Docker/Podman, these defaults should work out of the box.
#
# IMPORTANT: Never commit .env files with real credentials to version control!
# ============================================================================

# ===================
# Database Configuration
# ===================
# PostgreSQL connection settings
# For container development, use the service name "postgres"
DB_HOST=postgres
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=flyer_crawler_dev

# ===================
# Redis Configuration
# ===================
# Redis URL for caching and job queues
# For container development, use the service name "redis"
REDIS_URL=redis://redis:6379
# Optional: Redis password (leave empty if not required)
REDIS_PASSWORD=

# ===================
# Application Settings
# ===================
NODE_ENV=development
# Frontend URL for CORS and email links
FRONTEND_URL=http://localhost:3000

# ===================
# Authentication
# ===================
# REQUIRED: Secret key for signing JWT tokens (generate a random 64+ character string)
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production

# OAuth Providers (Optional - enable social login)
# Google OAuth - https://console.cloud.google.com/apis/credentials
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
# GitHub OAuth - https://github.com/settings/developers
GITHUB_CLIENT_ID=
GITHUB_CLIENT_SECRET=

# ===================
# AI/ML Services
# ===================
# REQUIRED: Google Gemini API key for flyer OCR processing
GEMINI_API_KEY=your-gemini-api-key

# ===================
# External APIs
# ===================
# Optional: Google Maps API key for geocoding store addresses
GOOGLE_MAPS_API_KEY=

# ===================
# Email Configuration (Optional)
# ===================
# SMTP settings for sending emails (deal notifications, password reset)
SMTP_HOST=
SMTP_PORT=587
SMTP_SECURE=false
SMTP_USER=
SMTP_PASS=
SMTP_FROM_EMAIL=noreply@example.com

# ===================
# Worker Configuration (Optional)
# ===================
# Concurrency settings for background job workers
WORKER_CONCURRENCY=1
EMAIL_WORKER_CONCURRENCY=10
ANALYTICS_WORKER_CONCURRENCY=1
CLEANUP_WORKER_CONCURRENCY=10

# Worker lock duration in milliseconds (default: 2 minutes)
WORKER_LOCK_DURATION=120000

# ===================
# Error Tracking (ADR-015)
# ===================
# Sentry-compatible error tracking via Bugsink (self-hosted)
# DSNs are created in Bugsink UI at http://localhost:8000 (dev) or your production URL
# Backend DSN - for Express/Node.js errors
SENTRY_DSN=
# Frontend DSN - for React/browser errors (uses VITE_ prefix)
VITE_SENTRY_DSN=
# Environment name for error grouping (defaults to NODE_ENV)
SENTRY_ENVIRONMENT=development
VITE_SENTRY_ENVIRONMENT=development
# Enable/disable error tracking (default: true)
SENTRY_ENABLED=true
VITE_SENTRY_ENABLED=true
# Enable debug mode for SDK troubleshooting (default: false)
SENTRY_DEBUG=false
VITE_SENTRY_DEBUG=false
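The template marks `JWT_SECRET` and `GEMINI_API_KEY` as REQUIRED, and the deploy workflows later in this diff fail fast when the database variables are empty. A minimal TypeScript sketch of the same idea at application startup; the file name and helpers are illustrative, not the project's actual config module:

```typescript
// env-check.ts - illustrative startup validation; the required set mirrors the
// REQUIRED entries above plus the DB_* variables the workflows check for.
const REQUIRED = ['JWT_SECRET', 'GEMINI_API_KEY', 'DB_HOST', 'DB_USER', 'DB_PASSWORD', 'DB_NAME'] as const;

export function assertRequiredEnv(env: NodeJS.ProcessEnv = process.env): void {
  const missing = REQUIRED.filter((name) => !env[name] || env[name]!.trim() === '');
  if (missing.length > 0) {
    throw new Error(`Missing required environment variables: ${missing.join(', ')}`);
  }
}

// Numeric settings such as the worker concurrency values fall back to the
// documented defaults when unset or unparsable.
export function intFromEnv(name: string, fallback: number, env: NodeJS.ProcessEnv = process.env): number {
  const raw = env[name];
  const parsed = raw === undefined ? Number.NaN : Number.parseInt(raw, 10);
  return Number.isNaN(parsed) ? fallback : parsed;
}

assertRequiredEnv();
const workerConcurrency = intFromEnv('WORKER_CONCURRENCY', 1);
console.log(`Worker concurrency: ${workerConcurrency}`);
```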
.env.test (new file, 6 lines)
@@ -0,0 +1,6 @@
DB_HOST=10.89.0.4
DB_USER=flyer
DB_PASSWORD=flyer
DB_NAME=flyer_crawler_test
REDIS_URL=redis://redis:6379
NODE_ENV=test
@@ -1,61 +0,0 @@
{
  "mcpServers": {
    "markitdown": {
      "command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
      "args": [
        "markitdown-mcp"
      ]
    },
    "gitea-torbonium": {
      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
      "args": ["run", "-t", "stdio"],
      "env": {
        "GITEA_HOST": "https://gitea.torbonium.com",
        "GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
      }
    },
    "gitea-lan": {
      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
      "args": ["run", "-t", "stdio"],
      "env": {
        "GITEA_HOST": "https://gitea.torbolan.com",
        "GITEA_ACCESS_TOKEN": "REPLACE_WITH_NEW_TOKEN"
      }
    },
    "gitea-projectium": {
      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
      "args": ["run", "-t", "stdio"],
      "env": {
        "GITEA_HOST": "https://gitea.projectium.com",
        "GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
      }
    },
    "podman": {
      "command": "D:\\nodejs\\npx.cmd",
      "args": ["-y", "podman-mcp-server@latest"],
      "env": {
        "DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
      }
    },
    "filesystem": {
      "command": "D:\\nodejs\\npx.cmd",
      "args": [
        "-y",
        "@modelcontextprotocol/server-filesystem",
        "D:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
      ]
    },
    "fetch": {
      "command": "D:\\nodejs\\npx.cmd",
      "args": ["-y", "@modelcontextprotocol/server-fetch"]
    },
    "sequential-thinking": {
      "command": "D:\\nodejs\\npx.cmd",
      "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
    },
    "memory": {
      "command": "D:\\nodejs\\npx.cmd",
      "args": ["-y", "@modelcontextprotocol/server-memory"]
    }
  }
}
@@ -63,8 +63,8 @@ jobs:
      - name: Check for Production Database Schema Changes
        env:
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_USER: ${{ secrets.DB_USER_PROD }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then

@@ -98,6 +98,9 @@
          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
          VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
          VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
          VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN }}" \
          VITE_SENTRY_ENVIRONMENT="production" \
          VITE_SENTRY_ENABLED="true" \
          VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build

      - name: Deploy Application to Production Server

@@ -114,10 +117,11 @@
        env:
          # --- Production Secrets Injection ---
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_USER: ${{ secrets.DB_USER_PROD }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
          REDIS_URL: 'redis://localhost:6379'
          # Explicitly use database 0 for production (test uses database 1)
          REDIS_URL: 'redis://localhost:6379/0'
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
          FRONTEND_URL: 'https://flyer-crawler.projectium.com'
          JWT_SECRET: ${{ secrets.JWT_SECRET }}

@@ -129,6 +133,15 @@
          SMTP_USER: ''
          SMTP_PASS: ''
          SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
          # OAuth Providers
          GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }}
          GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }}
          GITHUB_CLIENT_ID: ${{ secrets.GH_CLIENT_ID }}
          GITHUB_CLIENT_SECRET: ${{ secrets.GH_CLIENT_SECRET }}
          # Sentry/Bugsink Error Tracking (ADR-015)
          SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
          SENTRY_ENVIRONMENT: 'production'
          SENTRY_ENABLED: 'true'
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
            echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."

@@ -158,7 +171,7 @@
          else
            echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
          fi
          pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
          pm2 startOrReload ecosystem.config.cjs --update-env && pm2 save
          echo "Production backend server reloaded successfully."
          else
            echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
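The deploy step above injects `SENTRY_DSN`, `SENTRY_ENVIRONMENT`, and `SENTRY_ENABLED` for the Bugsink-backed error tracking referenced as ADR-015 (the ADR itself is not part of this diff). As a rough sketch of how a Node backend can consume exactly these variables, using standard `@sentry/node` options; whether the project wires it up this way is an assumption:

```typescript
// instrument.ts - sketch of consuming the SENTRY_* variables injected above.
import * as Sentry from '@sentry/node';

Sentry.init({
  dsn: process.env.SENTRY_DSN,                                 // Bugsink-issued, Sentry-compatible DSN
  environment: process.env.SENTRY_ENVIRONMENT ?? process.env.NODE_ENV,
  enabled: process.env.SENTRY_ENABLED !== 'false',             // deploy sets 'true'; unset also means on
  debug: process.env.SENTRY_DEBUG === 'true',
});

// Errors can then be reported explicitly where needed:
try {
  throw new Error('example failure');
} catch (err) {
  Sentry.captureException(err);
}
```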
@@ -96,6 +96,24 @@ jobs:
          # It prevents the accumulation of duplicate processes from previous test runs.
          node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.name && p.name.endsWith('-test')) { console.log('Deleting test process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id, e.message); } } }); console.log('✅ Test process cleanup complete.'); } catch (e) { if (e.stdout.toString().includes('No process found')) { console.log('No PM2 processes running, cleanup not needed.'); } else { console.error('Error cleaning up test processes:', e.message); } }" || true
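The `node -e` one-liner above is dense, so here is the same logic written out as a readable TypeScript sketch. It is only a re-expression of the inline code (delete every PM2 process whose name ends with `-test`); the file name is hypothetical and the script is not a separate utility in the repository:

```typescript
// cleanup-test-processes.ts - readable equivalent of the inline PM2 cleanup above.
import { execSync } from 'child_process';

interface Pm2Process {
  name?: string;
  pm2_env: { pm_id: number };
}

try {
  const list: Pm2Process[] = JSON.parse(execSync('pm2 jlist').toString());
  for (const proc of list) {
    if (proc.name && proc.name.endsWith('-test')) {
      console.log(`Deleting test process: ${proc.name} (${proc.pm2_env.pm_id})`);
      try {
        execSync(`pm2 delete ${proc.pm2_env.pm_id}`);
      } catch (err) {
        console.error(`Failed to delete ${proc.pm2_env.pm_id}`, (err as Error).message);
      }
    }
  }
  console.log('Test process cleanup complete.');
} catch (err) {
  // `pm2 jlist` failing with "No process found" simply means there is nothing to clean up.
  const stdout = (err as { stdout?: Buffer }).stdout?.toString() ?? '';
  if (stdout.includes('No process found')) {
    console.log('No PM2 processes running, cleanup not needed.');
  } else {
    console.error('Error cleaning up test processes:', (err as Error).message);
  }
}
```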
      - name: Flush Redis Test Database Before Tests
        # CRITICAL: Clear Redis database 1 (test database) to remove stale BullMQ jobs.
        # This prevents old jobs with outdated error messages from polluting test results.
        # NOTE: We use database 1 for tests to isolate from production (database 0).
        env:
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
        run: |
          echo "--- Flushing Redis database 1 (test database) to remove stale jobs ---"
          if [ -z "$REDIS_PASSWORD" ]; then
            echo "⚠️ REDIS_PASSWORD_TEST not set, attempting flush without password..."
            redis-cli -n 1 FLUSHDB || echo "Redis flush failed (no password)"
          else
            redis-cli -a "$REDIS_PASSWORD" -n 1 FLUSHDB 2>/dev/null && echo "✅ Redis database 1 (test) flushed successfully." || echo "⚠️ Redis flush failed"
          fi
          # Verify the flush worked by checking key count on database 1
          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 1 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
          echo "Redis database 1 key count after flush: $KEY_COUNT"

      - name: Run All Tests and Generate Merged Coverage Report
        # This single step runs both unit and integration tests, then merges their
        # coverage data into a single report. It combines the environment variables

@@ -103,13 +121,16 @@
        env:
          # --- Database credentials for the test suite ---
          # These are injected from Gitea secrets into the runner's environment.
          # CRITICAL: Use TEST-specific credentials that have CREATE privileges on the public schema.
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_NAME: 'flyer-crawler-test' # Explicitly set for tests
          DB_USER: ${{ secrets.DB_USER_TEST }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_TEST }}
          DB_NAME: ${{ secrets.DB_DATABASE_TEST }}

          # --- Redis credentials for the test suite ---
          REDIS_URL: 'redis://localhost:6379'
          # CRITICAL: Use Redis database 1 to isolate tests from production (which uses db 0).
          # This prevents the production worker from picking up test jobs.
          REDIS_URL: 'redis://localhost:6379/1'
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}

          # --- Integration test specific variables ---

@@ -117,6 +138,13 @@
          VITE_API_BASE_URL: 'http://localhost:3001/api'
          GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}

          # --- Storage path for flyer images ---
          # CRITICAL: Use an absolute path in the test runner's working directory for file storage.
          # This ensures tests can read processed files to verify their contents (e.g., EXIF stripping).
          # Without this, multer and flyerProcessingService default to /var/www/.../flyer-images.
          # NOTE: We use ${{ github.workspace }} which resolves to the checkout directory.
          STORAGE_PATH: '${{ github.workspace }}/flyer-images'

          # --- JWT Secret for Passport authentication in tests ---
          JWT_SECRET: ${{ secrets.JWT_SECRET }}

@@ -171,8 +199,8 @@
            --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true

          echo "--- Running E2E Tests ---"
          # Run E2E tests using the dedicated E2E config which inherits from integration config.
          # We still pass --coverage to enable it, but directory and timeout are now in the config.
          # Run E2E tests using the dedicated E2E config.
          # E2E uses port 3098, integration uses 3099 to avoid conflicts.
          npx vitest run --config vitest.config.e2e.ts --coverage \
            --coverage.exclude='**/*.test.ts' \
            --coverage.exclude='**/tests/**' \

@@ -213,7 +241,19 @@
          # Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
          # We only generate the 'json' report here because it's all nyc needs for merging.
          echo "Server coverage report about to be generated..."
          npx c8 report --exclude='**/*.test.ts' --exclude='**/tests/**' --exclude='**/mocks/**' --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
          npx c8 report \
            --include='src/**' \
            --exclude='**/*.test.ts' \
            --exclude='**/*.test.tsx' \
            --exclude='**/tests/**' \
            --exclude='**/mocks/**' \
            --exclude='hostexecutor/**' \
            --exclude='scripts/**' \
            --exclude='*.config.js' \
            --exclude='*.config.ts' \
            --reporter=json \
            --temp-directory .coverage/tmp/integration-server \
            --reports-dir .coverage/integration-server
          echo "Server coverage report generated. Verifying existence:"
          ls -l .coverage/integration-server/coverage-final.json

@@ -253,12 +293,18 @@
            --reporter=html \
            --report-dir .coverage/ \
            --temp-dir "$NYC_SOURCE_DIR" \
            --include "src/**" \
            --exclude "**/*.test.ts" \
            --exclude "**/*.test.tsx" \
            --exclude "**/tests/**" \
            --exclude "**/mocks/**" \
            --exclude "**/index.tsx" \
            --exclude "**/vite-env.d.ts" \
            --exclude "**/vitest.setup.ts"
            --exclude "**/vitest.setup.ts" \
            --exclude "hostexecutor/**" \
            --exclude "scripts/**" \
            --exclude "*.config.js" \
            --exclude "*.config.ts"

          # Re-enable secret masking for subsequent steps.
          echo "::secret-masking::"

@@ -283,10 +329,11 @@
      - name: Check for Test Database Schema Changes
        env:
          # Use test database credentials for this check.
          # CRITICAL: Use TEST-specific credentials that have CREATE privileges on the public schema.
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }} # This is used by psql
          DB_NAME: ${{ secrets.DB_DATABASE_TEST }} # This is used by the application
          DB_USER: ${{ secrets.DB_USER_TEST }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_TEST }}
          DB_NAME: ${{ secrets.DB_DATABASE_TEST }}
        run: |
          # Fail-fast check to ensure secrets are configured in Gitea.
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then

@@ -341,6 +388,9 @@
          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
          VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
          VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
          VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN_TEST }}" \
          VITE_SENTRY_ENVIRONMENT="test" \
          VITE_SENTRY_ENABLED="true" \
          VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build

      - name: Deploy Application to Test Server

@@ -379,13 +429,14 @@
          # Your Node.js application will read these directly from `process.env`.

          # Database Credentials
          # CRITICAL: Use TEST-specific credentials that have CREATE privileges on the public schema.
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_USER: ${{ secrets.DB_USER_TEST }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_TEST }}
          DB_NAME: ${{ secrets.DB_DATABASE_TEST }}

          # Redis Credentials
          REDIS_URL: 'redis://localhost:6379'
          # Redis Credentials (use database 1 to isolate from production)
          REDIS_URL: 'redis://localhost:6379/1'
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}

          # Application Secrets

@@ -401,6 +452,10 @@
          SMTP_USER: '' # Using MailHog, no auth needed
          SMTP_PASS: '' # Using MailHog, no auth needed
          SMTP_FROM_EMAIL: 'noreply@flyer-crawler-test.projectium.com'
          # Sentry/Bugsink Error Tracking (ADR-015)
          SENTRY_DSN: ${{ secrets.SENTRY_DSN_TEST }}
          SENTRY_ENVIRONMENT: 'test'
          SENTRY_ENABLED: 'true'

        run: |
          # Fail-fast check to ensure secrets are configured in Gitea.

@@ -424,10 +479,11 @@
          echo "Cleaning up errored or stopped PM2 processes..."
          node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"

          # Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
          # It will START the process if it's not running, or RELOAD it if it is.
          # Use `startOrReload` with the TEST ecosystem file. This starts test-specific processes
          # (flyer-crawler-api-test, flyer-crawler-worker-test, flyer-crawler-analytics-worker-test)
          # that run separately from production processes.
          # We also add `&& pm2 save` to persist the process list across server reboots.
          pm2 startOrReload ecosystem.config.cjs --env test --update-env && pm2 save
          pm2 startOrReload ecosystem-test.config.cjs --update-env && pm2 save
          echo "Test backend server reloaded successfully."

          # After a successful deployment, update the schema hash in the database.
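Several of the steps above hinge on the same convention: production uses Redis logical database 0 and tests use database 1, selected through the `/0` or `/1` suffix on `REDIS_URL`. A small TypeScript sketch of how that suffix translates into an isolated BullMQ connection; the queue name comes from the flush workflow later in this diff, while the wiring itself is illustrative rather than the project's actual code:

```typescript
// test-queue-connection.ts - illustrative: the '/1' path on the Redis URL selects
// logical database 1, so test jobs never reach the production workers on database 0.
import IORedis from 'ioredis';
import { Queue } from 'bullmq';

// e.g. REDIS_URL='redis://localhost:6379/1' as set in the test environment above.
const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/1', {
  maxRetriesPerRequest: null, // required by BullMQ
  password: process.env.REDIS_PASSWORD,
});

const flyerQueue = new Queue('flyer-processing', { connection });

async function main(): Promise<void> {
  await flyerQueue.add('crawl', { flyerId: 123 }); // lands in database 1 only
  console.log(await flyerQueue.getJobCounts());     // counts reflect the test database
  await flyerQueue.close();
  await connection.quit();
}

main().catch(console.error);
```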
@@ -20,9 +20,9 @@ jobs:
      # Use production database credentials for this entire job.
      DB_HOST: ${{ secrets.DB_HOST }}
      DB_PORT: ${{ secrets.DB_PORT }}
      DB_USER: ${{ secrets.DB_USER }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
      DB_NAME: ${{ secrets.DB_NAME_PROD }}
      DB_USER: ${{ secrets.DB_USER_PROD }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
      DB_NAME: ${{ secrets.DB_DATABASE_PROD }}

    steps:
      - name: Validate Secrets

@@ -23,9 +23,9 @@ jobs:
    env:
      # Use production database credentials for this entire job.
      DB_HOST: ${{ secrets.DB_HOST }}
      DB_USER: ${{ secrets.DB_USER }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD }} # Used by psql
      DB_NAME: ${{ secrets.DB_DATABASE_PROD }} # Used by the application
      DB_USER: ${{ secrets.DB_USER_PROD }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
      DB_NAME: ${{ secrets.DB_DATABASE_PROD }}

    steps:
      - name: Checkout Code

@@ -23,9 +23,9 @@ jobs:
    env:
      # Use test database credentials for this entire job.
      DB_HOST: ${{ secrets.DB_HOST }}
      DB_USER: ${{ secrets.DB_USER }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD }} # Used by psql
      DB_NAME: ${{ secrets.DB_DATABASE_TEST }} # Used by the application
      DB_USER: ${{ secrets.DB_USER_TEST }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD_TEST }}
      DB_NAME: ${{ secrets.DB_DATABASE_TEST }}

    steps:
      - name: Checkout Code

@@ -22,8 +22,8 @@ jobs:
    env:
      # Use production database credentials for this entire job.
      DB_HOST: ${{ secrets.DB_HOST }}
      DB_USER: ${{ secrets.DB_USER }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
      DB_USER: ${{ secrets.DB_USER_PROD }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
      DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
      BACKUP_DIR: '/var/www/backups' # Define a dedicated directory for backups

@@ -62,8 +62,8 @@ jobs:
      - name: Check for Production Database Schema Changes
        env:
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_USER: ${{ secrets.DB_USER_PROD }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then

@@ -113,10 +113,11 @@ jobs:
        env:
          # --- Production Secrets Injection ---
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_USER: ${{ secrets.DB_USER_PROD }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
          REDIS_URL: 'redis://localhost:6379'
          # Explicitly use database 0 for production (test uses database 1)
          REDIS_URL: 'redis://localhost:6379/0'
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
          FRONTEND_URL: 'https://flyer-crawler.projectium.com'
          JWT_SECRET: ${{ secrets.JWT_SECRET }}
.gitea/workflows/manual-redis-flush-prod.yml (new file, 167 lines)
@@ -0,0 +1,167 @@
# .gitea/workflows/manual-redis-flush-prod.yml
#
# DANGER: This workflow is DESTRUCTIVE and intended for manual execution only.
# It will completely FLUSH the PRODUCTION Redis database (db 0).
# This will clear all BullMQ queues, sessions, caches, and any other Redis data.
#
name: Manual - Flush Production Redis

on:
  workflow_dispatch:
    inputs:
      confirmation:
        description: 'DANGER: This will FLUSH production Redis. Type "flush-production-redis" to confirm.'
        required: true
        default: 'do-not-run'
      flush_type:
        description: 'What to flush?'
        required: true
        type: choice
        options:
          - 'queues-only'
          - 'entire-database'
        default: 'queues-only'

jobs:
  flush-redis:
    runs-on: projectium.com # This job runs on your self-hosted Gitea runner.

    env:
      REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'
          cache: 'npm'
          cache-dependency-path: '**/package-lock.json'

      - name: Install Dependencies
        run: npm ci

      - name: Validate Secrets
        run: |
          if [ -z "$REDIS_PASSWORD" ]; then
            echo "ERROR: REDIS_PASSWORD_PROD secret is not set in Gitea repository settings."
            exit 1
          fi
          echo "✅ Redis password secret is present."

      - name: Verify Confirmation Phrase
        run: |
          if [ "${{ gitea.event.inputs.confirmation }}" != "flush-production-redis" ]; then
            echo "ERROR: Confirmation phrase did not match. Aborting Redis flush."
            exit 1
          fi
          echo "✅ Confirmation accepted. Proceeding with Redis flush."

      - name: Show Current Redis State
        run: |
          echo "--- Current Redis Database 0 (Production) State ---"
          redis-cli -a "$REDIS_PASSWORD" -n 0 INFO keyspace 2>/dev/null || echo "Could not get keyspace info"
          echo ""
          echo "--- Key Count ---"
          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
          echo "Production Redis (db 0) key count: $KEY_COUNT"
          echo ""
          echo "--- BullMQ Queue Keys ---"
          redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | head -20 || echo "No BullMQ keys found"

      - name: 🚨 FINAL WARNING & PAUSE 🚨
        run: |
          echo "*********************************************************************"
          echo "WARNING: YOU ARE ABOUT TO FLUSH PRODUCTION REDIS DATA."
          echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
          echo ""
          if [ "${{ gitea.event.inputs.flush_type }}" = "entire-database" ]; then
            echo "This will DELETE ALL Redis data including sessions, caches, and queues!"
          else
            echo "This will DELETE ALL BullMQ queue data (pending jobs, failed jobs, etc.)"
          fi
          echo ""
          echo "This action is IRREVERSIBLE. Press Ctrl+C in the runner terminal NOW to cancel."
          echo "Sleeping for 10 seconds..."
          echo "*********************************************************************"
          sleep 10

      - name: Flush BullMQ Queues Only
        if: ${{ gitea.event.inputs.flush_type == 'queues-only' }}
        env:
          REDIS_URL: 'redis://localhost:6379/0'
        run: |
          echo "--- Obliterating BullMQ queues using Node.js ---"
          node -e "
            const { Queue } = require('bullmq');
            const IORedis = require('ioredis');

            const connection = new IORedis(process.env.REDIS_URL, {
              maxRetriesPerRequest: null,
              password: process.env.REDIS_PASSWORD,
            });

            const queueNames = [
              'flyer-processing',
              'email-sending',
              'analytics-reporting',
              'weekly-analytics-reporting',
              'file-cleanup',
              'token-cleanup'
            ];

            (async () => {
              for (const name of queueNames) {
                try {
                  const queue = new Queue(name, { connection });
                  const counts = await queue.getJobCounts();
                  console.log('Queue \"' + name + '\" before obliterate:', JSON.stringify(counts));
                  await queue.obliterate({ force: true });
                  console.log('✅ Obliterated queue: ' + name);
                  await queue.close();
                } catch (err) {
                  console.error('⚠️ Failed to obliterate queue ' + name + ':', err.message);
                }
              }
              await connection.quit();
              console.log('✅ All BullMQ queues obliterated.');
            })();
          "

      - name: Flush Entire Redis Database
        if: ${{ gitea.event.inputs.flush_type == 'entire-database' }}
        run: |
          echo "--- Flushing entire Redis database 0 (production) ---"
          redis-cli -a "$REDIS_PASSWORD" -n 0 FLUSHDB 2>/dev/null && echo "✅ Redis database 0 flushed successfully." || echo "❌ Redis flush failed"

      - name: Verify Flush Results
        run: |
          echo "--- Redis Database 0 (Production) State After Flush ---"
          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
          echo "Production Redis (db 0) key count after flush: $KEY_COUNT"
          echo ""
          echo "--- Remaining BullMQ Queue Keys ---"
          BULL_KEYS=$(redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | wc -l || echo "0")
          echo "BullMQ key count: $BULL_KEYS"

          if [ "${{ gitea.event.inputs.flush_type }}" = "queues-only" ] && [ "$BULL_KEYS" -gt 0 ]; then
            echo "⚠️ Warning: Some BullMQ keys may still exist. This can happen if new jobs were added during the flush."
          fi

      - name: Summary
        run: |
          echo ""
          echo "=========================================="
          echo "PRODUCTION REDIS FLUSH COMPLETE"
          echo "=========================================="
          echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
          echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
          echo ""
          echo "NOTE: If you flushed queues, any pending jobs (flyer processing,"
          echo "emails, analytics, etc.) have been permanently deleted."
          echo ""
          echo "The production workers will automatically start processing"
          echo "new jobs as they are added to the queues."
          echo "=========================================="
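Before triggering the destructive "queues-only" flush it can help to see what would be deleted. The following is a read-only companion to the embedded script above, reusing the same queue names and connection settings but never calling `obliterate()`; it is a sketch, not part of the workflow:

```typescript
// queue-dry-run.ts - report job counts per queue without deleting anything.
import IORedis from 'ioredis';
import { Queue } from 'bullmq';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0', {
  maxRetriesPerRequest: null,
  password: process.env.REDIS_PASSWORD,
});

// Same queue names as the flush step above.
const queueNames = [
  'flyer-processing',
  'email-sending',
  'analytics-reporting',
  'weekly-analytics-reporting',
  'file-cleanup',
  'token-cleanup',
];

(async () => {
  for (const name of queueNames) {
    const queue = new Queue(name, { connection });
    console.log(name, await queue.getJobCounts());
    await queue.close();
  }
  await connection.quit();
})();
```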
.gitignore (vendored, 16 lines changed)
@@ -11,6 +11,18 @@ node_modules
dist
dist-ssr
*.local
.env
*.tsbuildinfo

# Test coverage
coverage
.nyc_output
.coverage

# Test artifacts - flyer-images/ is a runtime directory
# Test fixtures are stored in src/tests/assets/ instead
flyer-images/
test-output.txt

# Editor directories and files
.vscode/*

@@ -22,3 +34,7 @@ dist-ssr
*.njsproj
*.sln
*.sw?
Thumbs.db
.claude
nul
tmpclaude*
.husky/pre-commit (new file, 1 line)
@@ -0,0 +1 @@
npx lint-staged
.lintstagedrc.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "*.{js,jsx,ts,tsx}": ["eslint --fix", "prettier --write"],
  "*.{json,md,css,html,yml,yaml}": ["prettier --write"]
}
.nycrc.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "text": {
    "maxCols": 200
  }
}
.prettierignore (new file, 41 lines)
@@ -0,0 +1,41 @@
# Dependencies
node_modules/

# Build output
dist/
build/
.cache/

# Coverage reports
coverage/
.coverage/

# IDE and editor configs
.idea/
.vscode/
*.swp
*.swo

# Logs
*.log
logs/

# Environment files (may contain secrets)
.env*
!.env.example

# Lock files (managed by package managers)
package-lock.json
pnpm-lock.yaml
yarn.lock

# Generated files
*.min.js
*.min.css

# Git directory
.git/
.gitea/

# Test artifacts
__snapshots__/
AUTHENTICATION.md (new file, 110 lines)
@@ -0,0 +1,110 @@
# Authentication Setup

Flyer Crawler supports OAuth authentication via Google and GitHub. This guide walks through configuring both providers.

---

## Google OAuth

### Step 1: Create OAuth Credentials

1. Go to the [Google Cloud Console](https://console.cloud.google.com/)
2. Create a new project (or select an existing one)
3. Navigate to **APIs & Services > Credentials**
4. Click **Create Credentials > OAuth client ID**
5. Select **Web application** as the application type

### Step 2: Configure Authorized Redirect URIs

Add the callback URL where Google will redirect users after authentication:

| Environment | Redirect URI                                        |
| ----------- | --------------------------------------------------- |
| Development | `http://localhost:3001/api/auth/google/callback`    |
| Production  | `https://your-domain.com/api/auth/google/callback`  |

### Step 3: Save Credentials

After clicking **Create**, you'll receive:

- **Client ID**
- **Client Secret**

Store these securely as environment variables:

- `GOOGLE_CLIENT_ID`
- `GOOGLE_CLIENT_SECRET`

---

## GitHub OAuth

### Step 1: Create OAuth App

1. Go to your [GitHub Developer Settings](https://github.com/settings/developers)
2. Navigate to **OAuth Apps**
3. Click **New OAuth App**

### Step 2: Fill in Application Details

| Field                      | Value                                                 |
| -------------------------- | ----------------------------------------------------- |
| Application name           | Flyer Crawler (or your preferred name)                |
| Homepage URL               | `http://localhost:5173` (dev) or your production URL  |
| Authorization callback URL | `http://localhost:3001/api/auth/github/callback`      |

### Step 3: Save GitHub Credentials

After clicking **Register application**, you'll receive:

- **Client ID**
- **Client Secret**

Store these securely as environment variables:

- `GITHUB_CLIENT_ID`
- `GITHUB_CLIENT_SECRET`

---

## Environment Variables Summary

| Variable               | Description                              |
| ---------------------- | ---------------------------------------- |
| `GOOGLE_CLIENT_ID`     | Google OAuth client ID                   |
| `GOOGLE_CLIENT_SECRET` | Google OAuth client secret               |
| `GITHUB_CLIENT_ID`     | GitHub OAuth client ID                   |
| `GITHUB_CLIENT_SECRET` | GitHub OAuth client secret               |
| `JWT_SECRET`           | Secret for signing authentication tokens |
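The test workflow earlier in this diff mentions Passport for authentication. A minimal sketch of how the Google variables in this table could plug into a `passport-google-oauth20` strategy; the callback URL matches the development row above, and the verify callback is a placeholder rather than the application's actual implementation:

```typescript
// google-strategy.ts - minimal sketch wiring the summary variables into Passport.
import passport from 'passport';
import { Strategy as GoogleStrategy } from 'passport-google-oauth20';

passport.use(
  new GoogleStrategy(
    {
      clientID: process.env.GOOGLE_CLIENT_ID!,
      clientSecret: process.env.GOOGLE_CLIENT_SECRET!,
      callbackURL: 'http://localhost:3001/api/auth/google/callback', // dev redirect URI from the table above
    },
    (accessToken, refreshToken, profile, done) => {
      // Placeholder verify callback: look up or create the user for this Google
      // profile, then issue a JWT signed with JWT_SECRET. Here we just pass the
      // profile basics straight through.
      done(null, { id: profile.id, displayName: profile.displayName });
    },
  ),
);
```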
---

## Production Considerations

When deploying to production:

1. **Update redirect URIs** in both Google Cloud Console and GitHub OAuth settings to use your production domain
2. **Use HTTPS** for all callback URLs in production
3. **Store secrets securely** using your CI/CD platform's secrets management (e.g., Gitea repository secrets)

---

## Troubleshooting

### "redirect_uri_mismatch" Error

The callback URL in your OAuth provider settings doesn't match what the application is sending. Verify:

- The URL is exactly correct (no trailing slashes, correct port)
- You're using the right environment (dev vs production URLs)

### "invalid_client" Error

The Client ID or Client Secret is incorrect. Double-check your environment variables.

---

## Related Documentation

- [Installation Guide](INSTALL.md) - Local development setup
- [Deployment Guide](DEPLOYMENT.md) - Production deployment
Some files were not shown because too many files have changed in this diff.