All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 24m17s
CRITICAL FIX: Prevents rsync --delete from removing PM2 config files Root Cause: - rsync --delete was removing ecosystem*.config.cjs and .env.* files - This caused PM2 daemon corruption affecting ALL projects on shared server - Same vulnerability that crashed stock-alert PM2 processes Three-Layer Safety System: 1. Pre-flight checks (git repo, critical files, file count validation) 2. Stop PM2 before file operations (prevent ENOENT/uv_cwd errors) 3. Comprehensive rsync excludes (ecosystem configs, .env files, coverage) Changes: - deploy-to-prod.yml: Added safety system to production deployment - deploy-to-test.yml: Added safety system to test deployment Files excluded from rsync --delete: - ecosystem*.config.cjs (PM2 configuration) - .env* (environment secrets) - coverage, .nyc_output, .vitest-results (test artifacts) - .vscode, .idea (IDE files) Prevents: - PM2 daemon crashes across all projects - Process CWD (working directory) deletion - Cross-project interference on shared PM2 daemon Related: - Stock-alert fix that identified this vulnerability - PM2 Process Isolation documentation (CLAUDE.md) Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
374 lines · 18 KiB · YAML
# .gitea/workflows/deploy-to-prod.yml
#
# This workflow provides a MANUAL trigger to deploy the application to the
# PRODUCTION environment. This should only be run after the 'Deploy to Test'
# workflow has succeeded on the desired commit.
name: Manual - Deploy to Production
# Manual trigger only. There is no push/PR trigger on purpose: production
# deploys must be initiated by a human who types the confirmation phrase.
on:
  workflow_dispatch:
    inputs:
      # Guard phrase: the job aborts unless this exactly equals "deploy-to-prod".
      # The default is deliberately a non-matching value.
      confirmation:
        description: 'Type "deploy-to-prod" to confirm you want to deploy the main branch.'
        required: true
        default: 'do-not-run'
      # Set to true to force a PM2 reload even when the deployed package
      # version matches the version PM2 reports as running.
      force_reload:
        description: 'Force PM2 reload even if version matches (true/false).'
        required: false
        type: boolean
        default: false
jobs:
  deploy-production:
    # Self-hosted runner registered for the projectium.com server. The runner
    # host must provide: node/npm, git, rsync, psql, dos2unix, and a shared
    # PM2 daemon (also used by other projects on this server — see the
    # safety layers in the deploy step below).
    runs-on: projectium.com

    steps:
      - name: Verify Confirmation Phrase
        # SECURITY: the input is operator-typed text. It is passed in through
        # an environment variable instead of being interpolated into the
        # script body, so it cannot be used for shell injection.
        env:
          CONFIRMATION: ${{ gitea.event.inputs.confirmation }}
        run: |
          if [ "$CONFIRMATION" != "deploy-to-prod" ]; then
            echo "ERROR: Confirmation phrase did not match. Aborting deployment."
            exit 1
          fi
          echo "✅ Confirmation accepted. Proceeding with production deployment."

      - name: Checkout Code from 'main' branch
        uses: actions/checkout@v3
        with:
          ref: 'main' # Explicitly check out the main branch for production deployment
          # Full history is required by the 'npm version' push and the
          # 'git log --grep' used when building the frontend.
          fetch-depth: 0

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'
          cache: 'npm'
          cache-dependency-path: '**/package-lock.json'

      - name: Install Dependencies
        # 'npm ci' gives a clean, lockfile-exact install for the build.
        run: npm ci

      - name: Bump Minor Version and Push
        run: |
          # Configure git for the commit.
          git config --global user.name 'Gitea Actions'
          git config --global user.email 'actions@gitea.projectium.com'

          # Bump the minor version number. This creates a new commit and a new tag.
          # The commit message includes [skip ci] to prevent this push from triggering another workflow run.
          npm version minor -m "ci: Bump version to %s for production release [skip ci]"

          # Push the new commit and the new tag back to the main branch.
          git push --follow-tags

      - name: Check for Production Database Schema Changes
        # Compares the sha256 of the rolled-up schema file in git against the
        # hash recorded in public.schema_info. A mismatch aborts the deploy so
        # a manual migration can be performed first.
        env:
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER_PROD }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
            echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
            exit 1
          fi
          echo "--- Checking for production schema changes ---"
          # dos2unix normalizes line endings so the hash is stable across
          # checkouts with different autocrlf settings.
          CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
          echo "Current Git Schema Hash: $CURRENT_HASH"
          # The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
          DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
          echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
          if [ -z "$DEPLOYED_HASH" ]; then
            echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
          elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
            echo "ERROR: Database schema mismatch detected! A manual database migration is required."
            exit 1
          else
            echo "✅ Schema is up to date. No changes detected."
          fi

      - name: Build React Application for Production
        # Source Maps (ADR-015): If SENTRY_AUTH_TOKEN is set, the @sentry/vite-plugin will:
        # 1. Generate hidden source maps during build
        # 2. Upload them to Bugsink for error de-minification
        # 3. Delete the .map files after upload (so they're not publicly accessible)
        run: |
          if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
            echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
            exit 1
          fi

          # Source map upload is optional - warn if not configured
          if [ -z "${{ secrets.SENTRY_AUTH_TOKEN }}" ]; then
            echo "WARNING: SENTRY_AUTH_TOKEN not set. Source maps will NOT be uploaded to Bugsink."
            echo "         Errors will show minified stack traces. To fix, add SENTRY_AUTH_TOKEN to Gitea secrets."
          fi

          GITEA_SERVER_URL="https://gitea.projectium.com"
          # Most recent "real" commit subject — the [skip ci] version-bump
          # commit created above is filtered out.
          COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
          PACKAGE_VERSION=$(node -p "require('./package.json').version")
          GENERATE_SOURCE_MAPS=true \
          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
          VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
          VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
          VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN }}" \
          VITE_SENTRY_ENVIRONMENT="production" \
          VITE_SENTRY_ENABLED="true" \
          SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}" \
          SENTRY_URL="https://bugsink.projectium.com" \
          VITE_API_BASE_URL=/api VITE_API_KEY="${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" npm run build

      - name: Deploy Application to Production Server
        run: |
          # Without pipefail, a failing rsync piped through 'tail' would be
          # reported as success (the pipeline exit code is tail's).
          set -o pipefail

          echo "========================================="
          echo "DEPLOYING TO PRODUCTION SERVER"
          echo "========================================="
          APP_PATH="/var/www/flyer-crawler.projectium.com"

          # ========================================
          # LAYER 1: PRE-FLIGHT SAFETY CHECKS
          # ========================================
          echo ""
          echo "--- Pre-Flight Safety Checks ---"

          # Check 1: Verify we're in a git repository
          if ! git rev-parse --git-dir > /dev/null 2>&1; then
            echo "❌ FATAL: Not in a git repository! Aborting to prevent data loss."
            exit 1
          fi
          echo "✅ Git repository verified"

          # Check 2: Verify critical files exist before deployment
          if [ ! -f "package.json" ] || [ ! -f "server.ts" ]; then
            echo "❌ FATAL: Critical files missing (package.json or server.ts). Aborting."
            exit 1
          fi
          echo "✅ Critical files verified"

          # Check 3: Verify we have actual content to deploy (prevent empty checkout)
          FILE_COUNT=$(find . -type f | wc -l)
          if [ "$FILE_COUNT" -lt 10 ]; then
            echo "❌ FATAL: Suspiciously few files ($FILE_COUNT). Aborting to prevent catastrophic deletion."
            exit 1
          fi
          echo "✅ File count verified: $FILE_COUNT files ready to deploy"

          # ========================================
          # LAYER 2: STOP PM2 BEFORE FILE OPERATIONS
          # ========================================
          # Stopping before rsync prevents ENOENT/uv_cwd errors if a process's
          # working directory is touched. The restart step below brings the
          # processes back up.
          echo ""
          echo "--- Stopping PM2 Processes ---"
          pm2 stop flyer-crawler-api flyer-crawler-worker flyer-crawler-analytics-worker || echo "No production processes to stop"
          pm2 list

          # ========================================
          # LAYER 3: SAFE RSYNC WITH COMPREHENSIVE EXCLUDES
          # ========================================
          echo ""
          echo "--- Deploying Application Files ---"
          mkdir -p "$APP_PATH"
          mkdir -p "$APP_PATH/flyer-images/icons" "$APP_PATH/flyer-images/archive"

          # Deploy backend with critical file exclusions. Globs cover every
          # ecosystem*.config.cjs variant and the bare '.env' file as well as
          # '.env.*' — files --delete must never remove, because losing the PM2
          # config or env secrets corrupts the shared PM2 daemon for ALL
          # projects on this server.
          rsync -avz --delete \
            --exclude 'node_modules' \
            --exclude '.git' \
            --exclude 'dist' \
            --exclude 'flyer-images' \
            --exclude 'ecosystem*.config.cjs' \
            --exclude '.env*' \
            --exclude 'coverage' \
            --exclude '.coverage' \
            --exclude '.nyc_output' \
            --exclude '.vitest-results' \
            --exclude 'test-results' \
            --exclude 'playwright-report' \
            --exclude 'playwright-report-visual' \
            --exclude '.vscode' \
            --exclude '.idea' \
            ./ "$APP_PATH/" 2>&1 | tail -20

          echo "✅ Backend files deployed ($(find "$APP_PATH" -type f | wc -l) files)"

          # Deploy frontend assets
          rsync -avz dist/ "$APP_PATH" 2>&1 | tail -10
          echo "✅ Frontend assets deployed"

          echo ""
          echo "========================================="
          echo "DEPLOYMENT COMPLETE"
          echo "========================================="

      - name: Log Workflow Metadata
        run: |
          echo "=== WORKFLOW METADATA ==="
          echo "Workflow file: deploy-to-prod.yml"
          echo "Workflow file hash: $(sha256sum .gitea/workflows/deploy-to-prod.yml | cut -d' ' -f1)"
          echo "Git commit: $(git rev-parse HEAD)"
          echo "Git branch: $(git rev-parse --abbrev-ref HEAD)"
          echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
          echo "Actor: ${{ gitea.actor }}"
          echo "=== END METADATA ==="

      - name: Install Backend Dependencies and Restart Production Server
        env:
          # --- Production Secrets Injection ---
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER_PROD }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
          # Explicitly use database 0 for production (test uses database 1)
          REDIS_URL: 'redis://localhost:6379/0'
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
          FRONTEND_URL: 'https://flyer-crawler.projectium.com'
          JWT_SECRET: ${{ secrets.JWT_SECRET }}
          GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
          GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
          SMTP_HOST: 'localhost'
          SMTP_PORT: '1025'
          SMTP_SECURE: 'false'
          SMTP_USER: ''
          SMTP_PASS: ''
          SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
          # OAuth Providers
          GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }}
          GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }}
          GITHUB_CLIENT_ID: ${{ secrets.GH_CLIENT_ID }}
          GITHUB_CLIENT_SECRET: ${{ secrets.GH_CLIENT_SECRET }}
          # Sentry/Bugsink Error Tracking (ADR-015)
          SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
          SENTRY_ENVIRONMENT: 'production'
          SENTRY_ENABLED: 'true'
          # Workflow input routed through env (not interpolated into the script).
          FORCE_RELOAD: ${{ gitea.event.inputs.force_reload }}
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
            echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
            exit 1
          fi
          echo "Installing production dependencies and restarting server..."
          cd /var/www/flyer-crawler.projectium.com
          npm install --omit=dev

          # === PRE-CLEANUP PM2 STATE LOGGING ===
          echo "=== PRE-CLEANUP PM2 STATE ==="
          pm2 jlist
          echo "=== END PRE-CLEANUP STATE ==="

          # --- Cleanup Errored Processes with Defense-in-Depth Safeguards ---
          echo "Cleaning up errored or stopped PRODUCTION PM2 processes..."
          node -e "
          const exec = require('child_process').execSync;
          try {
            const list = JSON.parse(exec('pm2 jlist').toString());
            const prodProcesses = ['flyer-crawler-api', 'flyer-crawler-worker', 'flyer-crawler-analytics-worker'];

            // Filter for processes that match our criteria
            const targetProcesses = list.filter(p =>
              (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') &&
              prodProcesses.includes(p.name)
            );

            // SAFEGUARD 1: Process count validation
            const totalProcesses = list.length;
            if (targetProcesses.length === totalProcesses && totalProcesses > 3) {
              console.error('SAFETY ABORT: Filter would delete ALL processes!');
              console.error('Total processes: ' + totalProcesses + ', Target processes: ' + targetProcesses.length);
              console.error('This indicates a potential filter bug. Aborting cleanup.');
              process.exit(1);
            }

            // SAFEGUARD 2: Explicit name verification
            console.log('Found ' + targetProcesses.length + ' PRODUCTION processes to clean:');
            targetProcesses.forEach(p => {
              console.log('  - ' + p.name + ' (status: ' + p.pm2_env.status + ', pm_id: ' + p.pm2_env.pm_id + ')');
            });

            // Perform the cleanup
            targetProcesses.forEach(p => {
              console.log('Deleting ' + p.pm2_env.status + ' production process: ' + p.name + ' (' + p.pm2_env.pm_id + ')');
              try {
                exec('pm2 delete ' + p.pm2_env.pm_id);
              } catch(e) {
                console.error('Failed to delete ' + p.pm2_env.pm_id);
              }
            });

            console.log('Production process cleanup complete.');
          } catch (e) {
            console.error('Error cleaning up processes:', e);
          }
          "

          # === POST-CLEANUP VERIFICATION ===
          echo "=== POST-CLEANUP VERIFICATION ==="
          pm2 jlist | node -e "
          try {
            const list = JSON.parse(require('fs').readFileSync(0, 'utf-8'));
            const prodProcesses = list.filter(p => p.name && p.name.startsWith('flyer-crawler-') && !p.name.endsWith('-test') && !p.name.endsWith('-dev'));
            console.log('Production processes after cleanup:');
            prodProcesses.forEach(p => {
              console.log('  ' + p.name + ': ' + p.pm2_env.status);
            });
            if (prodProcesses.length === 0) {
              console.log('  (no production processes currently running)');
            }
          } catch (e) {
            console.error('Failed to parse PM2 output:', e.message);
          }
          "
          echo "=== END POST-CLEANUP VERIFICATION ==="

          # --- Version Check Logic ---
          # Get the version from the newly deployed package.json
          NEW_VERSION=$(node -p "require('./package.json').version")
          echo "Deployed Package Version: $NEW_VERSION"

          # Get the running version and status from PM2 for the main API process.
          # We use small node scripts to parse the JSON output from pm2 jlist.
          RUNNING_VERSION=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.version : ''); } catch(e) { console.log(''); }")
          echo "Running PM2 Version: $RUNNING_VERSION"
          RUNNING_STATUS=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.status : ''); } catch(e) { console.log(''); }")
          echo "Running PM2 Status: $RUNNING_STATUS"

          # BUGFIX: the deploy step stops the PM2 processes before rsync, so a
          # matching version alone must NOT skip the reload — that would leave
          # production stopped. Reload whenever the api process is not online.
          if [ "$FORCE_RELOAD" == "true" ] || [ "$NEW_VERSION" != "$RUNNING_VERSION" ] || [ -z "$RUNNING_VERSION" ] || [ "$RUNNING_STATUS" != "online" ]; then
            if [ "$FORCE_RELOAD" == "true" ]; then
              echo "Force reload triggered by manual input. Reloading PM2..."
            else
              echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
            fi
            pm2 startOrReload ecosystem.config.cjs --update-env && pm2 save
            echo "Production backend server reloaded successfully."
          else
            echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
          fi

          echo "Updating schema hash in production database..."
          CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
          PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
            "CREATE TABLE IF NOT EXISTS public.schema_info (
              environment VARCHAR(50) PRIMARY KEY,
              schema_hash VARCHAR(64) NOT NULL,
              deployed_at TIMESTAMP DEFAULT NOW()
            );
            INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
            ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"

          UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
          if [ "$CURRENT_HASH" = "$UPDATED_HASH" ]; then
            echo "✅ Schema hash successfully updated in the database to: $UPDATED_HASH"
          else
            echo "ERROR: Failed to update schema hash in the database."
          fi

      - name: Show PM2 Environment for Production
        run: |
          echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
          sleep 5 # Wait a few seconds for the app to start and log its output.

          # Resolve the PM2 ID dynamically to ensure we target the correct process
          PM2_ID=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.pm_id : ''); } catch(e) { console.log(''); }")

          if [ -n "$PM2_ID" ]; then
            echo "Found process ID: $PM2_ID"
            pm2 describe "$PM2_ID" || echo "Failed to describe process $PM2_ID"
            pm2 logs "$PM2_ID" --lines 20 --nostream || echo "Failed to get logs for $PM2_ID"
            pm2 env "$PM2_ID" || echo "Failed to get env for $PM2_ID"
          else
            echo "Could not find process 'flyer-crawler-api' in pm2 list."
            pm2 list # Fallback to listing everything to help debug
          fi