# .gitea/workflows/manual-db-reset-prod.yml
#
# DANGER: This workflow is DESTRUCTIVE and intended for manual execution only.
# It will completely WIPE and RESET the PRODUCTION database.
#
name: Manual - Reset Production Database

on:
  workflow_dispatch:
    inputs:
      user_email:
        description: 'Email of the user to preserve (e.g., tsx@gmail.com). Leave blank to skip backup/restore.'
        required: false
      confirmation:
        description: 'DANGER: This will WIPE the production database. Type "reset-production-db" to confirm.'
        required: true
        default: 'do-not-run'

jobs:
  reset-database:
    runs-on: projectium.com # This job runs on your self-hosted Gitea runner.
    env:
      # Use production database credentials for this entire job.
      DB_HOST: ${{ secrets.DB_HOST }}
      DB_USER: ${{ secrets.DB_USER_PROD }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
      DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
      # SECURITY: expose untrusted workflow inputs to the shell via env vars
      # instead of inlining ${{ ... }} into run: scripts. Inlined expressions
      # are template-expanded into the script text, which allows shell
      # injection through a crafted input value.
      USER_EMAIL: ${{ gitea.event.inputs.user_email }}
      CONFIRMATION: ${{ gitea.event.inputs.confirmation }}

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Validate Secrets
        run: |
          # Fail-fast check to ensure secrets are configured in Gitea.
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
            echo "ERROR: One or more production database secrets (DB_HOST, DB_USER_PROD, DB_PASSWORD_PROD, DB_DATABASE_PROD) are not set in Gitea repository settings."
            exit 1
          fi
          echo "✅ All required database secrets are present."

      - name: Verify Confirmation Phrase
        run: |
          # Compare via the env var (not a ${{ }} template expansion) so a
          # malicious input cannot break out of the shell quoting.
          if [ "$CONFIRMATION" != "reset-production-db" ]; then
            echo "ERROR: Confirmation phrase did not match. Aborting database reset."
            exit 1
          fi
          echo "✅ Confirmation accepted. Proceeding with database reset."

      - name: 🚨 FINAL WARNING & PAUSE 🚨
        run: |
          echo "*********************************************************************"
          echo "WARNING: YOU ARE ABOUT TO WIPE AND RESET THE PRODUCTION DATABASE."
          echo "This action is IRREVERSIBLE. Press Ctrl+C in the runner terminal NOW to cancel."
          echo "Sleeping for 10 seconds..."
          echo "*********************************************************************"
          sleep 10

      - name: Step 1 - (Optional) Backup Specific User Data
        if: ${{ gitea.event.inputs.user_email != '' }}
        run: |
          BACKUP_FILE="user_backup_${USER_EMAIL}.sql"
          echo "Attempting to back up data for user: $USER_EMAIL"

          # Get the user_id for the specified email.
          # SECURITY: the email is passed as a psql variable and referenced
          # with :'email' (quoted-literal interpolation), not spliced into the
          # SQL string — prevents SQL injection via the workflow input. The
          # query is fed on stdin because psql only performs variable
          # interpolation on stdin/-f input, not on -c commands.
          USER_ID=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" \
            -v email="$USER_EMAIL" -t -A <<'SQL'
          SELECT user_id FROM public.users WHERE email = :'email';
          SQL
          )

          if [ -z "$USER_ID" ]; then
            echo "WARNING: User with email '$USER_EMAIL' not found. Skipping backup."
            echo "NO_USER_BACKUP=true" >> "$GITEA_ENV"
          else
            echo "User ID found: $USER_ID. Proceeding with backup..."
            # Use pg_dump to create a data-only dump for all tables that have a
            # direct or indirect relationship to the user_id. We use
            # --data-only and --column-inserts for maximum compatibility.
            PGPASSWORD="$DB_PASSWORD" pg_dump -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" \
              --data-only --column-inserts \
              --table="public.users" --table="public.profiles" --table="public.shopping_lists" \
              --table="public.shopping_list_items" --table="public.user_watched_items" \
              --table="public.pantry_items" --table="public.favorite_recipes" \
              --table="public.receipts" --table="public.receipt_items" \
              --table="public.menu_plans" --table="public.planned_meals" \
              --table="public.user_dietary_restrictions" --table="public.user_appliances" \
              --table="public.user_item_aliases" \
              > "$BACKUP_FILE"

            # Filter the dump to only include rows related to our specific user.
            # This is a simplification; a more robust script would trace all
            # foreign keys. For now, we keep only the user and profile rows.
            # NOTE(review): the grep pattern '$USER_ID' only matches values that
            # pg_dump quotes (text/UUID ids) — confirm user_id is not a bare
            # integer column, which would be emitted unquoted and never match.
            grep "public.users" "$BACKUP_FILE" | grep "'$USER_ID'" > filtered_backup.sql
            grep "public.profiles" "$BACKUP_FILE" | grep "'$USER_ID'" >> filtered_backup.sql
            echo "✅ User data backup created in filtered_backup.sql"
          fi

      - name: Step 2 - Drop All Tables from Production DB
        run: |
          echo "Executing drop_tables.sql against the PRODUCTION database..."
          PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -f sql/drop_tables.sql
          echo "✅ All tables dropped successfully."

      - name: Step 3 - Rebuild Schema from Master Rollup
        run: |
          echo "Executing master_schema_rollup.sql against the PRODUCTION database..."
          PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -f sql/master_schema_rollup.sql
          echo "✅ Schema rebuilt successfully."

      - name: Step 4 - (Optional) Restore Specific User Data
        if: ${{ gitea.event.inputs.user_email != '' && env.NO_USER_BACKUP != 'true' }}
        run: |
          echo "Restoring user data from filtered_backup.sql..."
          PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -f filtered_backup.sql
          echo "✅ User data restored successfully."

      - name: Step 5 - Update Schema Info Table
        run: |
          echo "Updating schema_info table with the new schema hash for 'production'..."
          # Calculate the hash of the current schema file.
          # NOTE(review): this exact pipeline (dos2unix then sha256sum) must
          # match however other workflows compute the hash — do not "simplify".
          CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
          echo "New Schema Hash: $CURRENT_HASH"

          # Insert the new hash into the freshly created schema_info table.
          # $CURRENT_HASH is a locally-computed hex digest, safe to inline.
          PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
            "INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW()) ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"

          # Verify the hash was updated.
          UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
          if [ "$CURRENT_HASH" = "$UPDATED_HASH" ]; then
            echo "✅ Schema hash successfully set in the database to: $UPDATED_HASH"
          else
            echo "ERROR: Failed to set schema hash in the database."
            exit 1
          fi

      - name: Step 6 - Clear Flyer Asset Directories
        run: |
          APP_PATH="/var/www/flyer-crawler.projectium.com"
          echo "Clearing contents of flyer asset directories..."
          # Use find to delete files within the directories, but not the
          # directories themselves. Target only the specific test files by
          # name pattern.
          find "$APP_PATH/flyer-images" -type f -name '*-test-flyer-image.*' -delete
          find "$APP_PATH/flyer-images/icons" -type f -name '*-test-flyer-image.*' -delete
          find "$APP_PATH/flyer-images/archive" -mindepth 1 -maxdepth 1 -type f -delete || echo "Archive directory not found, skipping."
          echo "✅ Flyer asset directories cleared."