better deploys

This commit is contained in:
2025-11-24 17:52:25 -08:00
parent 5255202d43
commit 9c2a19fad4
3 changed files with 58 additions and 3 deletions

View File

@@ -8,6 +8,9 @@ name: Manual - Reset Production Database
# Manual trigger only — this workflow must never fire automatically.
on:
  workflow_dispatch:
    inputs:
      # Optional: one user whose rows are backed up before the wipe and
      # restored after the schema rebuild. Blank disables backup/restore.
      user_email:
        description: 'Email of the user to preserve (e.g., tsx@gmail.com). Leave blank to skip backup/restore.'
        required: false
      # Manual safety latch; the job checks this against the literal
      # string "reset-production-db" before touching the database.
      confirmation:
        description: 'DANGER: This will WIPE the production database. Type "reset-production-db" to confirm.'
        required: true
@@ -46,19 +49,64 @@ jobs:
echo "*********************************************************************"
sleep 10
- name: Step 1 - Drop All Tables from Production DB
# Optionally snapshot one user's rows before the destructive reset.
# Runs only when a user_email was supplied on dispatch.
- name: Step 1 - (Optional) Backup Specific User Data
  if: ${{ gitea.event.inputs.user_email != '' }}
  run: |
    USER_EMAIL="${{ gitea.event.inputs.user_email }}"
    BACKUP_FILE="user_backup_${USER_EMAIL}.sql"
    echo "Attempting to back up data for user: $USER_EMAIL"
    # Look up the user_id for the email. Bind the email as a psql
    # variable (:'email') instead of interpolating it into the SQL text,
    # so an apostrophe or other metacharacter in the input can neither
    # break the statement nor inject SQL.
    USER_ID=$(PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_DATABASE" \
      -v email="$USER_EMAIL" \
      -c "SELECT user_id FROM public.users WHERE email = :'email';" -t -A)
    if [ -z "$USER_ID" ]; then
      echo "WARNING: User with email '$USER_EMAIL' not found. Skipping backup."
      # Flag consumed by the restore step's `if:` so it is skipped too.
      echo "NO_USER_BACKUP=true" >> "$GITEA_ENV"
    else
      echo "User ID found: $USER_ID. Proceeding with backup..."
      # Data-only dump with one INSERT per row (--column-inserts) so the
      # output can be filtered line-by-line below and replayed with psql.
      PGPASSWORD="$DB_PASSWORD" pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_DATABASE" \
        --data-only --column-inserts \
        --table="public.users" --table="public.profiles" --table="public.shopping_lists" \
        --table="public.shopping_list_items" --table="public.user_watched_items" \
        --table="public.pantry_items" --table="public.favorite_recipes" \
        --table="public.receipts" --table="public.receipt_items" \
        --table="public.menu_plans" --table="public.planned_meals" \
        --table="public.user_dietary_restrictions" --table="public.user_appliances" \
        --table="public.user_item_aliases" \
        > "$BACKUP_FILE"
      # NOTE(review): this filter is a simplification — it keeps ONLY the
      # users and profiles rows; every other table dumped above is
      # discarded. It also assumes each INSERT occupies a single line and
      # contains the user_id in single quotes. A robust version would
      # trace foreign keys instead.
      # Anchored match + grep -F: the old unanchored `grep "public.users"`
      # treated "." as a regex wildcard and could match unrelated lines.
      grep "^INSERT INTO public\.users " "$BACKUP_FILE" | grep -F "'$USER_ID'" > filtered_backup.sql
      grep "^INSERT INTO public\.profiles " "$BACKUP_FILE" | grep -F "'$USER_ID'" >> filtered_backup.sql
      echo "✅ User data backup created in filtered_backup.sql"
    fi
# Destructive: drops every table in the production database.
- name: Step 2 - Drop All Tables from Production DB
  run: |
    echo "Executing drop_tables.sql against the PRODUCTION database..."
    # ON_ERROR_STOP=1: without it psql exits 0 even when statements in
    # the script fail, and the job would report success on a partial drop.
    PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_DATABASE" -f sql/drop_tables.sql
    echo "✅ All tables dropped successfully."
- name: Step 2 - Rebuild Schema from Master Rollup
# Recreate the schema from scratch from the canonical rollup file.
- name: Step 3 - Rebuild Schema from Master Rollup
  run: |
    echo "Executing master_schema_rollup.sql against the PRODUCTION database..."
    # ON_ERROR_STOP=1: otherwise psql exits 0 even if part of the schema
    # script fails, and later steps would run against a broken schema.
    PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_DATABASE" -f sql/master_schema_rollup.sql
    echo "✅ Schema rebuilt successfully."
- name: Step 3 - Update Schema Info Table
# Replay the filtered dump produced by the backup step. Skipped when no
# email was given, or when the backup step found no matching user.
- name: Step 4 - (Optional) Restore Specific User Data
  if: ${{ gitea.event.inputs.user_email != '' && env.NO_USER_BACKUP != 'true' }}
  run: |
    echo "Restoring user data from filtered_backup.sql..."
    # ON_ERROR_STOP=1 makes psql exit non-zero on the first failed
    # INSERT, so a broken restore fails the job instead of printing
    # a success message over silently lost data.
    PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_DATABASE" -f filtered_backup.sql
    echo "✅ User data restored successfully."
- name: Step 5 - Update Schema Info Table
run: |
echo "Updating schema_info table with the new schema hash..."
# Calculate the hash of the current schema file.