diff --git a/.gitea/workflows/deploy.yml b/.gitea/workflows/deploy.yml index b23debf0..e2f43cea 100644 --- a/.gitea/workflows/deploy.yml +++ b/.gitea/workflows/deploy.yml @@ -257,7 +257,7 @@ jobs: # Ensure the destination directory exists mkdir -p "$APP_PATH" - mkdir -p "$APP_PATH/flyer-images/icons" # Also ensure our protected image directory exists + mkdir -p "$APP_PATH/flyer-images/icons" "$APP_PATH/flyer-images/archive" # Ensure all required subdirectories exist # 1. Copy the backend source code and project files first. # CRITICAL: We exclude '.env', 'node_modules', '.git', 'dist', and now 'flyer-images' to protect user content. diff --git a/.gitea/workflows/manual-db-backup.yml b/.gitea/workflows/manual-db-backup.yml new file mode 100644 index 00000000..3ca8652e --- /dev/null +++ b/.gitea/workflows/manual-db-backup.yml @@ -0,0 +1,63 @@ +# .gitea/workflows/manual-db-backup.yml +# +# This workflow provides a manual trigger to back up the production database. +# It creates a compressed SQL dump and saves it as a downloadable artifact. +name: Manual - Backup Production Database + +on: + workflow_dispatch: + inputs: + confirmation: + description: 'Type "backup-production-db" to confirm you want to create a backup.' + required: true + default: 'do-not-run' + +jobs: + backup-database: + runs-on: projectium.com # This job runs on your self-hosted Gitea runner. + + env: + # Use production database credentials for this entire job. + DB_HOST: ${{ secrets.DB_HOST }} + DB_PORT: ${{ secrets.DB_PORT }} + DB_USER: ${{ secrets.DB_USER }} + DB_PASSWORD: ${{ secrets.DB_PASSWORD }} + DB_DATABASE: ${{ secrets.DB_DATABASE_PROD }} + + steps: + - name: Validate Secrets + run: | + if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_DATABASE" ]; then + echo "ERROR: One or more production database secrets are not set in Gitea repository settings." + exit 1 + fi + echo "✅ All required database secrets are present." 
+ + - name: Verify Confirmation Phrase + run: | + if [ "${{ gitea.event.inputs.confirmation }}" != "backup-production-db" ]; then + echo "ERROR: Confirmation phrase did not match. Aborting database backup." + exit 1 + fi + echo "✅ Confirmation accepted. Proceeding with database backup." + + - name: Create Database Backup + id: backup + run: | + # Generate a timestamped filename for the backup. + TIMESTAMP=$(date +'%Y%m%d-%H%M%S') + BACKUP_FILENAME="flyer-crawler-prod-backup-${TIMESTAMP}.sql.gz" + echo "Creating backup file: $BACKUP_FILENAME" + + # Use pg_dump to create a plain-text SQL dump, then pipe it to gzip for compression. + # This is more efficient than creating a large uncompressed file first. + PGPASSWORD="$DB_PASSWORD" pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_DATABASE" --clean --if-exists | gzip > "$BACKUP_FILENAME" + + echo "✅ Database backup created successfully." + echo "backup_filename=$BACKUP_FILENAME" >> $GITEA_ENV + + - name: Upload Backup as Artifact + uses: actions/upload-artifact@v3 + with: + name: database-backup + path: ${{ env.backup_filename }} \ No newline at end of file diff --git a/.gitea/workflows/manual-db-reset.yml b/.gitea/workflows/manual-db-reset.yml index 3d41dd7d..50ac4918 100644 --- a/.gitea/workflows/manual-db-reset.yml +++ b/.gitea/workflows/manual-db-reset.yml @@ -134,4 +134,15 @@ jobs: else echo "ERROR: Failed to set schema hash in the database." exit 1 - fi \ No newline at end of file + fi + + - name: Step 6 - Clear Flyer Asset Directories + run: | + APP_PATH="/var/www/flyer-crawler.projectium.com" + echo "Clearing contents of flyer asset directories..." + # Use find to delete files within the directories, but not the directories themselves. + # This is safer than `rm -rf` as it won't fail if a directory doesn't exist. 
+ find "$APP_PATH/flyer-images" -mindepth 1 -maxdepth 1 -type f -delete + find "$APP_PATH/flyer-images/icons" -mindepth 1 -maxdepth 1 -type f -delete + find "$APP_PATH/flyer-images/archive" -mindepth 1 -maxdepth 1 -type f -delete || echo "Archive directory not found, skipping." + echo "✅ Flyer asset directories cleared." \ No newline at end of file diff --git a/.gitea/workflows/manual-db-restore.yml b/.gitea/workflows/manual-db-restore.yml new file mode 100644 index 00000000..13207cc8 --- /dev/null +++ b/.gitea/workflows/manual-db-restore.yml @@ -0,0 +1,96 @@ +# .gitea/workflows/manual-db-restore.yml +# +# DANGER: This workflow is DESTRUCTIVE. It restores the production database from a backup file. +# It should be run manually with extreme caution. +name: Manual - Restore Production Database from Backup + +on: + workflow_dispatch: + inputs: + backup_filename: + description: 'The exact filename of the backup (.sql.gz) located in /var/www/backups/' + required: true + confirmation: + description: 'DANGER: This will WIPE the production DB. Type "restore-production-db" to confirm.' + required: true + default: 'do-not-run' + +jobs: + restore-database: + runs-on: projectium.com # This job runs on your self-hosted Gitea runner. + + env: + # Use production database credentials for this entire job. + DB_HOST: ${{ secrets.DB_HOST }} + DB_PORT: ${{ secrets.DB_PORT }} + DB_USER: ${{ secrets.DB_USER }} + DB_PASSWORD: ${{ secrets.DB_PASSWORD }} + DB_DATABASE: ${{ secrets.DB_DATABASE_PROD }} + BACKUP_DIR: "/var/www/backups" # Define a dedicated directory for backups + + steps: + - name: Validate Secrets and Inputs + run: | + if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_DATABASE" ]; then + echo "ERROR: One or more production database secrets are not set in Gitea repository settings." + exit 1 + fi + if [ "${{ gitea.event.inputs.confirmation }}" != "restore-production-db" ]; then + echo "ERROR: Confirmation phrase did not match. 
Aborting database restore." + exit 1 + fi + if [ -z "${{ gitea.event.inputs.backup_filename }}" ]; then + echo "ERROR: Backup filename cannot be empty." + exit 1 + fi + echo "✅ Confirmation accepted. Proceeding with database restore." + + - name: 🚨 FINAL WARNING & PAUSE 🚨 + run: | + echo "*********************************************************************" + echo "WARNING: YOU ARE ABOUT TO WIPE AND RESTORE THE PRODUCTION DATABASE." + echo "This action is IRREVERSIBLE. Press Ctrl+C in the runner terminal NOW to cancel." + echo "Restoring from file: ${{ gitea.event.inputs.backup_filename }}" + echo "Sleeping for 10 seconds..." + echo "*********************************************************************" + sleep 10 + + - name: Step 1 - Stop Application Server + run: | + echo "Stopping all PM2 processes to release database connections..." + pm2 stop all || echo "PM2 processes were not running." + echo "✅ Application server stopped." + + - name: Step 2 - Drop and Recreate Database + run: | + echo "Dropping and recreating the production database..." + # Connect as the superuser (postgres) to drop the database. + # First, terminate all active connections to the database. + sudo -u postgres psql -c "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '${DB_DATABASE}';" + # Now, drop and recreate it. + sudo -u postgres psql -c "DROP DATABASE IF EXISTS \"${DB_DATABASE}\";" + sudo -u postgres psql -c "CREATE DATABASE \"${DB_DATABASE}\" WITH OWNER = ${DB_USER};" + echo "✅ Database dropped and recreated successfully." + + - name: Step 3 - Restore Database from Backup + run: | + BACKUP_FILE_PATH="${BACKUP_DIR}/${{ gitea.event.inputs.backup_filename }}" + echo "Restoring database from: $BACKUP_FILE_PATH" + + if [ ! -f "$BACKUP_FILE_PATH" ]; then + echo "ERROR: Backup file not found at $BACKUP_FILE_PATH" + exit 1 + fi + + # Uncompress the gzipped file and pipe the SQL commands directly into psql. 
+ # This is efficient as it doesn't require an intermediate uncompressed file. + gunzip < "$BACKUP_FILE_PATH" | PGPASSWORD="$DB_PASSWORD" psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_DATABASE" + + echo "✅ Database restore completed successfully." + + - name: Step 4 - Restart Application Server + run: | + echo "Restarting application server..." + cd /var/www/flyer-crawler.projectium.com + pm2 startOrReload ecosystem.config.cjs --env production && pm2 save + echo "✅ Application server restarted." \ No newline at end of file diff --git a/sql/drop_tables.sql b/sql/drop_tables.sql index 06bf0b50..46787621 100644 --- a/sql/drop_tables.sql +++ b/sql/drop_tables.sql @@ -67,6 +67,7 @@ DROP TABLE IF EXISTS public.tags CASCADE; DROP TABLE IF EXISTS public.appliances CASCADE; DROP TABLE IF EXISTS public.dietary_restrictions CASCADE; DROP TABLE IF EXISTS public.categories CASCADE; +DROP TABLE IF EXISTS public.addresses CASCADE; DROP TABLE IF EXISTS public.achievements CASCADE; DROP TABLE IF EXISTS public.budgets CASCADE; DROP TABLE IF EXISTS public.profiles CASCADE; diff --git a/sql/initial_schema.sql b/sql/initial_schema.sql index ea7b9e6d..181d1c5b 100644 --- a/sql/initial_schema.sql +++ b/sql/initial_schema.sql @@ -11,6 +11,7 @@ CREATE TABLE IF NOT EXISTS public.users ( refresh_token TEXT, failed_login_attempts INTEGER DEFAULT 0, last_failed_login TIMESTAMPTZ, + last_login_ip TEXT, created_at TIMESTAMPTZ DEFAULT now() NOT NULL, updated_at TIMESTAMPTZ DEFAULT now() NOT NULL ); @@ -18,6 +19,7 @@ COMMENT ON TABLE public.users IS 'Stores user authentication information.'; COMMENT ON COLUMN public.users.refresh_token IS 'Stores the long-lived refresh token for re-authentication.'; COMMENT ON COLUMN public.users.failed_login_attempts IS 'Tracks the number of consecutive failed login attempts.'; COMMENT ON COLUMN public.users.last_failed_login IS 'Timestamp of the last failed login attempt.'; +COMMENT ON COLUMN public.users.last_login_ip IS 'The IP address from which the 
user last successfully logged in.'; -- Add an index on the refresh_token for faster lookups when refreshing tokens. CREATE INDEX IF NOT EXISTS idx_users_refresh_token ON public.users(refresh_token); CREATE UNIQUE INDEX IF NOT EXISTS idx_users_email ON public.users (email); @@ -40,16 +42,12 @@ CREATE INDEX IF NOT EXISTS idx_activity_log_user_id ON public.activity_log(user_ -- 3. for public user profiles. -- This table is linked to the users table and stores non-sensitive user data. +-- This table now references the new `addresses` table for the user's home address. CREATE TABLE IF NOT EXISTS public.profiles ( user_id UUID PRIMARY KEY REFERENCES public.users(user_id) ON DELETE CASCADE, full_name TEXT, avatar_url TEXT, - address_line_1 TEXT, - address_line_2 TEXT, - city VARCHAR(255), - province_state VARCHAR(255), - postal_code VARCHAR(10), - country VARCHAR(2), + address_id BIGINT REFERENCES public.addresses(address_id) ON DELETE SET NULL, preferences JSONB, role TEXT CHECK (role IN ('admin', 'user')), points INTEGER DEFAULT 0 NOT NULL, @@ -59,12 +57,7 @@ CREATE TABLE IF NOT EXISTS public.profiles ( updated_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL ); COMMENT ON TABLE public.profiles IS 'Stores public-facing user data, linked to the public.users table.'; -COMMENT ON COLUMN public.profiles.address_line_1 IS 'Optional. The first line of the user''s street address.'; -COMMENT ON COLUMN public.profiles.address_line_2 IS 'Optional. The second line of the user''s street address (e.g., apartment, suite).'; -COMMENT ON COLUMN public.profiles.city IS 'Optional. The user''s city for regional content filtering.'; -COMMENT ON COLUMN public.profiles.province_state IS 'Optional. The user''s province or state.'; -COMMENT ON COLUMN public.profiles.postal_code IS 'Optional. The user''s postal or ZIP code.'; -COMMENT ON COLUMN public.profiles.country IS 'Optional. 
The user''s two-letter ISO 3166-1 alpha-2 country code (e.g., CA, US).'; +COMMENT ON COLUMN public.profiles.address_id IS 'A foreign key to the user''s primary address in the `addresses` table.'; COMMENT ON COLUMN public.profiles.points IS 'A simple integer column to store a user''s total accumulated points from achievements.'; -- 4. The 'stores' table for normalized store data. @@ -217,21 +210,38 @@ CREATE INDEX IF NOT EXISTS idx_notifications_user_id_created_at ON public.notifi -- 12. Store individual store locations with geographic data. CREATE TABLE IF NOT EXISTS public.store_locations ( store_location_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE, - address TEXT NOT NULL, - city TEXT, - province_state TEXT, - postal_code TEXT, - location GEOGRAPHY(Point, 4326), + store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE, + address_id BIGINT NOT NULL REFERENCES public.addresses(address_id) ON DELETE CASCADE, created_at TIMESTAMPTZ DEFAULT now() NOT NULL, updated_at TIMESTAMPTZ DEFAULT now() NOT NULL ); COMMENT ON TABLE public.store_locations IS 'Stores physical locations of stores with geographic data for proximity searches.'; -COMMENT ON COLUMN public.store_locations.location IS 'Geographic coordinates (longitude, latitude) of the store.'; CREATE INDEX IF NOT EXISTS idx_store_locations_store_id ON public.store_locations(store_id); -- Add a GIST index for efficient geographic queries. -- This requires the postgis extension. -CREATE INDEX IF NOT EXISTS store_locations_geo_idx ON public.store_locations USING GIST (location); +-- CREATE INDEX IF NOT EXISTS store_locations_geo_idx ON public.store_locations USING GIST (location); + +-- NEW TABLE: A centralized table for storing all physical addresses. 
+CREATE TABLE IF NOT EXISTS public.addresses ( + address_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + address_line_1 TEXT NOT NULL, + address_line_2 TEXT, + city TEXT NOT NULL, + province_state TEXT NOT NULL, + postal_code TEXT NOT NULL, + country TEXT NOT NULL, + latitude NUMERIC(9, 6), + longitude NUMERIC(9, 6), + location GEOGRAPHY(Point, 4326), + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + updated_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.addresses IS 'A centralized table for storing all physical addresses for users and stores.'; +COMMENT ON COLUMN public.addresses.latitude IS 'The geographic latitude.'; +COMMENT ON COLUMN public.addresses.longitude IS 'The geographic longitude.'; +COMMENT ON COLUMN public.addresses.location IS 'A PostGIS geography type for efficient spatial queries.'; +CREATE INDEX IF NOT EXISTS addresses_location_idx ON public.addresses USING GIST (location); + -- 13. For aggregated, historical price data for master items. CREATE TABLE IF NOT EXISTS public.item_price_history ( diff --git a/sql/master_schema_rollup.sql b/sql/master_schema_rollup.sql index 8583219b..701e092d 100644 --- a/sql/master_schema_rollup.sql +++ b/sql/master_schema_rollup.sql @@ -27,6 +27,7 @@ CREATE TABLE IF NOT EXISTS public.users ( refresh_token TEXT, failed_login_attempts INTEGER DEFAULT 0, last_failed_login TIMESTAMPTZ, + last_login_ip TEXT, created_at TIMESTAMPTZ DEFAULT now() NOT NULL, updated_at TIMESTAMPTZ DEFAULT now() NOT NULL ); @@ -34,6 +35,7 @@ COMMENT ON TABLE public.users IS 'Stores user authentication information.'; COMMENT ON COLUMN public.users.refresh_token IS 'Stores the long-lived refresh token for re-authentication.'; COMMENT ON COLUMN public.users.failed_login_attempts IS 'Tracks the number of consecutive failed login attempts.'; COMMENT ON COLUMN public.users.last_failed_login IS 'Timestamp of the last failed login attempt.'; +COMMENT ON COLUMN public.users.last_login_ip IS 'The IP address from which the 
user last successfully logged in.'; -- Add an index on the refresh_token for faster lookups when refreshing tokens. CREATE INDEX IF NOT EXISTS idx_users_refresh_token ON public.users(refresh_token); CREATE UNIQUE INDEX IF NOT EXISTS idx_users_email ON public.users (email); @@ -56,16 +58,12 @@ CREATE INDEX IF NOT EXISTS idx_activity_log_user_id ON public.activity_log(user_ -- 3. for public user profiles. -- This table is linked to the users table and stores non-sensitive user data. +-- This table now references the new `addresses` table for the user's home address. CREATE TABLE IF NOT EXISTS public.profiles ( user_id UUID PRIMARY KEY REFERENCES public.users(user_id) ON DELETE CASCADE, full_name TEXT, avatar_url TEXT, - address_line_1 TEXT, - address_line_2 TEXT, - city VARCHAR(255), - province_state VARCHAR(255), - postal_code VARCHAR(10), - country VARCHAR(2), + address_id BIGINT REFERENCES public.addresses(address_id) ON DELETE SET NULL, points INTEGER DEFAULT 0 NOT NULL, preferences JSONB, role TEXT CHECK (role IN ('admin', 'user')), @@ -75,12 +73,7 @@ CREATE TABLE IF NOT EXISTS public.profiles ( updated_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL ); COMMENT ON TABLE public.profiles IS 'Stores public-facing user data, linked to the public.users table.'; -COMMENT ON COLUMN public.profiles.address_line_1 IS 'Optional. The first line of the user''s street address.'; -COMMENT ON COLUMN public.profiles.address_line_2 IS 'Optional. The second line of the user''s street address (e.g., apartment, suite).'; -COMMENT ON COLUMN public.profiles.city IS 'Optional. The user''s city for regional content filtering.'; -COMMENT ON COLUMN public.profiles.province_state IS 'Optional. The user''s province or state.'; -COMMENT ON COLUMN public.profiles.postal_code IS 'Optional. The user''s postal or ZIP code.'; -COMMENT ON COLUMN public.profiles.country IS 'Optional. 
The user''s two-letter ISO 3166-1 alpha-2 country code (e.g., CA, US).'; +COMMENT ON COLUMN public.profiles.address_id IS 'A foreign key to the user''s primary address in the `addresses` table.'; COMMENT ON COLUMN public.profiles.points IS 'A simple integer column to store a user''s total accumulated points from achievements.'; -- 4. The 'stores' table for normalized store data. @@ -234,21 +227,37 @@ CREATE INDEX IF NOT EXISTS idx_notifications_user_id_created_at ON public.notifi -- 12. Store individual store locations with geographic data. CREATE TABLE IF NOT EXISTS public.store_locations ( store_location_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE, - address TEXT NOT NULL, - city TEXT, - province_state TEXT, - postal_code TEXT, - location GEOGRAPHY(Point, 4326), + store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE, + address_id BIGINT NOT NULL REFERENCES public.addresses(address_id) ON DELETE CASCADE, created_at TIMESTAMPTZ DEFAULT now() NOT NULL, updated_at TIMESTAMPTZ DEFAULT now() NOT NULL ); COMMENT ON TABLE public.store_locations IS 'Stores physical locations of stores with geographic data for proximity searches.'; -COMMENT ON COLUMN public.store_locations.location IS 'Geographic coordinates (longitude, latitude) of the store.'; CREATE INDEX IF NOT EXISTS idx_store_locations_store_id ON public.store_locations(store_id); -- Add a GIST index for efficient geographic queries. -- This requires the postgis extension. -CREATE INDEX IF NOT EXISTS store_locations_geo_idx ON public.store_locations USING GIST (location); +-- CREATE INDEX IF NOT EXISTS store_locations_geo_idx ON public.store_locations USING GIST (location); + +-- NEW TABLE: A centralized table for storing all physical addresses. 
+CREATE TABLE IF NOT EXISTS public.addresses ( + address_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + address_line_1 TEXT NOT NULL, + address_line_2 TEXT, + city TEXT NOT NULL, + province_state TEXT NOT NULL, + postal_code TEXT NOT NULL, + country TEXT NOT NULL, + latitude NUMERIC(9, 6), + longitude NUMERIC(9, 6), + location GEOGRAPHY(Point, 4326), + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + updated_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.addresses IS 'A centralized table for storing all physical addresses for users and stores.'; +COMMENT ON COLUMN public.addresses.latitude IS 'The geographic latitude.'; +COMMENT ON COLUMN public.addresses.longitude IS 'The geographic longitude.'; +COMMENT ON COLUMN public.addresses.location IS 'A PostGIS geography type for efficient spatial queries.'; +CREATE INDEX IF NOT EXISTS addresses_location_idx ON public.addresses USING GIST (location); -- 13. For aggregated, historical price data for master items. CREATE TABLE IF NOT EXISTS public.item_price_history ( diff --git a/src/components/MapView.tsx b/src/components/MapView.tsx new file mode 100644 index 00000000..f7a2f842 --- /dev/null +++ b/src/components/MapView.tsx @@ -0,0 +1,30 @@ +// src/components/MapView.tsx +import React from 'react'; + +interface MapViewProps { + latitude: number; + longitude: number; +} + +const apiKey = import.meta.env.VITE_GOOGLE_MAPS_EMBED_API_KEY; + +export const MapView: React.FC = ({ latitude, longitude }) => { + if (!apiKey) { + return
Map view is disabled: API key is not configured.
;
  }

  const mapSrc = `https://www.google.com/maps/embed/v1/view?key=${apiKey}&center=${latitude},${longitude}&zoom=14`;

  return (
+ +
+ ); +}; diff --git a/src/features/flyer/FlyerList.tsx b/src/features/flyer/FlyerList.tsx index f9fe6b5a..ea237df0 100644 --- a/src/features/flyer/FlyerList.tsx +++ b/src/features/flyer/FlyerList.tsx @@ -85,13 +85,13 @@ export const FlyerList: React.FC = ({ flyers, onFlyerSelect, sel )}
-
-

+

+

{flyer.store?.name || 'Unknown Store'}

{flyer.store_address && ( e.stopPropagation()} title={`View address: ${flyer.store_address}`}> - + )}
diff --git a/src/hooks/useDebounce.ts b/src/hooks/useDebounce.ts new file mode 100644 index 00000000..eba7ff96 --- /dev/null +++ b/src/hooks/useDebounce.ts @@ -0,0 +1,21 @@ +// src/hooks/useDebounce.ts +import { useState, useEffect } from 'react'; + +/** + * A custom hook that debounces a value. + * It will only update the returned value after the specified delay has passed + * without the input value changing. + * @param value The value to debounce. + * @param delay The debounce delay in milliseconds. + * @returns The debounced value. + */ +export function useDebounce(value: T, delay: number): T { + const [debouncedValue, setDebouncedValue] = useState(value); + + useEffect(() => { + const handler = setTimeout(() => setDebouncedValue(value), delay); + return () => clearTimeout(handler); + }, [value, delay]); + + return debouncedValue; +} \ No newline at end of file diff --git a/src/pages/admin/components/AddressForm.tsx b/src/pages/admin/components/AddressForm.tsx new file mode 100644 index 00000000..aa4ce01e --- /dev/null +++ b/src/pages/admin/components/AddressForm.tsx @@ -0,0 +1,71 @@ +// src/pages/admin/components/AddressForm.tsx +import React from 'react'; +import { Address } from '../../../types'; +import { MapPinIcon } from 'lucide-react'; +import { LoadingSpinner } from '../../../components/LoadingSpinner'; + +interface AddressFormProps { + address: Partial
; + onAddressChange: (field: keyof Address, value: string) => void; + isGeocoding: boolean; +} + +export const AddressForm: React.FC = ({ address, onAddressChange, isGeocoding }) => { + const handleInputChange = (e: React.ChangeEvent) => { + const { name, value } = e.target; + onAddressChange(name as keyof Address, value); + }; + + return ( +
+
+

Home Address

+ {isGeocoding && ( +
+ )} +
+
+ + +
+
+ + +
+
+
+ + +
+
+ + +
+
+
+
+ + +
+
+ + +
+
+
+ ); +}; \ No newline at end of file diff --git a/src/pages/admin/components/ProfileManager.tsx b/src/pages/admin/components/ProfileManager.tsx index 91a581b3..e2592ba1 100644 --- a/src/pages/admin/components/ProfileManager.tsx +++ b/src/pages/admin/components/ProfileManager.tsx @@ -1,6 +1,7 @@ // src/pages/admin/components/ProfileManager.tsx import React, { useState, useEffect } from 'react'; -import type { Profile } from '../../../types'; +import toast from 'react-hot-toast'; +import type { Profile, Address, User } from '../../../types'; import { useApi } from '../../../hooks/useApi'; import * as apiClient from '../../../services/apiClient'; import { notifySuccess, notifyError } from '../../../services/notificationService'; @@ -10,8 +11,10 @@ import { XMarkIcon } from '../../../components/icons/XMarkIcon'; import { GoogleIcon } from '../../../components/icons/GoogleIcon'; import { GithubIcon } from '../../../components/icons/GithubIcon'; import { ConfirmationModal } from '../../../components/ConfirmationModal'; -import { User } from '../../../types'; import { PasswordInput } from './PasswordInput'; +import { AddressForm } from './AddressForm'; +import { MapView } from '../../../components/MapView'; +import { useDebounce } from '../../../hooks/useDebounce'; type AuthStatus = 'SIGNED_OUT' | 'ANONYMOUS' | 'AUTHENTICATED'; interface ProfileManagerProps { @@ -37,6 +40,8 @@ export const ProfileManager: React.FC = ({ isOpen, onClose, const [fullName, setFullName] = useState(profile?.full_name || ''); const [avatarUrl, setAvatarUrl] = useState(profile?.avatar_url || ''); const { execute: updateProfile, loading: profileLoading } = useApi]>(apiClient.updateUserProfile); + const [isGeocoding, setIsGeocoding] = useState(false); + const [address, setAddress] = useState>({}); // Password state const [password, setPassword] = useState(''); @@ -73,6 +78,16 @@ export const ProfileManager: React.FC = ({ isOpen, onClose, if (isOpen && profile) { // Ensure profile exists before 
setting state setFullName(profile?.full_name || ''); setAvatarUrl(profile?.avatar_url || ''); + // If the user has an address, fetch its details + if (profile.address_id) { + apiClient.getUserAddress(profile.address_id) + .then((res: Response) => res.json()) + .then((data: Address) => setAddress(data)) + .catch((err: Error) => toast.error(`Could not load address details: ${err.message}`)); + } else { + // Reset address form if user has no address + setAddress({}); + } setActiveTab('profile'); setIsConfirmingDelete(false); setPasswordForDelete(''); @@ -83,6 +98,8 @@ export const ProfileManager: React.FC = ({ isOpen, onClose, setIsRegistering(false); setIsForgotPassword(false); setRememberMe(false); // Reset on open + } else { + setAddress({}); } }, [isOpen, profile]); // Depend on isOpen and profile @@ -94,15 +111,23 @@ export const ProfileManager: React.FC = ({ isOpen, onClose, } try { - const updatedProfile = await updateProfile({ // This now calls the hook's execute function - full_name: fullName, - avatar_url: avatarUrl, + // Update profile and address in parallel + const profileUpdatePromise = updateProfile({ + full_name: fullName, + avatar_url: avatarUrl, }); + const addressUpdatePromise = apiClient.updateUserAddress(address); - if (updatedProfile) { - onProfileUpdate(updatedProfile); - notifySuccess('Profile updated successfully!'); + const [profileResponse] = await Promise.all([ + profileUpdatePromise, + addressUpdatePromise + ]); + + if (profileResponse) { + onProfileUpdate(profileResponse); } + notifySuccess('Profile and address updated successfully!'); + onClose(); } catch (error) { // Although the useApi hook is designed to handle errors, we log here // as a safeguard to catch any unexpected issues during profile save. 
@@ -110,6 +135,46 @@ export const ProfileManager: React.FC = ({ isOpen, onClose, } }; + const handleAddressChange = (field: keyof Address, value: string) => { + setAddress(prev => ({ ...prev, [field]: value })); + }; + + // --- Automatic Geocoding Logic --- + const debouncedAddress = useDebounce(address, 1500); // Debounce address state by 1.5 seconds + + useEffect(() => { + // This effect runs when the debouncedAddress value changes. + const handleGeocode = async () => { + // Only trigger if the core address fields are present and have changed. + const addressString = [ + debouncedAddress.address_line_1, + debouncedAddress.city, + debouncedAddress.province_state, + debouncedAddress.postal_code, + debouncedAddress.country, + ].filter(Boolean).join(', '); + + // Don't geocode an empty address or if we already have coordinates for this exact address. + if (!addressString || (debouncedAddress.latitude && debouncedAddress.longitude)) { + return; + } + + setIsGeocoding(true); + try { + const response = await apiClient.geocodeAddress(addressString); + const { lat, lng } = await response.json(); + setAddress(prev => ({ ...prev, latitude: lat, longitude: lng })); + toast.success('Address geocoded successfully!'); + } catch (error) { + toast.error('Failed to geocode address.'); + } finally { + setIsGeocoding(false); + } + }; + + handleGeocode(); + }, [debouncedAddress]); // Dependency array ensures this runs only when the debounced value changes. + const handleOAuthLink = async (provider: 'google' | 'github') => { // This will redirect the user to the OAuth provider to link the account. // TODO: This is a placeholder. Implement OAuth account linking via the Passport.js backend. @@ -376,6 +441,14 @@ export const ProfileManager: React.FC = ({ isOpen, onClose, setAvatarUrl(e.target.value)} className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm" />
+
+ +
+ {address.latitude && address.longitude && ( +
+ +
+ )}