From f6ce97019e33231804d0b156649db70023ffea1b Mon Sep 17 00:00:00 2001 From: Torben Sorensen Date: Mon, 10 Nov 2025 08:28:40 -0800 Subject: [PATCH] imported files from google gemini ai BUILD env --- .gitignore | 29 + App.tsx | 893 ++++++++++++ README.md | 433 +++++- components/AnalysisPanel.tsx | 174 +++ components/AuthModal.tsx | 200 +++ components/BulkImportSummary.tsx | 84 ++ components/BulkImporter.tsx | 77 ++ components/DarkModeToggle.tsx | 29 + components/DatabaseControls.tsx | 108 ++ components/DatabaseSeeder.tsx | 68 + components/DevTestRunner.tsx | 110 ++ components/ErrorDisplay.tsx | 17 + components/ExtractedDataTable.tsx | 187 +++ components/FileUpload.tsx | 5 + components/FlyerDisplay.tsx | 58 + components/FlyerList.tsx | 66 + components/Header.tsx | 95 ++ components/LoadingSpinner.tsx | 9 + components/LoginPage.tsx | 98 ++ components/PriceChart.tsx | 101 ++ components/PriceHistoryChart.tsx | 187 +++ components/ProcessingStatus.tsx | 226 +++ components/ProfileManager.tsx | 267 ++++ components/SampleDataButton.tsx | 20 + components/ShoppingList.tsx | 206 +++ components/SupabaseConnector.tsx | 102 ++ components/SystemCheck.tsx | 177 +++ components/TopDeals.tsx | 41 + components/UnitSystemToggle.tsx | 31 + components/VoiceAssistant.tsx | 192 +++ components/WatchedItemsList.tsx | 186 +++ components/icons/BeakerIcon.tsx | 7 + components/icons/BrainIcon.tsx | 9 + components/icons/CheckCircleIcon.tsx | 7 + components/icons/CogIcon.tsx | 7 + components/icons/DatabaseIcon.tsx | 8 + components/icons/DocumentTextIcon.tsx | 7 + components/icons/ExclamationTriangleIcon.tsx | 7 + components/icons/EyeIcon.tsx | 8 + components/icons/GithubIcon.tsx | 7 + components/icons/GoogleIcon.tsx | 10 + components/icons/InformationCircleIcon.tsx | 7 + components/icons/LightbulbIcon.tsx | 9 + components/icons/ListBulletIcon.tsx | 7 + components/icons/MapPinIcon.tsx | 8 + components/icons/MicrophoneIcon.tsx | 7 + components/icons/MoonIcon.tsx | 8 + components/icons/PdfIcon.tsx | 8 + 
components/icons/PhotoIcon.tsx | 7 + components/icons/PlugIcon.tsx | 8 + components/icons/PlusCircleIcon.tsx | 7 + components/icons/SearchIcon.tsx | 9 + components/icons/ServerIcon.tsx | 7 + components/icons/ShieldCheckIcon.tsx | 7 + components/icons/ShoppingCartIcon.tsx | 9 + components/icons/SortAscIcon.tsx | 7 + components/icons/SortDescIcon.tsx | 7 + components/icons/SparklesIcon.tsx | 7 + components/icons/SpeakerWaveIcon.tsx | 7 + components/icons/SunIcon.tsx | 8 + components/icons/TagIcon.tsx | 9 + components/icons/TrashIcon.tsx | 7 + components/icons/TrophyIcon.tsx | 8 + components/icons/UploadIcon.tsx | 9 + components/icons/UserIcon.tsx | 8 + components/icons/XCircleIcon.tsx | 7 + components/icons/XMarkIcon.tsx | 8 + ideas.txt | 5 + index.html | 54 + index.tsx | 17 + metadata.json | 8 + notes-for-ai.txt | 8 + package copy.json | 5 + package.json | 24 + services/geminiService.ts | 429 ++++++ services/supabaseClient.ts | 806 +++++++++++ sql/2025-11-05 | 1215 +++++++++++++++++ sql/delete_all_tables.sql.txt | 38 + sql/drop_tables.sql | 15 + sql/fix_permissions.sql.txt | 79 ++ sql/functions.sql | 367 +++++ sql/initial.sql | 657 +++++++++ sql/schema.sql.txt | 1186 ++++++++++++++++ sql/triggers.sql | 183 +++ supabase/functions/_shared/.temp/cli-latest | 1 + .../_shared/.temp/functions/_shared/cors.ts | 6 + .../functions/_shared/.temp/gotrue-version | 1 + supabase/functions/_shared/.temp/pooler-url | 1 + .../functions/_shared/.temp/postgres-version | 1 + supabase/functions/_shared/.temp/project-ref | 1 + supabase/functions/_shared/.temp/rest-version | 1 + .../functions/_shared/.temp/storage-migration | 1 + .../functions/_shared/.temp/storage-version | 1 + supabase/functions/_shared/config.toml | 33 + supabase/functions/_shared/cors.ts | 6 + supabase/functions/delete-user/.npmrc | 3 + supabase/functions/delete-user/deno.json | 3 + supabase/functions/delete-user/index.ts | 107 ++ supabase/functions/seed-database/.npmrc | 3 + supabase/functions/seed-database/deno.json 
| 3 + supabase/functions/seed-database/index.ts | 113 ++ supabase/functions/system-check/.npmrc | 3 + supabase/functions/system-check/deno.json | 3 + supabase/functions/system-check/index.ts | 129 ++ tsconfig.json | 29 + types.ts | 283 ++++ utils/audioUtils.ts | 58 + utils/checksum.ts | 12 + utils/pdfConverter.ts | 66 + utils/priceParser.ts | 55 + utils/processingTimer.ts | 44 + utils/timeout.ts | 23 + utils/unitConverter.ts | 95 ++ vite.config.ts | 23 + 114 files changed, 10955 insertions(+), 2 deletions(-) create mode 100644 .gitignore create mode 100644 App.tsx create mode 100644 components/AnalysisPanel.tsx create mode 100644 components/AuthModal.tsx create mode 100644 components/BulkImportSummary.tsx create mode 100644 components/BulkImporter.tsx create mode 100644 components/DarkModeToggle.tsx create mode 100644 components/DatabaseControls.tsx create mode 100644 components/DatabaseSeeder.tsx create mode 100644 components/DevTestRunner.tsx create mode 100644 components/ErrorDisplay.tsx create mode 100644 components/ExtractedDataTable.tsx create mode 100644 components/FileUpload.tsx create mode 100644 components/FlyerDisplay.tsx create mode 100644 components/FlyerList.tsx create mode 100644 components/Header.tsx create mode 100644 components/LoadingSpinner.tsx create mode 100644 components/LoginPage.tsx create mode 100644 components/PriceChart.tsx create mode 100644 components/PriceHistoryChart.tsx create mode 100644 components/ProcessingStatus.tsx create mode 100644 components/ProfileManager.tsx create mode 100644 components/SampleDataButton.tsx create mode 100644 components/ShoppingList.tsx create mode 100644 components/SupabaseConnector.tsx create mode 100644 components/SystemCheck.tsx create mode 100644 components/TopDeals.tsx create mode 100644 components/UnitSystemToggle.tsx create mode 100644 components/VoiceAssistant.tsx create mode 100644 components/WatchedItemsList.tsx create mode 100644 components/icons/BeakerIcon.tsx create mode 100644 
components/icons/BrainIcon.tsx create mode 100644 components/icons/CheckCircleIcon.tsx create mode 100644 components/icons/CogIcon.tsx create mode 100644 components/icons/DatabaseIcon.tsx create mode 100644 components/icons/DocumentTextIcon.tsx create mode 100644 components/icons/ExclamationTriangleIcon.tsx create mode 100644 components/icons/EyeIcon.tsx create mode 100644 components/icons/GithubIcon.tsx create mode 100644 components/icons/GoogleIcon.tsx create mode 100644 components/icons/InformationCircleIcon.tsx create mode 100644 components/icons/LightbulbIcon.tsx create mode 100644 components/icons/ListBulletIcon.tsx create mode 100644 components/icons/MapPinIcon.tsx create mode 100644 components/icons/MicrophoneIcon.tsx create mode 100644 components/icons/MoonIcon.tsx create mode 100644 components/icons/PdfIcon.tsx create mode 100644 components/icons/PhotoIcon.tsx create mode 100644 components/icons/PlugIcon.tsx create mode 100644 components/icons/PlusCircleIcon.tsx create mode 100644 components/icons/SearchIcon.tsx create mode 100644 components/icons/ServerIcon.tsx create mode 100644 components/icons/ShieldCheckIcon.tsx create mode 100644 components/icons/ShoppingCartIcon.tsx create mode 100644 components/icons/SortAscIcon.tsx create mode 100644 components/icons/SortDescIcon.tsx create mode 100644 components/icons/SparklesIcon.tsx create mode 100644 components/icons/SpeakerWaveIcon.tsx create mode 100644 components/icons/SunIcon.tsx create mode 100644 components/icons/TagIcon.tsx create mode 100644 components/icons/TrashIcon.tsx create mode 100644 components/icons/TrophyIcon.tsx create mode 100644 components/icons/UploadIcon.tsx create mode 100644 components/icons/UserIcon.tsx create mode 100644 components/icons/XCircleIcon.tsx create mode 100644 components/icons/XMarkIcon.tsx create mode 100644 ideas.txt create mode 100644 index.html create mode 100644 index.tsx create mode 100644 metadata.json create mode 100644 notes-for-ai.txt create mode 100644 package 
copy.json create mode 100644 package.json create mode 100644 services/geminiService.ts create mode 100644 services/supabaseClient.ts create mode 100644 sql/2025-11-05 create mode 100644 sql/delete_all_tables.sql.txt create mode 100644 sql/drop_tables.sql create mode 100644 sql/fix_permissions.sql.txt create mode 100644 sql/functions.sql create mode 100644 sql/initial.sql create mode 100644 sql/schema.sql.txt create mode 100644 sql/triggers.sql create mode 100644 supabase/functions/_shared/.temp/cli-latest create mode 100644 supabase/functions/_shared/.temp/functions/_shared/cors.ts create mode 100644 supabase/functions/_shared/.temp/gotrue-version create mode 100644 supabase/functions/_shared/.temp/pooler-url create mode 100644 supabase/functions/_shared/.temp/postgres-version create mode 100644 supabase/functions/_shared/.temp/project-ref create mode 100644 supabase/functions/_shared/.temp/rest-version create mode 100644 supabase/functions/_shared/.temp/storage-migration create mode 100644 supabase/functions/_shared/.temp/storage-version create mode 100644 supabase/functions/_shared/config.toml create mode 100644 supabase/functions/_shared/cors.ts create mode 100644 supabase/functions/delete-user/.npmrc create mode 100644 supabase/functions/delete-user/deno.json create mode 100644 supabase/functions/delete-user/index.ts create mode 100644 supabase/functions/seed-database/.npmrc create mode 100644 supabase/functions/seed-database/deno.json create mode 100644 supabase/functions/seed-database/index.ts create mode 100644 supabase/functions/system-check/.npmrc create mode 100644 supabase/functions/system-check/deno.json create mode 100644 supabase/functions/system-check/index.ts create mode 100644 tsconfig.json create mode 100644 types.ts create mode 100644 utils/audioUtils.ts create mode 100644 utils/checksum.ts create mode 100644 utils/pdfConverter.ts create mode 100644 utils/priceParser.ts create mode 100644 utils/processingTimer.ts create mode 100644 
utils/timeout.ts create mode 100644 utils/unitConverter.ts create mode 100644 vite.config.ts diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4e4f905 --- /dev/null +++ b/.gitignore @@ -0,0 +1,29 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? + + +# secrets +.env +.env.local diff --git a/App.tsx b/App.tsx new file mode 100644 index 0000000..77ce41d --- /dev/null +++ b/App.tsx @@ -0,0 +1,893 @@ +import React, { useState, useCallback, useEffect } from 'react'; +import { FlyerDisplay } from './components/FlyerDisplay'; +import { ExtractedDataTable } from './components/ExtractedDataTable'; +import { AnalysisPanel } from './components/AnalysisPanel'; +import { PriceChart } from './components/PriceChart'; +import { ErrorDisplay } from './components/ErrorDisplay'; +import { Header } from './components/Header'; +import { isImageAFlyer, extractCoreDataFromImage, extractAddressFromImage, extractLogoFromImage } from './services/geminiService'; +import type { FlyerItem, Flyer, MasterGroceryItem, DealItem, ProcessingStage, StageStatus, Store, Profile, ShoppingList, ShoppingListItem } from './types'; +import { BulkImporter } from './components/BulkImporter'; +import { PriceHistoryChart } from './components/PriceHistoryChart'; +import { supabase, uploadFlyerImage, createFlyerRecord, saveFlyerItems, getFlyers, getFlyerItems, initializeSupabase, findFlyerByChecksum, getWatchedItems, addWatchedItem, getAllMasterItems, getFlyerItemsForFlyers, countFlyerItemsForFlyers, getUserProfile, updateUserPreferences, removeWatchedItem, getShoppingLists, createShoppingList, addShoppingListItem, updateShoppingListItem, removeShoppingListItem, deleteShoppingList, uploadLogoAndUpdateStore } from './services/supabaseClient'; +import { FlyerList } 
from './components/FlyerList'; +import { recordProcessingTime, getAverageProcessingTime } from './utils/processingTimer'; +import { ProcessingStatus } from './components/ProcessingStatus'; +import { SupabaseConnector } from './components/SupabaseConnector'; +import { generateFileChecksum } from './utils/checksum'; +import { convertPdfToImageFiles } from './utils/pdfConverter'; +import { BulkImportSummary } from './components/BulkImportSummary'; +import { WatchedItemsList } from './components/WatchedItemsList'; +import { withTimeout } from './utils/timeout'; +import { Session } from '@supabase/supabase-js'; +import { ProfileManager } from './components/ProfileManager'; +import { ShoppingListComponent } from './components/ShoppingList'; +import { SystemCheck } from './components/SystemCheck'; +import { LoginPage } from './components/LoginPage'; +import { VoiceAssistant } from './components/VoiceAssistant'; + +function App() { + const [isAuthenticated, setIsAuthenticated] = useState(false); + const [isFakeAuth, setIsFakeAuth] = useState(false); + const [loginError, setLoginError] = useState(null); + + const [flyers, setFlyers] = useState([]); + const [selectedFlyer, setSelectedFlyer] = useState(null); + const [flyerItems, setFlyerItems] = useState([]); + const [watchedItems, setWatchedItems] = useState([]); + const [masterItems, setMasterItems] = useState([]); + const [activeDeals, setActiveDeals] = useState([]); + const [activeDealsLoading, setActiveDealsLoading] = useState(false); + const [totalActiveItems, setTotalActiveItems] = useState(0); + + const [isProcessing, setIsProcessing] = useState(false); + const [error, setError] = useState(null); + + const [processingProgress, setProcessingProgress] = useState(0); + const [currentFile, setCurrentFile] = useState(null); + const [fileCount, setFileCount] = useState<{current: number, total: number} | null>(null); + const [importSummary, setImportSummary] = useState<{ + processed: string[]; + skipped: string[]; + errors: 
{ fileName: string; message: string }[]; + } | null>(null); + + const [isDbConnected, setIsDbConnected] = useState(!!supabase); + const [isReady, setIsReady] = useState(false); + const [isDarkMode, setIsDarkMode] = useState(false); + const [unitSystem, setUnitSystem] = useState<'metric' | 'imperial'>('imperial'); + const [session, setSession] = useState(null); + const [profile, setProfile] = useState(null); + const [isProfileManagerOpen, setIsProfileManagerOpen] = useState(false); + const [isVoiceAssistantOpen, setIsVoiceAssistantOpen] = useState(false); + + const [processingStages, setProcessingStages] = useState([]); + const [estimatedTime, setEstimatedTime] = useState(0); + const [pageProgress, setPageProgress] = useState<{current: number, total: number} | null>(null); + + const [shoppingLists, setShoppingLists] = useState([]); + const [activeListId, setActiveListId] = useState(null); + + // Effect to set initial theme based on user profile, local storage, or system preference + useEffect(() => { + if (profile && profile.preferences?.darkMode !== undefined) { + // Preference from DB + const dbDarkMode = profile.preferences.darkMode; + setIsDarkMode(dbDarkMode); + document.documentElement.classList.toggle('dark', dbDarkMode); + } else { + // Fallback to local storage or system preference + const savedMode = localStorage.getItem('darkMode'); + const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches; + const initialDarkMode = savedMode !== null ? 
savedMode === 'true' : prefersDark; + setIsDarkMode(initialDarkMode); + document.documentElement.classList.toggle('dark', initialDarkMode); + } + }, [profile]); + + // Effect to set initial unit system based on user profile or local storage + useEffect(() => { + if (profile && profile.preferences?.unitSystem) { + setUnitSystem(profile.preferences.unitSystem); + } else { + const savedSystem = localStorage.getItem('unitSystem') as 'metric' | 'imperial' | null; + if (savedSystem) { + setUnitSystem(savedSystem); + } + } + }, [profile]); + + + const toggleDarkMode = async () => { + const newMode = !isDarkMode; + setIsDarkMode(newMode); + document.documentElement.classList.toggle('dark', newMode); + + if (session && !isFakeAuth) { + const newPreferences = { ...profile?.preferences, darkMode: newMode }; + setProfile(p => p ? {...p, preferences: newPreferences} : null); + await updateUserPreferences(session.user.id, newPreferences); + } else { + localStorage.setItem('darkMode', String(newMode)); + } + }; + + const toggleUnitSystem = async () => { + const newSystem = unitSystem === 'metric' ? 'imperial' : 'metric'; + setUnitSystem(newSystem); + + if (session && !isFakeAuth) { + // FIX: Explicitly type `newPreferences` to prevent TypeScript from incorrectly widening `newSystem` to a generic `string`. + // This ensures compatibility with the `Profile` type definition. + const newPreferences: Profile['preferences'] = { ...profile?.preferences, unitSystem: newSystem }; + setProfile(p => p ? 
{...p, preferences: newPreferences} : null); + await updateUserPreferences(session.user.id, newPreferences); + } else { + localStorage.setItem('unitSystem', newSystem); + } + }; + + + const fetchFlyers = useCallback(async () => { + if (!supabase) return; + try { + const allFlyers = await getFlyers(); + setFlyers(allFlyers); + } catch(e: any) { + setError(e.message); + } + }, []); + + const fetchWatchedItems = useCallback(async (userId: string | undefined) => { + if (!supabase || !userId) { + setWatchedItems([]); + return; + } + try { + const items = await getWatchedItems(userId); + setWatchedItems(items); + } catch (e: any) { + setError(`Could not fetch watched items: ${e.message}`); + } + }, []); + + const fetchShoppingLists = useCallback(async (userId: string | undefined) => { + if (!supabase || !userId) { + setShoppingLists([]); + setActiveListId(null); + return; + } + try { + const lists = await getShoppingLists(userId); + setShoppingLists(lists); + if (lists.length > 0 && !activeListId) { + setActiveListId(lists[0].id); + } else if (lists.length === 0) { + setActiveListId(null); + } + } catch (e: any) { + setError(`Could not fetch shopping lists: ${e.message}`); + } + }, [activeListId]); + + const fetchMasterItems = useCallback(async () => { + if (!supabase) return; + try { + const items = await getAllMasterItems(); + setMasterItems(items); + } catch (e: any) { + setError(`Could not fetch master item list: ${e.message}`); + } + }, []); + + useEffect(() => { + if (!supabase) { + const storedUrl = localStorage.getItem('supabaseUrl'); + const storedKey = localStorage.getItem('supabaseAnonKey'); + if (storedUrl && storedKey) { + initializeSupabase(storedUrl, storedKey); + setIsDbConnected(true); + } + } else { + setIsDbConnected(true); + } + }, []); + + // Effect to handle authentication state changes. + useEffect(() => { + if (!isDbConnected || !supabase) return; + + // If using fake auth, set up a mock session and state. 
+ // We don't listen to Supabase auth changes in this mode. + if (isFakeAuth) { + const mockSession = { + user: { id: 'test-user-123', email: 'test@test.com' }, + } as unknown as Session; + setSession(mockSession); + setProfile({ id: 'test-user-123' }); + // User-specific data is empty for the fake user. + setWatchedItems([]); + setShoppingLists([]); + return; // Early return to avoid setting up the real auth listener. + } + + // This logic ONLY runs for real Supabase authentication. + const fetchRealUserSessionData = async (session: Session | null) => { + setSession(session); + if (session) { + const userProfile = await getUserProfile(session.user.id); + setProfile(userProfile); + fetchWatchedItems(session.user.id); + fetchShoppingLists(session.user.id); + } else { + setProfile(null); + setWatchedItems([]); + setShoppingLists([]); + } + }; + + supabase.auth.getSession().then(({ data: { session } }) => { + fetchRealUserSessionData(session); + }); + + const { data: { subscription } } = supabase.auth.onAuthStateChange((_event, session) => { + if (_event === "SIGNED_OUT") { + setIsProfileManagerOpen(false); + // On sign out, always de-authenticate, regardless of fake auth status. 
+ setIsAuthenticated(false); + setIsFakeAuth(false); + } + fetchRealUserSessionData(session); + }); + + return () => subscription.unsubscribe(); + }, [isDbConnected, fetchWatchedItems, fetchShoppingLists, isFakeAuth]); + + + useEffect(() => { + if (isDbConnected && isReady) { + fetchFlyers(); + fetchMasterItems(); + } + }, [isDbConnected, isReady, fetchFlyers, fetchMasterItems]); + + + const resetState = useCallback(() => { + setSelectedFlyer(null); + setFlyerItems([]); + setError(null); + setProcessingProgress(0); + setProcessingStages([]); + setImportSummary(null); + setCurrentFile(null); + setPageProgress(null); + setFileCount(null); + }, []); + + const handleFlyerSelect = useCallback(async (flyer: Flyer) => { + setSelectedFlyer(flyer); + setError(null); + setFlyerItems([]); // Clear previous items + + if (!supabase) return; + + try { + const items = await getFlyerItems(flyer.id); + setFlyerItems(items); + } catch (e: any) { + setError(e.message); + } + }, []); + + useEffect(() => { + if (!isProcessing && !selectedFlyer && flyers.length > 0) { + handleFlyerSelect(flyers[0]); + } + }, [flyers, selectedFlyer, handleFlyerSelect, isProcessing]); + + useEffect(() => { + const findActiveDeals = async () => { + if (!isDbConnected || !isReady || flyers.length === 0 || watchedItems.length === 0) { + setActiveDeals([]); + return; + } + + setActiveDealsLoading(true); + + try { + const today = new Date(); + today.setHours(0, 0, 0, 0); + + const validFlyers = flyers.filter(flyer => { + if (!flyer.valid_from || !flyer.valid_to) return false; + try { + const from = new Date(`${flyer.valid_from}T00:00:00`); + const to = new Date(`${flyer.valid_to}T00:00:00`); + return today >= from && today <= to; + } catch (e) { + console.error("Error parsing flyer date", e); + return false; + } + }); + + if (validFlyers.length === 0) { + setActiveDeals([]); + return; + } + + const validFlyerIds = validFlyers.map(f => f.id); + const allItems = await getFlyerItemsForFlyers(validFlyerIds); + + 
const watchedItemIds = new Set(watchedItems.map(item => item.id)); + const dealItemsRaw = allItems.filter(item => + item.master_item_id && watchedItemIds.has(item.master_item_id) + ); + + const flyerIdToStoreName = new Map(validFlyers.map(f => [f.id, f.store?.name || 'Unknown Store'])); + + const deals: DealItem[] = dealItemsRaw.map(item => ({ + item: item.item, + price_display: item.price_display, + price_in_cents: item.price_in_cents, + quantity: item.quantity, + storeName: flyerIdToStoreName.get(item.flyer_id!) || 'Unknown Store', + master_item_name: item.master_item_name, + unit_price: item.unit_price, + })); + + setActiveDeals(deals); + } catch (e: any) { + setError(`Could not fetch active deals: ${e.message}`); + } finally { + setActiveDealsLoading(false); + } + }; + + findActiveDeals(); + }, [flyers, watchedItems, isDbConnected, isReady]); + + useEffect(() => { + const calculateTotalActiveItems = async () => { + if (!isDbConnected || !isReady || flyers.length === 0) { + setTotalActiveItems(0); + return; + } + + try { + const today = new Date(); + today.setHours(0, 0, 0, 0); + + const validFlyers = flyers.filter(flyer => { + if (!flyer.valid_from || !flyer.valid_to) return false; + try { + const from = new Date(`${flyer.valid_from}T00:00:00`); + const to = new Date(`${flyer.valid_to}T00:00:00`); + return today >= from && today <= to; + } catch (e) { + console.error("Error parsing flyer date", e); + return false; + } + }); + + if (validFlyers.length === 0) { + setTotalActiveItems(0); + return; + } + + const validFlyerIds = validFlyers.map(f => f.id); + const totalCount = await countFlyerItemsForFlyers(validFlyerIds); + setTotalActiveItems(totalCount); + } catch (e: any) { + console.error("Failed to calculate total active items:", e.message); + setTotalActiveItems(0); + } + }; + + calculateTotalActiveItems(); + }, [flyers, isDbConnected, isReady]); + + const processFiles = async (files: File[], checksum: string, originalFileName: string, updateStage?: (index: 
number, updates: Partial) => void) => { + let stageIndex = 0; + + // Stage: Validating Flyer + updateStage?.(stageIndex, { status: 'in-progress' }); + const isFlyer = await withTimeout(isImageAFlyer(files[0]), 15000); + if (!isFlyer) { + throw new Error("The uploaded image does not appear to be a grocery flyer."); + } + updateStage?.(stageIndex++, { status: 'completed' }); // stageIndex is now 1 + + const pageCount = files.length; + const coreDataTimeout = 60000 * pageCount; + const nonCriticalTimeout = 30000; + + // Granular stages for core data extraction + const storeInfoStageIndex = stageIndex; // Stage 1: Extracting Store Name & Sale Dates + const itemExtractionStageIndex = stageIndex + 1; // Stage 2: Extracting All Items from Flyer + + // Mark both stages as in-progress for the single AI call + updateStage?.(storeInfoStageIndex, { status: 'in-progress' }); + updateStage?.(itemExtractionStageIndex, { status: 'in-progress', detail: pageCount > 1 ? `(${pageCount} pages)` : undefined }); + + let progressInterval: number | undefined; + let extractedData; + + try { + if (pageCount > 1) { + let currentPage = 0; + const intervalTime = 2500; + // Attach progress bar to the item extraction stage + progressInterval = window.setInterval(() => { + currentPage++; + if (currentPage <= pageCount) { + updateStage?.(itemExtractionStageIndex, { progress: { current: currentPage, total: pageCount } }); + } else { + clearInterval(progressInterval); + } + }, intervalTime); + } + + extractedData = await withTimeout(extractCoreDataFromImage(files, masterItems), coreDataTimeout); + + // Mark both stages as completed after the AI call finishes + updateStage?.(storeInfoStageIndex, { status: 'completed' }); + updateStage?.(itemExtractionStageIndex, { status: 'completed', progress: null }); + } finally { + if (progressInterval) { + clearInterval(progressInterval); + } + } + + const { store_name, valid_from, valid_to, items: extractedItems } = extractedData; + stageIndex += 2; // Increment 
by 2 for the stages we just completed. stageIndex is now 3 + + // Stage: Extracting Store Address + let storeAddress: string | null = null; + try { + updateStage?.(stageIndex, { status: 'in-progress' }); + storeAddress = await withTimeout(extractAddressFromImage(files[0]), nonCriticalTimeout); + updateStage?.(stageIndex++, { status: 'completed' }); // stageIndex is now 4 + } catch (e: any) { + console.warn("Non-critical step failed: Address extraction.", e.message); + updateStage?.(stageIndex++, { status: 'error', detail: '(Skipped)' }); // stageIndex is now 4 + } + + // Stage: Extracting Store Logo + let storeLogoBase64: string | null = null; + try { + updateStage?.(stageIndex, { status: 'in-progress' }); + const logoData = await withTimeout(extractLogoFromImage(files.slice(0, 1)), nonCriticalTimeout); + storeLogoBase64 = logoData.store_logo_base_64; + updateStage?.(stageIndex++, { status: 'completed' }); // stageIndex is now 5 + } catch (e: any) { + console.warn("Non-critical step failed: Logo extraction.", e.message); + updateStage?.(stageIndex++, { status: 'error', detail: '(Skipped)' }); // stageIndex is now 5 + } + + if (!supabase) { + throw new Error("Cannot process flyer: Supabase client not initialized."); + } + + // Stage: Uploading Flyer Image + updateStage?.(stageIndex, { status: 'in-progress' }); + const imageUrl = await withTimeout(uploadFlyerImage(files[0]), 30000); + updateStage?.(stageIndex++, { status: 'completed' }); // stageIndex is now 6 + + // Stage: Creating Database Record + updateStage?.(stageIndex, { status: 'in-progress' }); + const newFlyer = await withTimeout(createFlyerRecord(originalFileName, imageUrl, checksum, store_name, valid_from, valid_to, storeAddress), 10000); + if (!newFlyer) { + throw new Error("Could not create a record for the new flyer."); + } + + // Upload logo if extracted and if the store doesn't have one already. + // This is a non-critical, fire-and-forget task. 
+ if (storeLogoBase64 && newFlyer.store_id && !newFlyer.store?.logo_url) { + uploadLogoAndUpdateStore(newFlyer.store_id, storeLogoBase64); + } + + updateStage?.(stageIndex++, { status: 'completed' }); // stageIndex is now 7 + + // Stage: Saving Items to Database + updateStage?.(stageIndex, { status: 'in-progress' }); + const savedItems = await withTimeout(saveFlyerItems(extractedItems, newFlyer.id), 20000); + updateStage?.(stageIndex, { status: 'completed' }); + + return { newFlyer, items: savedItems }; + }; + + const setupProcessingStages = (isPdf: boolean) => { + const pendingStatus: StageStatus = 'pending'; + const isDbAvailable = !!supabase; + + const baseStages: ProcessingStage[] = [ + ...(isDbAvailable ? [{ name: 'Checking for Duplicates', status: pendingStatus, critical: true }] : []), + { name: 'Validating Flyer', status: pendingStatus, critical: true }, + { name: 'Extracting Store Name & Sale Dates', status: pendingStatus, critical: true }, + { name: 'Extracting All Items from Flyer', status: pendingStatus, critical: true }, + { name: 'Extracting Store Address', status: pendingStatus, critical: false }, + { name: 'Extracting Store Logo', status: pendingStatus, critical: false }, + ...(isDbAvailable ? 
[ + { name: 'Uploading Flyer Image', status: pendingStatus, critical: true }, + { name: 'Creating Database Record', status: pendingStatus, critical: true }, + { name: 'Saving Items to Database', status: pendingStatus, critical: true }, + ] : []), + ]; + if (isPdf) { + return [ + { name: 'Analyzing PDF', status: pendingStatus, critical: true }, + { name: 'Converting PDF to Images', status: pendingStatus, critical: true }, + ...baseStages + ]; + } + return baseStages; + }; + + const handleProcessFiles = useCallback(async (files: FileList) => { + if (files.length === 0) return; + + resetState(); + setIsProcessing(true); + setProcessingProgress(0); + setError(null); + + if (!supabase) { + setError("A database connection is required to process flyers."); + setIsProcessing(false); + return; + } + + const summary = { + processed: [] as string[], + skipped: [] as string[], + errors: [] as { fileName: string; message: string }[], + }; + + const avgTime = getAverageProcessingTime(); + setEstimatedTime(avgTime * files.length); + + for (let i = 0; i < files.length; i++) { + const originalFile = files[i]; + setCurrentFile(originalFile.name); + setFileCount({ current: i + 1, total: files.length }); + setPageProgress(null); + + const isPdf = originalFile.type === 'application/pdf'; + setProcessingStages(setupProcessingStages(isPdf)); + + const updateStage = (index: number, updates: Partial) => { + setProcessingStages(prev => + prev.map((stage, j) => (j === index ? 
{ ...stage, ...updates } : stage)) + ); + }; + + let currentStageIndex = 0; + const startTime = Date.now(); + + try { + let filesToProcess: File[]; + let checksum = ''; + + if (isPdf) { + updateStage(currentStageIndex, { status: 'in-progress' }); + const onPdfProgress = (currentPage: number, totalPages: number) => { + setPageProgress({ current: currentPage, total: totalPages }); + }; + const { imageFiles, pageCount } = await convertPdfToImageFiles(originalFile, onPdfProgress); + filesToProcess = imageFiles; + setPageProgress(null); + updateStage(currentStageIndex++, { status: 'completed', detail: `(${pageCount} pages)` }); + updateStage(currentStageIndex++, { status: 'completed' }); + } else { + filesToProcess = [originalFile]; + } + + if (supabase) { + updateStage(currentStageIndex, { status: 'in-progress' }); + checksum = await generateFileChecksum(originalFile); + const existing = await findFlyerByChecksum(checksum); + if (existing) { + console.log(`Skipping duplicate file: ${originalFile.name}`); + summary.skipped.push(originalFile.name); + updateStage(currentStageIndex, { status: 'completed', detail: '(Duplicate)' }); + setProcessingProgress(((i + 1) / files.length) * 100); + continue; + } + updateStage(currentStageIndex++, { status: 'completed' }); + } + + const processFilesUpdateStage = (idx: number, updates: Partial) => updateStage(idx + currentStageIndex, updates); + + await processFiles(filesToProcess, checksum, originalFile.name, processFilesUpdateStage); + summary.processed.push(originalFile.name); + } catch (e: any) { + console.error(`Failed to process ${originalFile.name}:`, e); + summary.errors.push({ fileName: originalFile.name, message: e.message }); + setProcessingStages(prev => prev.map(stage => { + if (stage.status === 'in-progress' && (stage.critical ?? 
true)) { + return {...stage, status: 'error'}; + } + return stage; + })); + await new Promise(resolve => setTimeout(resolve, 2000)); + } finally { + const duration = (Date.now() - startTime) / 1000; + recordProcessingTime(duration); + } + setProcessingProgress(((i + 1) / files.length) * 100); + } + + await fetchFlyers(); + await fetchMasterItems(); + setImportSummary(summary); + setIsProcessing(false); + setCurrentFile(null); + setPageProgress(null); + setFileCount(null); + }, [resetState, fetchFlyers, masterItems, fetchMasterItems]); + + const handleAddWatchedItem = useCallback(async (itemName: string, category: string) => { + if (!supabase || !session || isFakeAuth) return; + try { + const updatedOrNewItem = await addWatchedItem(session.user.id, itemName, category); + setWatchedItems(prevItems => { + const itemExists = prevItems.some(item => item.id === updatedOrNewItem.id); + if (!itemExists) { + const newItems = [...prevItems, updatedOrNewItem]; + return newItems.sort((a,b) => a.name.localeCompare(b.name)); + } + return prevItems; // Item already existed in list + }); + } catch (e: any) { + setError(`Could not add watched item: ${e.message}`); + await fetchWatchedItems(session?.user?.id); + } + }, [session, fetchWatchedItems, isFakeAuth]); + + const handleRemoveWatchedItem = useCallback(async (masterItemId: number) => { + if (!supabase || !session || isFakeAuth) return; + try { + await removeWatchedItem(session.user.id, masterItemId); + setWatchedItems(prevItems => prevItems.filter(item => item.id !== masterItemId)); + } catch (e: any) { + setError(`Could not remove watched item: ${e.message}`); + } + }, [session, isFakeAuth]); + + // --- Shopping List Handlers --- + const handleCreateList = useCallback(async (name: string) => { + if (!session || isFakeAuth) return; + try { + const newList = await createShoppingList(session.user.id, name); + setShoppingLists(prev => [...prev, newList]); + setActiveListId(newList.id); + } catch (e: any) { + setError(`Could not 
create list: ${e.message}`); + } + }, [session, isFakeAuth]); + + const handleDeleteList = useCallback(async (listId: number) => { + if (!session || isFakeAuth) return; + try { + await deleteShoppingList(listId); + const newLists = shoppingLists.filter(l => l.id !== listId); + setShoppingLists(newLists); + if (activeListId === listId) { + setActiveListId(newLists.length > 0 ? newLists[0].id : null); + } + } catch (e: any) { + setError(`Could not delete list: ${e.message}`); + } + }, [session, shoppingLists, activeListId, isFakeAuth]); + + const handleAddShoppingListItem = useCallback(async (listId: number, item: { masterItemId?: number, customItemName?: string }) => { + if (!session || isFakeAuth) return; + try { + const newItem = await addShoppingListItem(listId, item); + setShoppingLists(prevLists => prevLists.map(list => { + if (list.id === listId) { + // Avoid adding duplicates to the state if it's already there + const itemExists = list.items.some(i => i.id === newItem.id); + if (itemExists) return list; + return { ...list, items: [...list.items, newItem] }; + } + return list; + })); + } catch (e: any) { + setError(`Could not add item to list: ${e.message}`); + } + }, [session, isFakeAuth]); + + const handleUpdateShoppingListItem = useCallback(async (itemId: number, updates: Partial) => { + if (!session || !activeListId || isFakeAuth) return; + try { + const updatedItem = await updateShoppingListItem(itemId, updates); + setShoppingLists(prevLists => prevLists.map(list => { + if (list.id === activeListId) { + return { ...list, items: list.items.map(i => i.id === itemId ? 
updatedItem : i) }; + } + return list; + })); + } catch (e: any) { + setError(`Could not update list item: ${e.message}`); + } + }, [session, activeListId, isFakeAuth]); + + const handleRemoveShoppingListItem = useCallback(async (itemId: number) => { + if (!session || !activeListId || isFakeAuth) return; + try { + await removeShoppingListItem(itemId); + setShoppingLists(prevLists => prevLists.map(list => { + if (list.id === activeListId) { + return { ...list, items: list.items.filter(i => i.id !== itemId) }; + } + return list; + })); + } catch (e: any) { + setError(`Could not remove list item: ${e.message}`); + } + }, [session, activeListId, isFakeAuth]); + + const handleFakeLogin = (email: string, pass: string) => { + if (email === 'test@test.com' && pass === 'pass123') { + setIsAuthenticated(true); + setIsFakeAuth(true); // Mark that we are using fake auth + setLoginError(null); + } else { + setLoginError('Invalid credentials'); + } + }; + + const handleSignOut = () => { + if (supabase && !isFakeAuth) { + supabase.auth.signOut(); + } + // For fake auth, signing out just resets local state. + // The onAuthStateChange listener will handle real sign-outs. + setIsAuthenticated(false); + setIsFakeAuth(false); + setSession(null); + }; + + + const hasData = flyerItems.length > 0; + + if (!isAuthenticated) { + return ; + } + + return ( +
+
setIsProfileManagerOpen(true)} + onOpenVoiceAssistant={() => setIsVoiceAssistantOpen(true)} + onSignOut={handleSignOut} + /> + {session && profile && !isFakeAuth && ( + setIsProfileManagerOpen(false)} + session={session} + profile={profile} + onProfileUpdate={(updatedProfile) => setProfile(updatedProfile)} + /> + )} + {session && ( + setIsVoiceAssistantOpen(false)} + /> + )} +
+
+ +
+ {isDbConnected ? ( + <> + + {isReady && ( + + )} + setIsReady(true)} /> + + ) : ( + setIsDbConnected(true)} /> + )} +
+ +
+ + + {isProcessing ? ( + + ) : selectedFlyer ? ( + <> + + {hasData && ( + <> + handleAddShoppingListItem(activeListId!, { masterItemId })} + /> + + + )} + + ) : importSummary ? ( + setImportSummary(null)} /> + ) : ( +
+

Welcome to Flyer Crawler!

+

Upload a new grocery flyer to begin, or select a previously processed flyer from the list on the left.

+
+ )} +
+ +
+ {isDbConnected && ( + <> + handleAddShoppingListItem(activeListId!, item)} + onUpdateItem={handleUpdateShoppingListItem} + onRemoveItem={handleRemoveShoppingListItem} + /> + handleAddShoppingListItem(activeListId!, { masterItemId })} + /> + + + + )} +
+
+
+
+ ); +} + +export default App; \ No newline at end of file diff --git a/README.md b/README.md index 7f439ed..544bda8 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,432 @@ -# flyer-crawler.projectium.com +# Flyer Crawler - Grocery AI Analyzer -The Google AI based Flyer Crawler App \ No newline at end of file +Flyer Crawler is a web application that uses the Google Gemini AI to extract, analyze, and manage data from grocery store flyers. Users can upload flyer images or PDFs, and the application will automatically identify items, prices, and sale dates, storing the structured data in a Supabase database for historical analysis, price tracking, and personalized deal alerts. + +## Features + +- **AI-Powered Data Extraction**: Upload PNG, JPG, or PDF flyers to automatically extract store names, sale dates, and a detailed list of items with prices and quantities. +- **Bulk Import**: Process multiple flyers at once with a summary report of successes, skips (duplicates), and errors. +- **Database Integration**: Securely connect to your own Supabase instance to save all extracted data, enabling long-term persistence and analysis. +- **Personalized Watchlist**: Authenticated users can create a "watchlist" of specific grocery items they want to track. +- **Active Deal Alerts**: The app highlights current sales on your watched items from all valid flyers in the database. +- **Price History Charts**: Visualize the price trends of your watched items over time. +- **Shopping List Management**: Users can create multiple shopping lists, add items from flyers or their watchlist, and track purchased items. +- **User Authentication & Management**: Secure user sign-up, login, and profile management, including a secure account deletion process. +- **Dynamic UI**: A responsive interface with dark mode and a choice between metric/imperial unit systems. 
+ +## Tech Stack + +- **Frontend**: React, TypeScript, Tailwind CSS +- **AI**: Google Gemini API (`@google/genai`) +- **Backend**: Supabase (PostgreSQL, Auth, Storage, Edge Functions) +- **UI Components**: Recharts for charts + +--- + +## Required Environment Variables & Setup + +This project requires several secret keys to function. See the `env.example` file for a complete template. + +- **For the Frontend (Interactive Prompt)**: `REACT_APP_SUPABASE_URL` & `REACT_APP_SUPABASE_ANON_KEY`. These are your project's **public-facing** credentials used in the browser. +- **For the AI Service**: `API_KEY`. In this development environment, this is provided for you automatically. + +You can find your Supabase keys in your project's **Dashboard > Project Settings > API**. + +## Setup and Installation + +### Step 1: Set Up Supabase Project + +1. **Create a New Project**: Go to your [Supabase Dashboard](https://app.supabase.com/) and create a new project. +2. **Run the Database Schema**: + - Navigate to the **SQL Editor** in your Supabase dashboard. + - Open `sql/schema.sql.txt`, copy its entire contents, and paste them into the SQL Editor. + - Click **"Run"**. This creates all necessary tables, relationships, and security policies. + +### Step 2: Configure and Deploy Backend Edge Functions + +The application uses secure Supabase Edge Functions for sensitive operations. You must deploy them using the Supabase CLI. + +1. **Install the Supabase CLI**: + ```bash + npm install supabase --save-dev + ``` + +2. **Log in and Link Your Project**: + ```bash + # Login to the Supabase CLI + npx supabase login + + # Link your project (get the project ref from your Supabase URL) + npx supabase link --project-ref <project-id> + ``` + +3. **Create a Shared CORS File**: All functions need a shared file for CORS headers. + - Create a folder path: `supabase/functions/_shared`. + - Inside that folder, create a file named `cors.ts` and paste the code from **PART 7.1** below. + +4.
**Create and Deploy the Functions**: + - For each of the three functions (`system-check`, `delete-user`, `seed-database`): + 1. Run `npx supabase functions new <function-name>`. + 2. Copy the corresponding code from **PART 7.2**, **7.3**, or **7.4** into the new `supabase/functions/<function-name>/index.ts` file. + - Deploy all three functions to your project: + ```bash + npx supabase functions deploy system-check + npx supabase functions deploy delete-user + npx supabase functions deploy seed-database + ``` + +### Step 3: Connect and Run the Frontend + +1. **Run the Application**: This project is configured to run in an environment where dependencies are managed automatically. +2. **Connect to Database**: On first launch, the app will prompt you for your Supabase credentials. Enter your **`Supabase URL`** and your **`Supabase Anon Key`** (the public `anon` key) here. The app will save these in your browser's local storage. +3. **Follow the System Check**: The **System Check** panel will now guide you through verifying your setup. If any steps fail, it will provide instructions. +4. **Seed Dev Users**: If the "Seeded Users" check fails, a **"Seed Dev Users"** button will appear. Click it to create the initial `admin@example.com` and `user@example.com` accounts. The check will re-run automatically and should now pass. +5. **Start Using**: You can now upload flyers or log in with `admin@example.com` (password: `password123`). + +--- +-- ============================================================================ +-- PART 7: EDGE FUNCTIONS DOCUMENTATION +-- The following sections contain the TypeScript code for the required +-- Supabase Edge Functions.
+-- ============================================================================ +-- PART 7.1: SHARED CORS HELPER +-- File: supabase/functions/_shared/cors.ts +-- ============================================================================ +export const corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type', +}; + +-- ============================================================================ +-- PART 7.2: SYSTEM CHECK FUNCTION +-- File: supabase/functions/system-check/index.ts +-- ============================================================================ +import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'; +import { corsHeaders } from '../_shared/cors.ts'; + +// Helper function to create a Supabase admin client +const createAdminClient = () => createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!, + { auth: { autoRefreshToken: false, persistSession: false } } +); + +const checkDatabaseSchema = async (client) => { + const { data, error } = await client.rpc('check_schema'); + if (error) throw new Error(`Schema check failed: ${error.message}`); + if (!data) throw new Error('Schema check returned no data.'); + + const requiredTables = ['flyers', 'flyer_items', 'profiles', 'shopping_lists']; + const missingTables = requiredTables.filter(t => !data.tables.includes(t)); + if (missingTables.length > 0) { + return { pass: false, message: `Missing tables: ${missingTables.join(', ')}. Please run the full schema.sql.txt script.` }; + } + return { pass: true, message: 'All required tables exist.' 
}; +}; + +const checkRlsPolicies = async (client) => { + const { data, error } = await client.rpc('check_rls'); + if (error) throw new Error(`RLS check failed: ${error.message}`); + + const requiredPolicies = { + 'profiles': 'Users can update their own profile.', + 'shopping_lists': 'Users can manage their own shopping lists.', + 'flyer_items': `Allow anon users to manage test items` + }; + + for (const table in requiredPolicies) { + const policyName = requiredPolicies[table]; + if (!data.some(p => p.table_name === table && p.policy_name === policyName)) { + return { pass: false, message: `Missing RLS policy "${policyName}" on table "${table}". Please run the schema.sql.txt script.` }; + } + } + return { pass: true, message: 'Key RLS policies are in place.' }; +}; + +const checkUserCreationTrigger = async (client) => { + const { data, error } = await client.rpc('check_trigger_security'); + if (error) throw new Error(`Trigger check failed: ${error.message}`); + + if (!data || data.length === 0) { + return { pass: false, message: `Function 'handle_new_user' not found. Please run schema.sql.txt.` }; + } + + const handleNewUser = data[0]; + if (!handleNewUser.is_security_definer) { + return { pass: false, message: `Trigger function 'handle_new_user' must be 'SECURITY DEFINER'. Please run the schema.sql.txt script to fix it.` }; + } + if (handleNewUser.owner_role !== 'postgres') { + return { pass: false, message: `Trigger function 'handle_new_user' must be owned by 'postgres'. Yours is owned by '${handleNewUser.owner_role}'. Please run schema.sql.txt to fix it.` }; + } + return { pass: true, message: 'User creation trigger is correctly configured.' }; +}; + +const checkStorageBucket = async (client) => { + const { data, error } = await client.storage.getBucket('flyers'); + if (error) { + return { pass: false, message: `Failed to access 'flyers' bucket: ${error.message}. 
Ensure it exists and permissions are set.` }; + } + if (!data.public) { + return { pass: false, message: `Storage bucket 'flyers' is not public. Please enable public access in your Supabase dashboard.` }; + } + return { pass: true, message: "'flyers' bucket exists and is public." }; +}; + +Deno.serve(async (req) => { + if (req.method === 'OPTIONS') { + return new Response('ok', { headers: corsHeaders }); + } + + try { + const adminClient = createAdminClient(); + const results = {}; + + results['schema'] = await checkDatabaseSchema(adminClient); + results['rls'] = await checkRlsPolicies(adminClient); + results['trigger'] = await checkUserCreationTrigger(adminClient); + results['storage'] = await checkStorageBucket(adminClient); + + return new Response(JSON.stringify({ results }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }); + } catch (error) { + return new Response(JSON.stringify({ error: error.message, stack: error.stack }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }); + } +}); + +-- ============================================================================ +-- PART 7.3: DELETE USER FUNCTION +-- File: supabase/functions/delete-user/index.ts +-- ============================================================================ +import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'; +import { corsHeaders } from '../_shared/cors.ts'; + +Deno.serve(async (req) => { + // Handle preflight OPTIONS request for CORS + if (req.method === 'OPTIONS') { + return new Response('ok', { headers: corsHeaders }); + } + + try { + const { password } = await req.json(); + if (!password) { + throw new Error('Password is required.'); + } + + // Create a Supabase client with the user's authentication token + const authHeader = req.headers.get('Authorization'); + if (!authHeader) { + throw new Error('Missing authorization header.'); + } + + const userSupabaseClient = createClient( + 
Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_ANON_KEY')!, + { global: { headers: { Authorization: authHeader } } } + ); + + // Get the user from the token + const { data: { user }, error: userError } = await userSupabaseClient.auth.getUser(); + if (userError || !user) { + return new Response(JSON.stringify({ error: userError?.message || 'User not authenticated.' }), { + status: 401, + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + }); + } + + // Verify the user's password by attempting to sign in + const { error: signInError } = await userSupabaseClient.auth.signInWithPassword({ + email: user.email!, + password: password, + }); + + if (signInError) { + return new Response(JSON.stringify({ error: 'Invalid password.' }), { + status: 403, + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + }); + } + + // If password is correct, create an admin client with the service_role key + const adminSupabaseClient = createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!, + { auth: { autoRefreshToken: false, persistSession: false } } + ); + + // Delete the user + const { error: deleteError } = await adminSupabaseClient.auth.admin.deleteUser(user.id); + if (deleteError) { + throw deleteError; + } + + return new Response(JSON.stringify({ message: 'User deleted successfully.' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }); + + } catch (error) { + // Return a detailed error with a stack trace for better debugging. 
+ return new Response(JSON.stringify({ error: error.message, stack: error.stack }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }); + } +}); + +-- ============================================================================ +-- PART 7.4: SEED DATABASE FUNCTION +-- File: supabase/functions/seed-database/index.ts +-- ============================================================================ +import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'; +import { corsHeaders } from '../_shared/cors.ts'; + +Deno.serve(async (req) => { + if (req.method === 'OPTIONS') { + return new Response('ok', { headers: corsHeaders }); + } + + try { + // We create an admin client using the service_role key to perform elevated actions. + // This key is automatically provided by Supabase in the production environment. + const adminSupabaseClient = createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!, + { auth: { autoRefreshToken: false, persistSession: false } } + ); + + const usersToSeed = [ + { + email: 'admin@example.com', + password: 'password123', + user_metadata: { full_name: 'Admin User' } + }, + { + email: 'user@example.com', + password: 'password123', + user_metadata: { full_name: 'Normal User' } + } + ]; + + const createdUsers = []; + const existingUsers = []; + + const { data: { users: existingUserList }, error: listError } = await adminSupabaseClient.auth.admin.listUsers(); + if (listError) throw listError; + + const existingEmails = new Set(existingUserList.map(u => u.email)); + + for (const user of usersToSeed) { + if (!existingEmails.has(user.email)) { + const { error } = await adminSupabaseClient.auth.admin.createUser({ + email: user.email, + password: user.password, + user_metadata: user.user_metadata, + email_confirm: true, // Auto-confirm for dev environment + }); + + if (error) { + throw new Error(`Failed to create user ${user.email}: ${error.message}`); + } + 
createdUsers.push(user.email); + } else { + existingUsers.push(user.email); + } + } + + let message = ''; + if (createdUsers.length > 0) { + message += `Successfully created users: ${createdUsers.join(', ')}. `; + } + if (existingUsers.length > 0) { + message += `Users already existed: ${existingUsers.join(', ')}.`; + } + if (message === '') { + message = 'All development users already exist.' + } + + return new Response(JSON.stringify({ message: message.trim() }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }); + + } catch (error) { + // Return a detailed error with a stack trace for better debugging. + return new Response(JSON.stringify({ error: error.message, stack: error.stack }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }); + } +}); + + + + + + + + + + + +2025-11-08 - from Google AI BUILD env Gemini + +ok, time to move this project from the Google AI BUILD environment, and to a proper build and deploy process using git, etc - can you make a list of what the overall projects progress has been so far, where we are at with the overall goals, and what steps were still incomplete, and what areas were started but not finished + +answer: + +Of course. Moving from a rapid-prototyping environment to a structured build process is a critical and exciting step for any project. It's the perfect time to take stock of our progress. +Here is a comprehensive overview of the Flyer Crawler project's current status, its alignment with our goals, and a roadmap for what lies ahead. +Project Status & Next Steps Report +1. Project Goal Recap +Our primary objective is to create a powerful tool that transforms static, often confusing grocery store flyers into structured, actionable data. By leveraging AI, we aim to help users save money, plan meals, and streamline their shopping through features like price tracking, deal alerts, and smart shopping lists. +2.
Current Progress: Implemented Features (What's Done & Working) +We have successfully built a robust Minimum Viable Product (MVP) that validates the core concept. The application is functional end-to-end. +Core AI Pipeline: +Multi-Format Upload: Users can upload single images (PNG, JPG) and multi-page PDFs. The system automatically converts PDFs into images for the AI. +Intelligent Data Extraction: The Gemini-powered backend can reliably extract: +Store Name & Address +Flyer Validity Dates (From/To) +A detailed list of all sale items. +Granular Item Analysis: For each item, the AI extracts the display price, quantity/deal info (e.g., "per lb", "2 for $5"), and calculates a standardized unit_price and a normalized price_in_cents for comparison. +Canonical Item Matching: A critical feature, the AI matches flyer items (e.g., "Dole Bananas") to a canonical entry in our master_grocery_items table (e.g., "bananas"), which powers all personalization features. +Backend & Data Persistence (Supabase): +Full Integration: The application is fully backed by Supabase for database (PostgreSQL), file storage, and authentication. +Durable Storage: Processed flyers are saved to the database, and their images are uploaded to Supabase Storage, creating a permanent, queryable history. +Duplicate Prevention: A checksum is generated for each uploaded file to prevent processing the same flyer twice. +User Features & Personalization: +Full Authentication: Complete user auth system with email/password, OAuth (Google/GitHub), password reset, and secure account deletion via Edge Functions. +User Profiles & Preferences: Users can manage their name and preferences (Dark Mode, Metric/Imperial units), which are saved to their profile. +The Watchlist: Authenticated users can create a personalized list of items they want to track. The UI clearly highlights these items in any flyer. 
+Active Deals Panel: A key feature that scans all current, valid flyers in the database and shows a user the best deals on their specific watched items. +Shopping Lists: Users can create and manage multiple shopping lists, add items from flyers or their watchlist, and check them off as "purchased." +AI Analysis Panel: Provides three levels of AI-driven analysis on a selected flyer's contents: Quick Insights, a Deep-Dive meal plan, and a Web Search for recipes/info, complete with source links. +Development & Onboarding: +Comprehensive Setup Guide: The README.md and schema.sql.txt provide a complete guide for a developer to set up the entire Supabase backend from scratch. +Automated System Check: A SystemCheck component automatically verifies a developer's backend configuration (schema, RLS policies, Edge Functions) and provides actionable feedback, streamlining setup. +Database Seeder: A one-click tool to seed the database with initial development users. +3. Current State vs. Overall Goals +The project is in an excellent state. The core loop of Upload -> Process -> View -> Personalize is complete and stable. We have successfully de-risked the most technically challenging part of the project: using a large language model to accurately parse unstructured visual data and link it to a relational database. +The current application serves as a powerful proof-of-concept and a solid foundation for the more advanced features outlined in the database schema. +4. Incomplete Features (What Hasn't Been Started) +The database schema was designed with a larger vision in mind. The following features are fully designed on the backend but have no corresponding UI or logic implemented yet: +User Alerts & Notifications: The user_alerts and notifications tables exist, but the mechanism to trigger them (e.g., a nightly cron job that checks for new deals on watched items) has not been built. This is the next major feature needed to make the app proactive. 
+Full Recipe & Meal Planning System: The schema supports recipes, ingredients, menu plans, and a user's pantry. However, there is no UI for users to: +Create or import recipes. +Build a weekly menu_plan. +Manage their pantry_items. +Generate a shopping list from a meal plan while subtracting pantry items (the database function for this, generate_shopping_list_for_menu_plan, exists but is not used). +Crowdsourced Data Quality: The schema includes tables for suggested_corrections and user_submitted_prices. The UI to allow users to submit corrections or report in-store prices has not been created. An admin dashboard to review and approve these submissions would also be needed. +Location-Based Services: The database can store detailed store_locations with geographic coordinates, but the app does not currently use this. It doesn't request the user's location to find nearby flyers or stores. +5. Partially Completed Areas (Started but Unfinished) +These are areas where a foundation exists but require more work to be considered production-ready. +Price History Chart: The PriceHistoryChart component is built and the backend has a complex trigger (update_price_history_on_flyer_item_insert) to populate the summary table. However, this is a complex data aggregation process that needs extensive testing with real-world scenarios (including flyer deletions and corrections) to ensure accuracy. +Scalability of AI Processing: Currently, all processing happens in the user's browser and blocks the UI. For a production application, this should be moved to a background job. A better architecture would be: Upload to Storage -> Trigger Supabase Edge Function -> Function calls Gemini -> Function writes results to DB. This makes the frontend non-blocking and more scalable. +AI Matching Robustness: The AI's ability to match items to the master_grocery_items list is good but can always be improved. 
This will require ongoing prompt engineering and testing with a wider variety of flyers from different stores and regions. The master_item_aliases table, designed for this, is currently unused. +Formal Testing: The project lacks a formal testing suite (e.g., Vitest, Jest, React Testing Library). While the SystemCheck is great for setup, it's not a substitute for unit and integration tests to ensure code quality and prevent regressions. +Accessibility & Advanced Responsiveness: The app is functional on mobile, but it would benefit from a dedicated pass to ensure all components are fully responsive, accessible, and navigable via keyboard, adhering to WCAG standards. +By addressing these areas, we can transition Flyer Crawler from a powerful MVP into a polished, scalable, and feature-rich production application. \ No newline at end of file diff --git a/components/AnalysisPanel.tsx b/components/AnalysisPanel.tsx new file mode 100644 index 0000000..0811420 --- /dev/null +++ b/components/AnalysisPanel.tsx @@ -0,0 +1,174 @@ + +import React, { useState, useCallback } from 'react'; +import { AnalysisType, FlyerItem, Store } from '../types'; +import { getQuickInsights, getDeepDiveAnalysis, searchWeb, planTripWithMaps, generateImageFromText } from '../services/geminiService'; +import { LoadingSpinner } from './LoadingSpinner'; +import { LightbulbIcon } from './icons/LightbulbIcon'; +import { BrainIcon } from './icons/BrainIcon'; +import { SearchIcon } from './icons/SearchIcon'; +import { MapPinIcon } from './icons/MapPinIcon'; +import { PhotoIcon } from './icons/PhotoIcon'; + +interface AnalysisPanelProps { + flyerItems: FlyerItem[]; + store?: Store; +} + +interface TabButtonProps { + label: string; + icon: React.ReactNode; + isActive: boolean; + onClick: () => void; +} + +const TabButton: React.FC = ({ label, icon, isActive, onClick }) => { + const activeClasses = 'bg-brand-primary text-white'; + const inactiveClasses = 'bg-gray-200 dark:bg-gray-700 text-gray-600 
dark:text-gray-300 hover:bg-gray-300 dark:hover:bg-gray-600'; + return ( + + ); +}; + +export const AnalysisPanel: React.FC = ({ flyerItems, store }) => { + const [activeTab, setActiveTab] = useState(AnalysisType.QUICK_INSIGHTS); + const [results, setResults] = useState<{ [key in AnalysisType]?: string }>({}); + const [sources, setSources] = useState([]); + const [loadingStates, setLoadingStates] = useState<{ [key in AnalysisType]?: boolean }>({}); + const [error, setError] = useState(null); + + // State for new feature stubs + const [generatedImageUrl, setGeneratedImageUrl] = useState(null); + const [isGeneratingImage, setIsGeneratingImage] = useState(false); + + const handleAnalysis = useCallback(async (type: AnalysisType) => { + setLoadingStates(prev => ({ ...prev, [type]: true })); + setError(null); + try { + let responseText = ''; + if (type === AnalysisType.QUICK_INSIGHTS) { + responseText = await getQuickInsights(flyerItems); + } else if (type === AnalysisType.DEEP_DIVE) { + responseText = await getDeepDiveAnalysis(flyerItems); + } else if (type === AnalysisType.WEB_SEARCH) { + const { text, sources } = await searchWeb(flyerItems); + responseText = text; + setSources(sources); + } else if (type === AnalysisType.PLAN_TRIP) { + const userLocation = await new Promise((resolve, reject) => { + navigator.geolocation.getCurrentPosition( + (position) => resolve(position.coords), + (error) => reject(error) + ); + }); + const { text, sources } = await planTripWithMaps(flyerItems, store, userLocation); + responseText = text; + setSources(sources); + } + setResults(prev => ({ ...prev, [type]: responseText })); + } catch (e: any) { + console.error(`Analysis failed for type ${type}:`, e); + const userFriendlyMessage = e.code === 1 ? "Please allow location access to use this feature." : `Failed to get ${type.replace('_', ' ')}. 
Please try again.`; + setError(userFriendlyMessage); + } finally { + setLoadingStates(prev => ({ ...prev, [type]: false })); + } + }, [flyerItems, store]); + + const handleGenerateImage = useCallback(async () => { + const mealPlanText = results[AnalysisType.DEEP_DIVE]; + if (!mealPlanText) return; + + setIsGeneratingImage(true); + try { + const base64Image = await generateImageFromText(mealPlanText); + setGeneratedImageUrl(`data:image/png;base64,${base64Image}`); + } catch (e: any) { + setError(`Failed to generate image: ${e.message}`); + } finally { + setIsGeneratingImage(false); + } + }, [results]); + + const renderContent = () => { + if (loadingStates[activeTab]) { + return
; + } + + const resultText = results[activeTab]; + if (resultText) { + const isSearchType = activeTab === AnalysisType.WEB_SEARCH || activeTab === AnalysisType.PLAN_TRIP; + return ( +
+ {resultText} + {isSearchType && sources.length > 0 && ( +
+

Sources:

+
    + {sources.map((source, index) => { + const uri = source.web?.uri || source.maps?.uri; + const title = source.web?.title || source.maps?.title || 'Map Link'; + if (!uri) return null; + return ( +
  • + + {title} + +
  • + ) + })} +
+
+ )} + {activeTab === AnalysisType.DEEP_DIVE && ( +
+ {generatedImageUrl ? ( + AI generated meal plan + ) : ( + + )} +
+ )} +
+ ); + } + + return ( +
+

Click below to generate AI-powered insights.

+ +
+ ); + }; + + return ( +
+
+ } isActive={activeTab === AnalysisType.QUICK_INSIGHTS} onClick={() => setActiveTab(AnalysisType.QUICK_INSIGHTS)} /> + } isActive={activeTab === AnalysisType.DEEP_DIVE} onClick={() => setActiveTab(AnalysisType.DEEP_DIVE)} /> + } isActive={activeTab === AnalysisType.WEB_SEARCH} onClick={() => setActiveTab(AnalysisType.WEB_SEARCH)} /> + } isActive={activeTab === AnalysisType.PLAN_TRIP} onClick={() => setActiveTab(AnalysisType.PLAN_TRIP)} /> +
+
+ {error &&

{error}

} + {renderContent()} +
+
+ ); +}; \ No newline at end of file diff --git a/components/AuthModal.tsx b/components/AuthModal.tsx new file mode 100644 index 0000000..9bebe06 --- /dev/null +++ b/components/AuthModal.tsx @@ -0,0 +1,200 @@ +import React, { useState } from 'react'; +import { supabase } from '../services/supabaseClient'; +import { LoadingSpinner } from './LoadingSpinner'; +import { XMarkIcon } from './icons/XMarkIcon'; +import { GoogleIcon } from './icons/GoogleIcon'; +import { GithubIcon } from './icons/GithubIcon'; + +interface AuthModalProps { + isOpen: boolean; + onClose: () => void; +} + +type AuthView = 'signIn' | 'signUp' | 'resetPassword'; + +export const AuthModal: React.FC = ({ isOpen, onClose }) => { + const [view, setView] = useState('signIn'); + const [email, setEmail] = useState(''); + const [password, setPassword] = useState(''); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + const [message, setMessage] = useState(null); + + const clearState = () => { + setError(null); + setMessage(null); + setEmail(''); + setPassword(''); + } + + const handleViewChange = (newView: AuthView) => { + setView(newView); + clearState(); + }; + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + setLoading(true); + setError(null); + setMessage(null); + + try { + if (view === 'signUp') { + const { error } = await supabase.auth.signUp({ email, password }); + if (error) throw error; + setMessage('Check your email for the confirmation link!'); + } else { + const { error } = await supabase.auth.signInWithPassword({ email, password }); + if (error) throw error; + onClose(); + } + } catch (err: any) { + setError(err.message || 'An unexpected error occurred.'); + } finally { + setLoading(false); + } + }; + + const handlePasswordReset = async (e: React.FormEvent) => { + e.preventDefault(); + setLoading(true); + setError(null); + setMessage(null); + try { + const { error } = await supabase.auth.resetPasswordForEmail(email, { 
+ redirectTo: window.location.href, + }); + if (error) throw error; + setMessage('Password reset link sent! Check your email.'); + } catch(err: any) { + setError(err.message || 'An unexpected error occurred.'); + } finally { + setLoading(false); + } + }; + + const handleOAuthSignIn = async (provider: 'google' | 'github') => { + setLoading(true); + setError(null); + const { error } = await supabase.auth.signInWithOAuth({ + provider, + options: { + redirectTo: window.location.href, + } + }); + if (error) { + setError(error.message); + setLoading(false); + } + }; + + + if (!isOpen) return null; + + return ( +
+
e.stopPropagation()} + > + + +
+ {view !== 'resetPassword' && ( +
+ + +
+ )} + + {view === 'resetPassword' ? ( + <> +

Reset Password

+

Enter your email to receive a reset link.

+
+
+ + setEmail(e.target.value)} required className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm" placeholder="you@example.com"/> +
+ {error &&

{error}

} + {message &&

{message}

} + + +
+ + ) : ( + <> +

+ {view === 'signUp' ? 'Create an Account' : 'Welcome Back'} +

+

+ {view === 'signUp' ? 'to start personalizing your experience.' : 'to access your watched items and lists.'} +

+ +
+ + +
+ +
+
+ OR +
+
+ +
+
+ + setEmail(e.target.value)} required className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-brand-primary focus:border-brand-primary" placeholder="you@example.com" /> +
+
+
+ + {view === 'signIn' && ( + + )} +
+ setPassword(e.target.value)} required className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-brand-primary focus:border-brand-primary" placeholder="••••••••" /> +
+ {error &&

{error}

} + {message &&

{message}

} + +
+ + )} +
+
+
+ ); +}; diff --git a/components/BulkImportSummary.tsx b/components/BulkImportSummary.tsx new file mode 100644 index 0000000..f62f477 --- /dev/null +++ b/components/BulkImportSummary.tsx @@ -0,0 +1,84 @@ +import React from 'react'; +import { CheckCircleIcon } from './icons/CheckCircleIcon'; +import { ExclamationTriangleIcon } from './icons/ExclamationTriangleIcon'; +import { InformationCircleIcon } from './icons/InformationCircleIcon'; + +interface BulkImportSummaryProps { + summary: { + processed: string[]; + skipped: string[]; + errors: { fileName: string; message: string }[]; + }; + onDismiss: () => void; +} + +export const BulkImportSummary: React.FC = ({ summary, onDismiss }) => { + const hasContent = summary.processed.length > 0 || summary.skipped.length > 0 || summary.errors.length > 0; + + return ( +
+
+
+

Bulk Import Report

+

+ {`Processed: ${summary.processed.length}, Skipped: ${summary.skipped.length}, Errors: ${summary.errors.length}`} +

+
+ +
+ + {hasContent ? ( +
+ {summary.processed.length > 0 && ( +
+

+ + Successfully Processed ({summary.processed.length}) +

+
    + {summary.processed.map((item, index) =>
  • {item}
  • )} +
+
+ )} + {summary.skipped.length > 0 && ( +
+

+ + Skipped Duplicates ({summary.skipped.length}) +

+
    + {summary.skipped.map((item, index) =>
  • {item}
  • )} +
+
+ )} + {summary.errors.length > 0 && ( +
+

+ + Errors ({summary.errors.length}) +

+
    + {summary.errors.map((err, index) => ( +
  • + {err.fileName}: {err.message} +
  • + ))} +
+
+ )} +
+ ) : ( +
+ +

No new files were found to process.

+
+ )} +
+ ); +}; \ No newline at end of file diff --git a/components/BulkImporter.tsx b/components/BulkImporter.tsx new file mode 100644 index 0000000..34b19b6 --- /dev/null +++ b/components/BulkImporter.tsx @@ -0,0 +1,77 @@ +import React, { useCallback, useState } from 'react'; +import { UploadIcon } from './icons/UploadIcon'; + +interface BulkImporterProps { + onProcess: (files: FileList) => void; + isProcessing: boolean; +} + +export const BulkImporter: React.FC = ({ onProcess, isProcessing }) => { + const [isDragging, setIsDragging] = useState(false); + + const handleFiles = (files: FileList | null) => { + if (files && files.length > 0 && !isProcessing) { + onProcess(files); + } + }; + + const handleFileChange = (e: React.ChangeEvent) => { + handleFiles(e.target.files); + // Reset input value to allow selecting the same file again + e.target.value = ''; + }; + + const handleDragEnter = useCallback((e: React.DragEvent) => { + e.preventDefault(); + e.stopPropagation(); + if (!isProcessing) setIsDragging(true); + }, [isProcessing]); + + const handleDragLeave = useCallback((e: React.DragEvent) => { + e.preventDefault(); + e.stopPropagation(); + setIsDragging(false); + }, []); + + const handleDrop = useCallback((e: React.DragEvent) => { + e.preventDefault(); + e.stopPropagation(); + setIsDragging(false); + if (!isProcessing && e.dataTransfer.files) { + handleFiles(e.dataTransfer.files); + } + }, [isProcessing, onProcess]); + + const borderColor = isDragging ? 'border-brand-primary' : 'border-gray-300 dark:border-gray-600'; + const bgColor = isDragging ? 'bg-brand-light/50 dark:bg-brand-dark/20' : 'bg-gray-50 dark:bg-gray-800'; + + return ( +
+ +
+ ); +}; \ No newline at end of file diff --git a/components/DarkModeToggle.tsx b/components/DarkModeToggle.tsx new file mode 100644 index 0000000..f741b9b --- /dev/null +++ b/components/DarkModeToggle.tsx @@ -0,0 +1,29 @@ + +import React from 'react'; +import { SunIcon } from './icons/SunIcon'; +import { MoonIcon } from './icons/MoonIcon'; + +interface DarkModeToggleProps { + isDarkMode: boolean; + onToggle: () => void; +} + +export const DarkModeToggle: React.FC = ({ isDarkMode, onToggle }) => { + return ( + + ); +}; diff --git a/components/DatabaseControls.tsx b/components/DatabaseControls.tsx new file mode 100644 index 0000000..a8d6da8 --- /dev/null +++ b/components/DatabaseControls.tsx @@ -0,0 +1,108 @@ + +import React, { useState, useEffect, useCallback } from 'react'; +import { supabase, runDatabaseSelfTest, testStorageConnection } from '../services/supabaseClient'; +import { ServerIcon } from './icons/ServerIcon'; +import { LoadingSpinner } from './LoadingSpinner'; + +type TestStatus = 'idle' | 'testing' | 'success' | 'error'; + +interface DatabaseControlsProps { + onReady: () => void; +} + +export const DatabaseControls: React.FC = ({ onReady }) => { + const [status, setStatus] = useState('idle'); + const [message, setMessage] = useState(''); + const [hasRunAutoTest, setHasRunAutoTest] = useState(false); + + const handleTestConnection = useCallback(async () => { + setStatus('testing'); + setMessage(''); + + try { + // Test 1: Full Database CRUD Self-Test + const dbResult = await runDatabaseSelfTest(); + if (!dbResult.success) { + setStatus('error'); + setMessage(dbResult.error || 'An unknown database error occurred.'); + return; + } + + // Test 2: Storage Write/Delete + const storageResult = await testStorageConnection(); + if (!storageResult.success) { + setStatus('error'); + setMessage(storageResult.error || 'An unknown storage error occurred.'); + return; + } + + // All tests passed + setStatus('success'); + setMessage('Connection successful! 
Database and Storage are working correctly.'); + + // Reset after a few seconds if it was a manual test + setTimeout(() => { + if (status !== 'testing') { + setStatus('idle'); + setMessage(''); + } + }, 8000); + } finally { + // This is the crucial step: always signal readiness after the test sequence completes. + onReady(); + } + }, [status, onReady]); + + // Auto-run the test once on initial connection + useEffect(() => { + if (supabase && !hasRunAutoTest) { + setHasRunAutoTest(true); + handleTestConnection(); + } + }, [supabase, hasRunAutoTest, handleTestConnection]); + + + if (!supabase) { + return null; // Don't render anything if Supabase is not configured + } + + const statusText = status === 'success' ? 'OK' : status === 'error' ? 'Error' : status === 'testing' ? 'Testing...' : 'Connected'; + const statusColor = status === 'success' ? 'text-green-600 dark:text-green-400' + : status === 'error' ? 'text-red-600 dark:text-red-400' + : 'text-gray-600 dark:text-gray-400'; + + return ( +
+

+ + Backend Status +

+

+ Status: {statusText}. The self-test checks all database permissions. +

+ + {message && ( +
+ {message} +
+ )} +
+ ); +}; diff --git a/components/DatabaseSeeder.tsx b/components/DatabaseSeeder.tsx new file mode 100644 index 0000000..e842691 --- /dev/null +++ b/components/DatabaseSeeder.tsx @@ -0,0 +1,68 @@ +import React, { useState } from 'react'; +import { invokeSeedDatabaseFunction } from '../services/supabaseClient'; +import { SparklesIcon } from './icons/SparklesIcon'; +import { LoadingSpinner } from './LoadingSpinner'; + +interface DatabaseSeederProps { + onSuccess: () => void; +} + +export const DatabaseSeeder: React.FC = ({ onSuccess }) => { + const [isLoading, setIsLoading] = useState(false); + const [error, setError] = useState(null); + const [message, setMessage] = useState(null); + + const handleSeed = async () => { + setIsLoading(true); + setError(null); + setMessage(null); + + try { + const result = await invokeSeedDatabaseFunction(); + setMessage(result.message); + // Wait a moment for the success message to be readable, then trigger re-check + setTimeout(() => { + onSuccess(); + }, 2500); + } catch (e: any) { + setError(e.message); + } finally { + setIsLoading(false); + } + }; + + return ( +
+
+
+
+
+

+ It looks like the development users are missing. Use this tool to create them. +

+
+ + {error && ( +

{error}

+ )} + {message && ( +

{message}

+ )} +
+
+
+
+ ); +}; diff --git a/components/DevTestRunner.tsx b/components/DevTestRunner.tsx new file mode 100644 index 0000000..14cfcea --- /dev/null +++ b/components/DevTestRunner.tsx @@ -0,0 +1,110 @@ +import React, { useState } from 'react'; +import { supabase } from '../services/supabaseClient'; +import { BeakerIcon } from './icons/BeakerIcon'; +import { LoadingSpinner } from './LoadingSpinner'; +import { CheckCircleIcon } from './icons/CheckCircleIcon'; +import { XCircleIcon } from './icons/XCircleIcon'; + +type TestStatus = 'idle' | 'running' | 'pass' | 'fail'; + +interface TestResult { + name: string; + status: TestStatus; + message: string; +} + +const initialTests: TestResult[] = [ + { name: 'Test Admin Login', status: 'idle', message: 'Verifies the seeded admin user can log in.' }, +]; + +export const DevTestRunner: React.FC = () => { + const [testResults, setTestResults] = useState(initialTests); + const [isRunning, setIsRunning] = useState(false); + + const runTests = async () => { + setIsRunning(true); + // Reset statuses to running + setTestResults(prev => prev.map(t => ({ ...t, status: 'running' }))); + + // --- Test Case 1: Admin Login --- + try { + const { error } = await supabase.auth.signInWithPassword({ + email: 'admin@example.com', + password: 'password123', + }); + + if (error) { + throw new Error(error.message); + } + + // IMPORTANT: Sign out immediately so the test doesn't affect the app's state + await supabase.auth.signOut(); + + setTestResults(prev => prev.map(t => t.name === 'Test Admin Login' + ? { ...t, status: 'pass', message: 'Successfully logged in and out.' } + : t + )); + } catch (e: any) { + setTestResults(prev => prev.map(t => t.name === 'Test Admin Login' + ? { ...t, status: 'fail', message: `Failed: ${e.message}` } + : t + )); + } + + setIsRunning(false); + }; + + const getStatusIndicator = (status: TestStatus) => { + switch (status) { + case 'running': return
; + case 'pass': return ; + case 'fail': return ; + case 'idle': return
; + } + }; + + if (!supabase) { + return null; + } + + return ( +
+

+ + Development Test Runner +

+

+ Run integration tests to verify your backend setup. +

+ +
    + {testResults.map(test => ( +
  • +
    {getStatusIndicator(test.status)}
    +
    +

    {test.name}

    +

    + {test.message} +

    +
    +
  • + ))} +
+ + +
+ ); +}; diff --git a/components/ErrorDisplay.tsx b/components/ErrorDisplay.tsx new file mode 100644 index 0000000..e831839 --- /dev/null +++ b/components/ErrorDisplay.tsx @@ -0,0 +1,17 @@ + +import React from 'react'; + +interface ErrorDisplayProps { + message: string; +} + +export const ErrorDisplay: React.FC = ({ message }) => { + if (!message) return null; + return ( +
+ Error: + {message} +
+ ); +}; + \ No newline at end of file diff --git a/components/ExtractedDataTable.tsx b/components/ExtractedDataTable.tsx new file mode 100644 index 0000000..fe881b3 --- /dev/null +++ b/components/ExtractedDataTable.tsx @@ -0,0 +1,187 @@ +import React, { useMemo, useState } from 'react'; +import type { FlyerItem, MasterGroceryItem, ShoppingList } from '../types'; +import { formatUnitPrice } from '../utils/unitConverter'; +import { Session } from '@supabase/supabase-js'; +import { PlusCircleIcon } from './icons/PlusCircleIcon'; + +interface ExtractedDataTableProps { + items: FlyerItem[]; + totalActiveItems?: number; + watchedItems?: MasterGroceryItem[]; + masterItems: MasterGroceryItem[]; + unitSystem: 'metric' | 'imperial'; + session: Session | null; + onAddItem: (itemName: string, category: string) => Promise; + shoppingLists: ShoppingList[]; + activeListId: number | null; + onAddItemToList: (masterItemId: number) => void; +} + +export const ExtractedDataTable: React.FC = ({ items, totalActiveItems, watchedItems = [], masterItems, unitSystem, session, onAddItem, shoppingLists, activeListId, onAddItemToList }) => { + const [categoryFilter, setCategoryFilter] = useState('all'); + + const watchedItemIds = useMemo(() => new Set(watchedItems.map(item => item.id)), [watchedItems]); + const masterItemsMap = useMemo(() => new Map(masterItems.map(item => [item.id, item.name])), [masterItems]); + + const activeShoppingListItems = useMemo(() => { + if (!activeListId) return new Set(); + const activeList = shoppingLists.find(list => list.id === activeListId); + return new Set(activeList?.items.map(item => item.master_item_id)); + }, [shoppingLists, activeListId]); + + const availableCategories = useMemo(() => { + const cats = new Set(items.map(i => i.category_name).filter((c): c is string => !!c)); + return Array.from(cats).sort(); + }, [items]); + + const itemsWithCanonicalNames = useMemo(() => { + return items.map(item => ({ + ...item, + resolved_canonical_name: 
item.master_item_id ? masterItemsMap.get(item.master_item_id) : null, + })); + }, [items, masterItemsMap]); + + + const sortedItems = useMemo(() => { + const filtered = categoryFilter === 'all' + ? itemsWithCanonicalNames + : itemsWithCanonicalNames.filter(item => item.category_name === categoryFilter); + + if (watchedItemIds.size === 0) { + return filtered; + } + const watched = []; + const others = []; + for (const item of filtered) { + const isWatched = item.master_item_id && watchedItemIds.has(item.master_item_id); + if (isWatched) { + watched.push(item); + } else { + others.push(item); + } + } + return [...watched, ...others]; + }, [itemsWithCanonicalNames, watchedItemIds, categoryFilter]); + + if (items.length === 0) { + return ( +
+

No items extracted yet.

+
+ ); + } + + const title = (totalActiveItems && totalActiveItems > 0) + ? `Item List (${items.length} in flyer / ${totalActiveItems} total active deals)` + : `Item List (${items.length})`; + + return ( +
+
+

+ {title} +

+ {availableCategories.length > 1 && ( + + )} +
+ +
+ {sortedItems.length === 0 ? ( +
+ No items found for the selected category. +
+ ) : ( + + + {sortedItems.map((item, index) => { + const canonicalName = item.resolved_canonical_name; + const isWatched = item.master_item_id && watchedItemIds.has(item.master_item_id); + const isInList = !!(item.master_item_id && activeShoppingListItems.has(item.master_item_id)); + + const itemNameClass = isWatched + ? 'text-sm font-bold text-green-600 dark:text-green-400' + : 'text-sm font-semibold text-gray-900 dark:text-white'; + + const shouldShowCanonical = canonicalName && canonicalName.toLowerCase() !== item.item.toLowerCase(); + const formattedUnitPrice = formatUnitPrice(item.unit_price, unitSystem); + + return ( + + + + ); + })} + +
+
+
{item.item}
+
+ {session && canonicalName && !isInList && ( + + )} + {session && !isWatched && canonicalName && ( + + )} +
+
+
+
+ Price: + {item.price_display} +
+
+ Deal: +
+ {item.quantity} + {item.quantity_num && ({item.quantity_num})} +
+
+
+ Unit Price: +
+ + {formattedUnitPrice.price} + + {formattedUnitPrice.unit && ( + + {formattedUnitPrice.unit} + + )} +
+
+
+ Category: + {item.category_name} + {shouldShowCanonical && ( + (Canonical: {canonicalName}) + )} +
+
+
+ )} +
+
+ ); +}; \ No newline at end of file diff --git a/components/FileUpload.tsx b/components/FileUpload.tsx new file mode 100644 index 0000000..1a7e682 --- /dev/null +++ b/components/FileUpload.tsx @@ -0,0 +1,5 @@ + +// This component is no longer in use. +// Its functionality has been merged into components/BulkImporter.tsx +// to create a single, unified file upload experience. +// This file can be safely deleted. diff --git a/components/FlyerDisplay.tsx b/components/FlyerDisplay.tsx new file mode 100644 index 0000000..7ff010a --- /dev/null +++ b/components/FlyerDisplay.tsx @@ -0,0 +1,58 @@ + +import React from 'react'; +import type { Store } from '../types'; + +const formatDateRange = (from: string | null | undefined, to: string | null | undefined): string | null => { + if (!from && !to) return null; + const options: Intl.DateTimeFormatOptions = { month: 'long', day: 'numeric', year: 'numeric' }; + + const fromDate = from ? new Date(`${from}T00:00:00`).toLocaleDateString('en-US', options) : null; + const toDate = to ? new Date(`${to}T00:00:00`).toLocaleDateString('en-US', options) : null; + + if (fromDate && toDate) { + return fromDate === toDate ? `Valid on ${fromDate}` : `Deals valid from ${fromDate} to ${toDate}`; + } + return fromDate ? `Deals start ${fromDate}` : (toDate ? `Deals end ${toDate}` : null); +}; + +interface FlyerDisplayProps { + imageUrl: string | null; + store?: Store; + validFrom?: string | null; + validTo?: string | null; + storeAddress?: string | null; +} + +export const FlyerDisplay: React.FC = ({ imageUrl, store, validFrom, validTo, storeAddress }) => { + const dateRange = formatDateRange(validFrom, validTo); + + return ( +
+ {(store || dateRange) && ( +
+ {store?.logo_url && ( + {`${store.name + )} +
+ {store?.name &&

{store.name}

} + {dateRange &&

{dateRange}

} + {storeAddress &&

{storeAddress}

} +
+
+ )} +
+ {imageUrl ? ( + Grocery Flyer + ) : ( +
+

Flyer image will be displayed here

+
+ )} +
+
+ ); +}; diff --git a/components/FlyerList.tsx b/components/FlyerList.tsx new file mode 100644 index 0000000..0a5aa44 --- /dev/null +++ b/components/FlyerList.tsx @@ -0,0 +1,66 @@ +import React from 'react'; +import type { Flyer } from '../types'; +import { DocumentTextIcon } from './icons/DocumentTextIcon'; + +const formatShortDate = (dateString: string | null | undefined): string | null => { + if (!dateString) return null; + // Dates from DB are YYYY-MM-DD, which can be interpreted as UTC midnight by new Date(). + // Appending T00:00:00 ensures it's not shifted by local timezone. + try { + return new Date(`${dateString}T00:00:00`).toLocaleDateString('en-US', { month: 'short', day: 'numeric' }); + } catch { + return null; + } +} + + +interface FlyerListProps { + flyers: Flyer[]; + onFlyerSelect: (flyer: Flyer) => void; + selectedFlyerId: number | null; +} + +export const FlyerList: React.FC = ({ flyers, onFlyerSelect, selectedFlyerId }) => { + return ( +
+

+ Processed Flyers +

+ {flyers.length > 0 ? ( +
    + {flyers.map(flyer => { + const from = formatShortDate(flyer.valid_from); + const to = formatShortDate(flyer.valid_to); + const dateRange = from && to ? (from === to ? from : `${from} - ${to}`) : from || to; + + return ( +
  • onFlyerSelect(flyer)} + className={`p-4 flex items-center space-x-3 cursor-pointer transition-colors duration-200 ${selectedFlyerId === flyer.id ? 'bg-brand-light dark:bg-brand-dark/30' : 'hover:bg-gray-50 dark:hover:bg-gray-800'}`} + > + +
    +

    + {flyer.store?.name || 'Unknown Store'} +

    +

    + {flyer.file_name} +

    +

    + {`Processed: ${new Date(flyer.created_at).toLocaleDateString()}`} + {dateRange && ` • Valid: ${dateRange}`} +

    +
    +
  • + ); + })} +
+ ) : ( +

+ No flyers have been processed yet. Upload one to get started. +

+ )} +
+ ); +}; \ No newline at end of file diff --git a/components/Header.tsx b/components/Header.tsx new file mode 100644 index 0000000..b87211b --- /dev/null +++ b/components/Header.tsx @@ -0,0 +1,95 @@ +import React, { useState } from 'react'; +import { ShoppingCartIcon } from './icons/ShoppingCartIcon'; +import { DarkModeToggle } from './DarkModeToggle'; +import { UnitSystemToggle } from './UnitSystemToggle'; +import { Session } from '@supabase/supabase-js'; +import { supabase } from '../services/supabaseClient'; +import { AuthModal } from './AuthModal'; +import { UserIcon } from './icons/UserIcon'; +import { CogIcon } from './icons/CogIcon'; +import { MicrophoneIcon } from './icons/MicrophoneIcon'; + +interface HeaderProps { + isDarkMode: boolean; + toggleDarkMode: () => void; + unitSystem: 'metric' | 'imperial'; + toggleUnitSystem: () => void; + session: Session | null; + onOpenProfile: () => void; + onOpenVoiceAssistant: () => void; + onSignOut: () => void; +} + +export const Header: React.FC = ({ isDarkMode, toggleDarkMode, unitSystem, toggleUnitSystem, session, onOpenProfile, onOpenVoiceAssistant, onSignOut }) => { + const [isAuthModalOpen, setIsAuthModalOpen] = useState(false); + + return ( + <> +
+
+
+
+ +

+ Flyer Crawler +

+
+
+ {session && ( + + )} + + +
+ {session ? ( +
+
+ + {session.user.email} +
+ + +
+ ) : ( +
+ + +
+ )} +
+
+
+
+ {isAuthModalOpen && supabase && setIsAuthModalOpen(false)} />} + + ); +}; \ No newline at end of file diff --git a/components/LoadingSpinner.tsx b/components/LoadingSpinner.tsx new file mode 100644 index 0000000..9fcc422 --- /dev/null +++ b/components/LoadingSpinner.tsx @@ -0,0 +1,9 @@ + +import React from 'react'; + +export const LoadingSpinner: React.FC = () => ( + + + + +); \ No newline at end of file diff --git a/components/LoginPage.tsx b/components/LoginPage.tsx new file mode 100644 index 0000000..c6d617f --- /dev/null +++ b/components/LoginPage.tsx @@ -0,0 +1,98 @@ +import React, { useState } from 'react'; +import { ShoppingCartIcon } from './icons/ShoppingCartIcon'; +import { LoadingSpinner } from './LoadingSpinner'; + +interface LoginPageProps { + onLogin: (email: string, pass: string) => void; + error: string | null; +} + +export const LoginPage: React.FC = ({ onLogin, error }) => { + const [email, setEmail] = useState('test@test.com'); + const [password, setPassword] = useState('pass123'); + const [isLoading, setIsLoading] = useState(false); + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + setIsLoading(true); + // Simulate network delay + setTimeout(() => { + onLogin(email, password); + setIsLoading(false); + }, 500); + }; + + return ( +
+
+
+ +
+

+ Sign in to Flyer Crawler +

+

+ Use test@test.com and pass123 +

+
+ +
+
+
+ +
+ setEmail(e.target.value)} + className="block w-full rounded-md border-0 py-1.5 px-2 text-gray-900 dark:text-white bg-white dark:bg-gray-800 shadow-sm ring-1 ring-inset ring-gray-300 dark:ring-gray-700 placeholder:text-gray-400 focus:ring-2 focus:ring-inset focus:ring-brand-secondary sm:text-sm sm:leading-6" + /> +
+
+ +
+
+ +
+
+ setPassword(e.target.value)} + className="block w-full rounded-md border-0 py-1.5 px-2 text-gray-900 dark:text-white bg-white dark:bg-gray-800 shadow-sm ring-1 ring-inset ring-gray-300 dark:ring-gray-700 placeholder:text-gray-400 focus:ring-2 focus:ring-inset focus:ring-brand-secondary sm:text-sm sm:leading-6" + /> +
+
+ + {error && ( +
+ {error} +
+ )} + +
+ +
+
+
+
+ ); +}; diff --git a/components/PriceChart.tsx b/components/PriceChart.tsx new file mode 100644 index 0000000..ee8e34c --- /dev/null +++ b/components/PriceChart.tsx @@ -0,0 +1,101 @@ +import React from 'react'; +import type { DealItem } from '../types'; +import { TagIcon } from './icons/TagIcon'; +import { LoadingSpinner } from './LoadingSpinner'; +import { formatUnitPrice } from '../utils/unitConverter'; +import { Session } from '@supabase/supabase-js'; +import { UserIcon } from './icons/UserIcon'; + +interface PriceChartProps { + deals: DealItem[]; + isLoading: boolean; + unitSystem: 'metric' | 'imperial'; + session: Session | null; +} + +export const PriceChart: React.FC = ({ deals, isLoading, unitSystem, session }) => { + const renderContent = () => { + if (!session) { + return ( +
+ +

Personalized Deals

+

+ Log in to see active deals for items on your watchlist. +

+
+ ); + } + + if (isLoading) { + return ( +
+ Finding active deals... +
+ ); + } + + if (deals.length === 0) { + return

No deals for your watched items found in any currently valid flyers.

; + } + + return ( +
+ + + + + + + + + + + {deals.map((deal, index) => { + const formattedUnitPrice = formatUnitPrice(deal.unit_price, unitSystem); + return ( + + + + + + + ) + })} + +
ItemStorePriceUnit Price
+
+ {deal.item} + {deal.master_item_name && deal.master_item_name.toLowerCase() !== deal.item.toLowerCase() && ( + + ({deal.master_item_name}) + + )} +
+
{deal.quantity}
+
{deal.storeName}{deal.price_display} +
+ + {formattedUnitPrice.price} + + {formattedUnitPrice.unit && ( + + {formattedUnitPrice.unit} + + )} +
+
+
+ ); + }; + + return ( +
+

+ + Active Deals on Watched Items +

+ {renderContent()} +
+ ); +}; \ No newline at end of file diff --git a/components/PriceHistoryChart.tsx b/components/PriceHistoryChart.tsx new file mode 100644 index 0000000..9516620 --- /dev/null +++ b/components/PriceHistoryChart.tsx @@ -0,0 +1,187 @@ +import React, { useState, useEffect, useMemo } from 'react'; +import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, Legend, ResponsiveContainer } from 'recharts'; +import { loadAllHistoricalItems } from '../services/supabaseClient'; +import { LoadingSpinner } from './LoadingSpinner'; +import type { MasterGroceryItem, FlyerItem } from '../types'; + +type HistoricalData = Record; // price is in cents +type ChartData = { date: string; [itemName: string]: number | string }; + +const COLORS = ['#10B981', '#3B82F6', '#F59E0B', '#EF4444', '#8B5CF6', '#EC4899']; + +interface PriceHistoryChartProps { + watchedItems: MasterGroceryItem[]; +} + +export const PriceHistoryChart: React.FC = ({ watchedItems }) => { + const [historicalData, setHistoricalData] = useState({}); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + + const watchedItemsMap = useMemo(() => new Map(watchedItems.map(item => [item.id, item.name])), [watchedItems]); + + useEffect(() => { + if (watchedItems.length === 0) { + setIsLoading(false); + setHistoricalData({}); + return; + } + + const fetchData = async () => { + setIsLoading(true); + setError(null); + try { + const rawData: Pick[] = await loadAllHistoricalItems(watchedItems); + if (rawData.length === 0) { + setHistoricalData({}); + return; + } + + const processedData = rawData.reduce((acc, record) => { + if (!record.master_item_id || record.price_in_cents === null || !record.created_at) return acc; + + const itemName = watchedItemsMap.get(record.master_item_id); + if (!itemName) return acc; + + const priceInCents = record.price_in_cents; + const date = new Date(record.created_at).toLocaleDateString('en-US', { month: 'short', day: 'numeric' }); + + if(priceInCents === 
0) return acc; + + if (!acc[itemName]) { + acc[itemName] = []; + } + + // Ensure we only store the LOWEST price for a given day + const existingEntryIndex = acc[itemName].findIndex(entry => entry.date === date); + if (existingEntryIndex > -1) { + if (priceInCents < acc[itemName][existingEntryIndex].price) { + acc[itemName][existingEntryIndex].price = priceInCents; + } + } else { + acc[itemName].push({ date, price: priceInCents }); + } + + return acc; + }, {}); + + // Filter out items that only have one data point for a meaningful trend line + const filteredData = Object.entries(processedData).reduce((acc, [key, value]) => { + if(value.length > 1){ + acc[key] = value.sort((a,b) => new Date(a.date).getTime() - new Date(b.date).getTime()); + } + return acc; + }, {}); + + setHistoricalData(filteredData); + } catch (e: any) { + setError(e.message || 'Failed to load price history.'); + } finally { + setIsLoading(false); + } + }; + fetchData(); + }, [watchedItems, watchedItemsMap]); + + const chartData = useMemo(() => { + const availableItems = Object.keys(historicalData); + if (availableItems.length === 0) return []; + + const dateMap: Map = new Map(); + + availableItems.forEach(itemName => { + historicalData[itemName]?.forEach(({ date, price }) => { + if (!dateMap.has(date)) { + dateMap.set(date, { date }); + } + // Store price in cents + dateMap.get(date)![itemName] = price; + }); + }); + + return Array.from(dateMap.values()).sort((a, b) => new Date(a.date).getTime() - new Date(b.date).getTime()); + }, [historicalData]); + + const availableItems = Object.keys(historicalData); + + const renderContent = () => { + if (isLoading) { + return ( +
+ Loading Price History... +
+ ); + } + + if (error) { + return ( +
+

Error: {error}

+
+ ); + } + + if (watchedItems.length === 0) { + return ( +
+

Add items to your watchlist to see their price trends over time.

+
+ ); + } + + if (availableItems.length === 0) { + return ( +
+

Not enough historical data for your watched items. Process more flyers to build a trend.

+
+ ); + } + + return ( + + + + + `$${(Number(value) / 100).toFixed(2)}`} + domain={['dataMin', 'auto']} + /> + `$${(value / 100).toFixed(2)}`} + /> + + {availableItems.map((item, index) => ( + + ))} + + + ) + } + + return ( +
+

Historical Price Trends

+
+ {renderContent()} +
+
+ ); +}; \ No newline at end of file diff --git a/components/ProcessingStatus.tsx b/components/ProcessingStatus.tsx new file mode 100644 index 0000000..eee30b9 --- /dev/null +++ b/components/ProcessingStatus.tsx @@ -0,0 +1,226 @@ +import React, { useState, useEffect } from 'react'; +import { LoadingSpinner } from './LoadingSpinner'; +import { CheckCircleIcon } from './icons/CheckCircleIcon'; +import { ExclamationTriangleIcon } from './icons/ExclamationTriangleIcon'; +import { StageStatus, ProcessingStage } from '../types'; + +interface ProcessingStatusProps { + stages: ProcessingStage[]; + estimatedTime: number; + currentFile?: string | null; + pageProgress?: {current: number, total: number} | null; + bulkProgress?: number; + bulkFileCount?: {current: number, total: number} | null; +} + +interface StageIconProps { + status: StageStatus; + isCritical: boolean; +} + +const StageIcon: React.FC = ({ status, isCritical }) => { + switch (status) { + case 'in-progress': + return
; + case 'completed': + return ; + case 'pending': + return
; + case 'error': + return isCritical ? ( + + + + ) : ( + + ); + default: + return null; + } +}; + +export const ProcessingStatus: React.FC = ({ stages, estimatedTime, currentFile, pageProgress, bulkProgress, bulkFileCount }) => { + const [timeRemaining, setTimeRemaining] = useState(estimatedTime); + + useEffect(() => { + setTimeRemaining(estimatedTime); // Reset when component gets new props + const timer = setInterval(() => { + setTimeRemaining(prevTime => (prevTime > 0 ? prevTime - 1 : 0)); + }, 1000); + + return () => clearInterval(timer); + }, [estimatedTime]); + + const getStatusTextColor = (status: StageStatus, isCritical: boolean) => { + switch (status) { + case 'in-progress': + return 'text-brand-primary font-semibold'; + case 'completed': + return 'text-gray-700 dark:text-gray-300'; + case 'pending': + return 'text-gray-400 dark:text-gray-500'; + case 'error': + return isCritical ? 'text-red-500 font-semibold' : 'text-yellow-600 dark:text-yellow-400'; + default: + return ''; + } + } + + // Render new layout for bulk processing + if (currentFile) { + const extractionStage = stages.find(s => s.name === 'Extracting All Items from Flyer' && s.status === 'in-progress' && s.progress); + + const stageList = ( +
    + {stages.map((stage, index) => { + const isCritical = stage.critical ?? true; + return ( +
  • +
    +
    + +
    + + {stage.name} + {!isCritical && (optional)} + {stage.detail} + +
    +
  • + ); + })} +
+ ); + + return ( +
+

+ Processing Steps for:
+ {currentFile} +

+
+ {/* Left Column: Spinners and Progress Bars */} +
+
+ +
+ + {/* Overall Progress */} + {bulkFileCount && ( +
+

+ File {bulkFileCount.current} of {bulkFileCount.total} +

+
+
+
+
+ )} + + {/* PDF Conversion Progress */} + {pageProgress && pageProgress.total > 1 && ( +
+

+ Converting PDF: Page {pageProgress.current} of {pageProgress.total} +

+
+
+
+
+ )} + + {/* Item Extraction Progress */} + {extractionStage && extractionStage.progress && ( +
+

+ Analyzing page {extractionStage.progress.current} of {extractionStage.progress.total} +

+
+
+
+
+ )} +
+ + {/* Right Column: Checklist */} +
+
+ {stageList} +
+
+
+
+ ); + } + + // Original layout for single file processing + const title = 'Processing Your Flyer...'; + const subTitle = `Estimated time remaining: ${Math.floor(timeRemaining / 60)}m ${timeRemaining % 60}s`; + + return ( +
+

{title}

+

+ {subTitle} +

+ + {pageProgress && pageProgress.total > 1 && ( +
+

+ Converting PDF: Page {pageProgress.current} of {pageProgress.total} +

+
+
+
+
+ )} + +
+
    + {stages.map((stage, index) => { + const isCritical = stage.critical ?? true; + return ( +
  • +
    +
    + +
    + + {stage.name} + {!isCritical && (optional)} + {stage.detail} + +
    + {stage.progress && stage.status === 'in-progress' && stage.progress.total > 1 && ( +
    +

    + Analyzing page {stage.progress.current} of {stage.progress.total} +

    +
    +
    +
    +
    + )} +
  • + ); + })} +
+
+
+ ); +}; \ No newline at end of file diff --git a/components/ProfileManager.tsx b/components/ProfileManager.tsx new file mode 100644 index 0000000..ce1814f --- /dev/null +++ b/components/ProfileManager.tsx @@ -0,0 +1,267 @@ +import React, { useState, useEffect } from 'react'; +import { Session } from '@supabase/supabase-js'; +import type { Profile } from '../types'; +import { supabase, updateUserProfile, updateUserPassword, exportUserData, deleteUserAccount } from '../services/supabaseClient'; +import { LoadingSpinner } from './LoadingSpinner'; +import { XMarkIcon } from './icons/XMarkIcon'; + +interface ProfileManagerProps { + isOpen: boolean; + onClose: () => void; + session: Session; + profile: Profile; + onProfileUpdate: (updatedProfile: Profile) => void; +} + +export const ProfileManager: React.FC = ({ isOpen, onClose, session, profile, onProfileUpdate }) => { + const [activeTab, setActiveTab] = useState('profile'); + + // Profile state + const [fullName, setFullName] = useState(profile.full_name || ''); + const [avatarUrl, setAvatarUrl] = useState(profile.avatar_url || ''); + const [profileLoading, setProfileLoading] = useState(false); + const [profileMessage, setProfileMessage] = useState(''); + + // Password state + const [password, setPassword] = useState(''); + const [confirmPassword, setConfirmPassword] = useState(''); + const [passwordLoading, setPasswordLoading] = useState(false); + const [passwordError, setPasswordError] = useState(''); + const [passwordMessage, setPasswordMessage] = useState(''); + + // Data & Privacy state + const [exportLoading, setExportLoading] = useState(false); + const [isConfirmingDelete, setIsConfirmingDelete] = useState(false); + const [passwordForDelete, setPasswordForDelete] = useState(''); + const [deleteLoading, setDeleteLoading] = useState(false); + const [deleteError, setDeleteError] = useState(''); + + + useEffect(() => { + if (isOpen) { + // Reset state when modal opens + setFullName(profile.full_name || ''); + 
setAvatarUrl(profile.avatar_url || ''); + setActiveTab('profile'); + setIsConfirmingDelete(false); + setPasswordForDelete(''); + setDeleteError(''); + setPasswordError(''); + setPasswordMessage(''); + } + }, [isOpen, profile]); + + const handleProfileSave = async (e: React.FormEvent) => { + e.preventDefault(); + setProfileLoading(true); + setProfileMessage(''); + try { + const updatedProfile = await updateUserProfile(session.user.id, { + full_name: fullName, + avatar_url: avatarUrl + }); + onProfileUpdate(updatedProfile); + setProfileMessage('Profile updated successfully!'); + } catch (error: any) { + setProfileMessage(error.message); + } finally { + setProfileLoading(false); + setTimeout(() => setProfileMessage(''), 3000); + } + }; + + const handlePasswordUpdate = async (e: React.FormEvent) => { + e.preventDefault(); + if (password !== confirmPassword) { + setPasswordError("Passwords do not match."); + return; + } + if (password.length < 6) { + setPasswordError("Password must be at least 6 characters long."); + return; + } + setPasswordLoading(true); + setPasswordError(''); + setPasswordMessage(''); + try { + await updateUserPassword(password); + setPasswordMessage("Password updated successfully!"); + setPassword(''); + setConfirmPassword(''); + } catch (error: any) { + setPasswordError(error.message); + } finally { + setPasswordLoading(false); + setTimeout(() => { + setPasswordMessage(''); + setPasswordError(''); + }, 4000); + } + }; + + const handleExportData = async () => { + setExportLoading(true); + try { + const userData = await exportUserData(session.user.id); + const jsonString = `data:text/json;charset=utf-8,${encodeURIComponent(JSON.stringify(userData, null, 2))}`; + const link = document.createElement("a"); + link.href = jsonString; + link.download = `flyer-crawler-data-export-${new Date().toISOString().split('T')[0]}.json`; + link.click(); + } catch (error: any) { + console.error("Failed to export data:", error); + alert(`Error exporting data: 
${error.message}`); + } finally { + setExportLoading(false); + } + }; + + const handleDeleteAccount = async (e: React.FormEvent) => { + e.preventDefault(); + setDeleteLoading(true); + setDeleteError(''); + try { + await deleteUserAccount(passwordForDelete); + alert("Your account and all associated data have been permanently deleted."); + // The onAuthStateChange listener in App.tsx will handle the UI update + await supabase.auth.signOut(); + onClose(); + } catch (error: any) { + setDeleteError(error.message); + } finally { + setDeleteLoading(false); + } + }; + + if (!isOpen) return null; + + return ( +
+
e.stopPropagation()} + > + + +
+

My Account

+

Manage your profile, preferences, and security.

+ +
+ +
+ + {activeTab === 'profile' && ( +
+
+ + setFullName(e.target.value)} className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm" /> +
+
+ + setAvatarUrl(e.target.value)} className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm" /> +
+
+ + {profileMessage &&

{profileMessage}

} +
+
+ )} + + {activeTab === 'security' && ( +
+
+ + setPassword(e.target.value)} placeholder="••••••••" required className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm" /> +
+
+ + setConfirmPassword(e.target.value)} placeholder="••••••••" required className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm" /> +
+
+ + {passwordError &&

{passwordError}

} + {passwordMessage &&

{passwordMessage}

} +
+
+ )} + + {activeTab === 'data' && ( +
+
+

Export Your Data

+

Download a JSON file of your profile, watched items, and shopping lists.

+ +
+ +
+ +
+

Danger Zone

+

This action is permanent and cannot be undone. All your data will be erased.

+ + {!isConfirmingDelete ? ( + + ) : ( +
+

To confirm, please enter your current password.

+
+ + setPasswordForDelete(e.target.value)} + required + placeholder="Enter your password" + className="block w-full px-3 py-2 bg-white dark:bg-gray-700 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm" + /> +
+ {deleteError &&

{deleteError}

} +
+ + +
+
+ )} +
+
+ )} +
+
+
+ ); +}; \ No newline at end of file diff --git a/components/SampleDataButton.tsx b/components/SampleDataButton.tsx new file mode 100644 index 0000000..5963cfb --- /dev/null +++ b/components/SampleDataButton.tsx @@ -0,0 +1,20 @@ + +import React from 'react'; + +interface SampleDataButtonProps { + onClick: () => void; +} + +export const SampleDataButton: React.FC = ({ onClick }) => { + return ( +
+ +
+ ); +}; + \ No newline at end of file diff --git a/components/ShoppingList.tsx b/components/ShoppingList.tsx new file mode 100644 index 0000000..1bcc7c3 --- /dev/null +++ b/components/ShoppingList.tsx @@ -0,0 +1,206 @@ +import React, { useState, useMemo, useCallback } from 'react'; +import { Session } from '@supabase/supabase-js'; +import type { ShoppingList, ShoppingListItem } from '../types'; +import { UserIcon } from './icons/UserIcon'; +import { ListBulletIcon } from './icons/ListBulletIcon'; +import { LoadingSpinner } from './LoadingSpinner'; +import { TrashIcon } from './icons/TrashIcon'; +import { SpeakerWaveIcon } from './icons/SpeakerWaveIcon'; +import { generateSpeechFromText } from '../services/geminiService'; +import { decode, decodeAudioData } from '../utils/audioUtils'; + +interface ShoppingListComponentProps { + session: Session | null; + lists: ShoppingList[]; + activeListId: number | null; + onSelectList: (listId: number) => void; + onCreateList: (name: string) => Promise; + onDeleteList: (listId: number) => Promise; + onAddItem: (item: { customItemName: string }) => Promise; + onUpdateItem: (itemId: number, updates: Partial) => Promise; + onRemoveItem: (itemId: number) => Promise; +} + +export const ShoppingListComponent: React.FC = ({ session, lists, activeListId, onSelectList, onCreateList, onDeleteList, onAddItem, onUpdateItem, onRemoveItem }) => { + const [isCreatingList, setIsCreatingList] = useState(false); + const [customItemName, setCustomItemName] = useState(''); + const [isAddingCustom, setIsAddingCustom] = useState(false); + const [isReadingAloud, setIsReadingAloud] = useState(false); + + const activeList = useMemo(() => lists.find(list => list.id === activeListId), [lists, activeListId]); + const { neededItems, purchasedItems } = useMemo(() => { + if (!activeList) return { neededItems: [], purchasedItems: [] }; + const neededItems: ShoppingListItem[] = []; + const purchasedItems: ShoppingListItem[] = []; + 
activeList.items.forEach(item => { + if (item.is_purchased) { + purchasedItems.push(item); + } else { + neededItems.push(item); + } + }); + return { neededItems, purchasedItems }; + }, [activeList]); + + const handleCreateList = async () => { + const name = prompt("Enter a name for your new shopping list:"); + if (name && name.trim()) { + setIsCreatingList(true); + await onCreateList(name.trim()); + setIsCreatingList(false); + } + }; + + const handleDeleteList = async () => { + if (activeList && window.confirm(`Are you sure you want to delete the "${activeList.name}" list? This cannot be undone.`)) { + await onDeleteList(activeList.id); + } + }; + + const handleAddCustomItem = async (e: React.FormEvent) => { + e.preventDefault(); + if (!customItemName.trim()) return; + setIsAddingCustom(true); + await onAddItem({ customItemName: customItemName.trim() }); + setCustomItemName(''); + setIsAddingCustom(false); + }; + + const handleReadAloud = useCallback(async () => { + if (!activeList || neededItems.length === 0) return; + + setIsReadingAloud(true); + try { + const listText = "Here is your shopping list: " + neededItems.map(item => item.custom_item_name || item.master_item?.name).join(', '); + const base64Audio = await generateSpeechFromText(listText); + + // Play the audio + const audioContext = new (window.AudioContext)({ sampleRate: 24000 }); + const audioBuffer = await decodeAudioData(decode(base64Audio), audioContext, 24000, 1); + const source = audioContext.createBufferSource(); + source.buffer = audioBuffer; + source.connect(audioContext.destination); + source.start(); + } catch (e: any) { + console.error("Failed to read list aloud:", e); + alert(`Could not read list aloud: ${e.message}`); + } finally { + setIsReadingAloud(false); + } + }, [activeList, neededItems]); + + if (!session) { + return ( +
+
+ +

Your Shopping Lists

+

+ Please log in to manage your shopping lists. +

+
+
+ ); + } + + return ( +
+
+

+ + Shopping List +

+ +
+ +
+ {lists.length > 0 && ( + + )} +
+ + +
+
+ + {activeList ? ( + <> +
+ setCustomItemName(e.target.value)} + placeholder="Add a custom item..." + className="flex-grow block w-full px-3 py-2 bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm sm:text-sm" + disabled={isAddingCustom} + /> + +
+ +
+ {neededItems.length > 0 ? neededItems.map(item => ( +
+ onUpdateItem(item.id, { is_purchased: !item.is_purchased })} + className="h-4 w-4 rounded border-gray-300 text-brand-primary focus:ring-brand-secondary" + /> + {item.custom_item_name || item.master_item?.name} + +
+ )) : ( +

This list is empty.

+ )} + + {purchasedItems.length > 0 && ( +
+

Purchased

+ {purchasedItems.map(item => ( +
+ onUpdateItem(item.id, { is_purchased: !item.is_purchased })} + className="h-4 w-4 rounded border-gray-300 text-brand-primary focus:ring-brand-secondary" + /> + {item.custom_item_name || item.master_item?.name} + +
+ ))} +
+ )} +
+ + ) : ( +
+

No shopping lists found. Create one to get started!

+
+ )} + +
+ ); +}; \ No newline at end of file diff --git a/components/SupabaseConnector.tsx b/components/SupabaseConnector.tsx new file mode 100644 index 0000000..9b91a84 --- /dev/null +++ b/components/SupabaseConnector.tsx @@ -0,0 +1,102 @@ +import React, { useState } from 'react'; +import { initializeSupabase, testDatabaseConnection, disconnectSupabase } from '../services/supabaseClient'; +import { PlugIcon } from './icons/PlugIcon'; +import { LoadingSpinner } from './LoadingSpinner'; + +interface SupabaseConnectorProps { + onSuccess: () => void; +} + +export const SupabaseConnector: React.FC = ({ onSuccess }) => { + const [url, setUrl] = useState(''); + const [anonKey, setAnonKey] = useState(''); + const [isLoading, setIsLoading] = useState(false); + const [error, setError] = useState(null); + + const handleConnect = async (e: React.FormEvent) => { + e.preventDefault(); + setIsLoading(true); + setError(null); + + const client = initializeSupabase(url, anonKey); + if (!client) { + setError("Failed to initialize client. Check credentials."); + setIsLoading(false); + return; + } + + const { success, error: testError } = await testDatabaseConnection(); + + if (success) { + localStorage.setItem('supabaseUrl', url); + localStorage.setItem('supabaseAnonKey', anonKey); + onSuccess(); + } else { + setError(testError || 'Connection failed. Please check your URL, Key, and RLS policies.'); + disconnectSupabase(); // Clear the invalid client + } + + setIsLoading(false); + }; + + return ( +
+

+ + Connect to Database +

+

+ To save and view flyer history, connect to your Supabase project. This is optional. +

+
+
+ + setUrl(e.target.value)} + required + className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-brand-primary focus:border-brand-primary sm:text-sm" + placeholder="https://your-project.supabase.co" + /> +
+
+ + setAnonKey(e.target.value)} + required + className="mt-1 block w-full px-3 py-2 bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-brand-primary focus:border-brand-primary sm:text-sm" + placeholder="ey..." + /> +
+ + {error && ( +
+ {error} +
+ )} +
+
+ ); +}; diff --git a/components/SystemCheck.tsx b/components/SystemCheck.tsx new file mode 100644 index 0000000..3c90c7c --- /dev/null +++ b/components/SystemCheck.tsx @@ -0,0 +1,177 @@ +import React, { useState, useEffect, useCallback } from 'react'; +import { supabase, invokeSystemCheckFunction } from '../services/supabaseClient'; +import { ShieldCheckIcon } from './icons/ShieldCheckIcon'; +import { LoadingSpinner } from './LoadingSpinner'; +import { CheckCircleIcon } from './icons/CheckCircleIcon'; +import { XCircleIcon } from './icons/XCircleIcon'; +import { DatabaseSeeder } from './DatabaseSeeder'; + +type TestStatus = 'idle' | 'running' | 'pass' | 'fail'; + +interface Check { + id: string; + name: string; + status: TestStatus; + message: string; +} + +const initialChecks: Check[] = [ + { id: 'schema', name: 'Database Schema', status: 'idle', message: 'Verifies required tables exist.' }, + { id: 'rls', name: 'RLS Policies', status: 'idle', message: 'Verifies key security policies are active.' }, + { id: 'trigger', name: 'User Creation Trigger', status: 'idle', message: 'Checks function security configuration.' }, + { id: 'storage', name: 'Storage Bucket', status: 'idle', message: "Checks 'flyers' bucket exists and is public." }, + { id: 'functions', name: 'Edge Functions', status: 'idle', message: "Verifies 'delete-user' and 'seed-database' are deployed." }, + { id: 'seed', name: 'Seeded Users', status: 'idle', message: 'Verifies default development users exist.' }, +]; + +interface SystemCheckProps { + onReady: () => void; +} + +export const SystemCheck: React.FC = ({ onReady }) => { + const [checks, setChecks] = useState(initialChecks); + const [isRunning, setIsRunning] = useState(false); + const [hasRunAutoTest, setHasRunAutoTest] = useState(false); + const [showSeeder, setShowSeeder] = useState(false); + + const updateCheckStatus = (id: string, status: TestStatus, message: string) => { + setChecks(prev => prev.map(c => c.id === id ? 
{ ...c, status, message } : c)); + }; + + const runChecks = useCallback(async () => { + setIsRunning(true); + setShowSeeder(false); + setChecks(prev => prev.map(c => ({ ...c, status: 'running', message: 'Checking...' }))); + + let allTestsPassed = true; + + // Step 1: Backend Schema, RLS, Trigger, Storage checks via Edge Function + try { + const results = await invokeSystemCheckFunction(); + for (const key in results) { + const { pass, message } = results[key]; + updateCheckStatus(key, pass ? 'pass' : 'fail', message); + if (!pass) allTestsPassed = false; + } + } catch (e: any) { + allTestsPassed = false; + const failedCheckIds = ['schema', 'rls', 'trigger', 'storage']; + failedCheckIds.forEach(id => updateCheckStatus(id, 'fail', e.message)); + } + + if (!allTestsPassed) { + setIsRunning(false); + return; + } + + // Step 2: Edge Function Deployment + try { + // Test if functions are deployed by calling them. A "Not found" error is a failure. + // Any other error (like missing body) is a pass for this check's purpose. 
+ const { error: seedError } = await supabase.functions.invoke('seed-database', {body: {}}); + if (seedError && seedError.message.includes('Not found')) throw new Error("'seed-database' function not found."); + + const { error: deleteError } = await supabase.functions.invoke('delete-user', {body: {}}); + if (deleteError && deleteError.message.includes('Not found')) throw new Error("'delete-user' function not found."); + + updateCheckStatus('functions', 'pass', 'All required Edge Functions are deployed.'); + } catch (e: any) { + allTestsPassed = false; + updateCheckStatus('functions', 'fail', `${e.message} Please deploy it via the Supabase CLI.`); + } + + if (!allTestsPassed) { + setIsRunning(false); + return; + } + + // Step 3: Seeded User Login + try { + const { error } = await supabase.auth.signInWithPassword({ + email: 'admin@example.com', + password: 'password123', + }); + if (error) throw error; + await supabase.auth.signOut(); + updateCheckStatus('seed', 'pass', 'Default admin user login verified.'); + } catch (e: any) { + allTestsPassed = false; + const message = e.message.includes('Invalid login credentials') + ? "Invalid login credentials. The seeded users are missing from your database." + : `Failed: ${e.message}`; + updateCheckStatus('seed', 'fail', message); + setShowSeeder(true); + } + + setIsRunning(false); + if (allTestsPassed) { + onReady(); + } + }, [onReady]); + + useEffect(() => { + if (supabase && !hasRunAutoTest) { + setHasRunAutoTest(true); + runChecks(); + } + }, [supabase, hasRunAutoTest, runChecks]); + + const getStatusIndicator = (status: TestStatus) => { + switch (status) { + case 'running': return
; + case 'pass': return ; + case 'fail': return ; + case 'idle': return
; + default: return null; + } + }; + + if (!supabase) return null; + + return ( +
+

+ + System Check +

+

+ This checklist verifies your Supabase setup against the README instructions. +

+ +
    + {checks.map(check => ( +
  • +
    {getStatusIndicator(check.status)}
    +
    +

    {check.name}

    +

    + {check.message} +

    +
    +
  • + ))} +
+ + {showSeeder && ( +
+ +
+ )} + + +
+ ); +}; diff --git a/components/TopDeals.tsx b/components/TopDeals.tsx new file mode 100644 index 0000000..492b386 --- /dev/null +++ b/components/TopDeals.tsx @@ -0,0 +1,41 @@ +import React, { useMemo } from 'react'; +import type { FlyerItem } from '../types'; +import { TrophyIcon } from './icons/TrophyIcon'; + +interface TopDealsProps { + items: FlyerItem[]; +} + +export const TopDeals: React.FC = ({ items }) => { + + const topDeals = useMemo(() => { + return [...items] + .filter(item => item.price_in_cents !== null) // Only include items with a parseable price + .sort((a, b) => (a.price_in_cents ?? Infinity) - (b.price_in_cents ?? Infinity)) + .slice(0, 10); + }, [items]); + + if (topDeals.length === 0) { + return null; + } + + return ( +
+

+ + Top 10 Deals Across All Flyers +

+
    + {topDeals.map((item, index) => ( +
  • + {item.item} + {item.price_display} + + (Qty: {item.quantity}) + +
  • + ))} +
+
+ ); +}; \ No newline at end of file diff --git a/components/UnitSystemToggle.tsx b/components/UnitSystemToggle.tsx new file mode 100644 index 0000000..508baac --- /dev/null +++ b/components/UnitSystemToggle.tsx @@ -0,0 +1,31 @@ +import React from 'react'; + +interface UnitSystemToggleProps { + currentSystem: 'metric' | 'imperial'; + onToggle: () => void; +} + +export const UnitSystemToggle: React.FC = ({ currentSystem, onToggle }) => { + const isImperial = currentSystem === 'imperial'; + + return ( +
+ + Metric + + + + Imperial + +
+ ); +}; \ No newline at end of file diff --git a/components/VoiceAssistant.tsx b/components/VoiceAssistant.tsx new file mode 100644 index 0000000..1b73ee2 --- /dev/null +++ b/components/VoiceAssistant.tsx @@ -0,0 +1,192 @@ +import React, { useState, useEffect, useCallback, useRef } from 'react'; +import { startVoiceSession } from '../services/geminiService'; +import { XMarkIcon } from './icons/XMarkIcon'; +import { MicrophoneIcon } from './icons/MicrophoneIcon'; +// FIX: Removed `LiveSession` as it is not an exported member of the module. +import { LiveServerMessage, Blob } from '@google/genai'; +import { encode } from '../utils/audioUtils'; + +interface VoiceAssistantProps { + isOpen: boolean; + onClose: () => void; +} + +type VoiceStatus = 'idle' | 'connecting' | 'listening' | 'speaking' | 'error'; + +export const VoiceAssistant: React.FC = ({ isOpen, onClose }) => { + const [status, setStatus] = useState('idle'); + const [userTranscript, setUserTranscript] = useState(''); + const [modelTranscript, setModelTranscript] = useState(''); + const [history, setHistory] = useState<{speaker: 'user' | 'model', text: string}[]>([]); + + // FIX: Infer the session promise type from the return type of `startVoiceSession` + // to avoid needing to import the `LiveSession` type directly. 
+ const sessionPromiseRef = useRef | null>(null); + const mediaStreamRef = useRef(null); + const audioContextRef = useRef(null); + const scriptProcessorRef = useRef(null); + + const stopRecording = useCallback(() => { + if (mediaStreamRef.current) { + mediaStreamRef.current.getTracks().forEach(track => track.stop()); + mediaStreamRef.current = null; + } + if (scriptProcessorRef.current) { + scriptProcessorRef.current.disconnect(); + scriptProcessorRef.current = null; + } + if (audioContextRef.current && audioContextRef.current.state !== 'closed') { + audioContextRef.current.close(); + audioContextRef.current = null; + } + }, []); + + const handleClose = useCallback(() => { + if (sessionPromiseRef.current) { + sessionPromiseRef.current.then(session => session.close()); + sessionPromiseRef.current = null; + } + stopRecording(); + setStatus('idle'); + setHistory([]); + setUserTranscript(''); + setModelTranscript(''); + onClose(); + }, [onClose, stopRecording]); + + const startSession = useCallback(async () => { + if (status !== 'idle' && status !== 'error') return; + + setStatus('connecting'); + setHistory([]); + setUserTranscript(''); + setModelTranscript(''); + + try { + const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); + mediaStreamRef.current = stream; + + const callbacks = { + onopen: () => { + console.log('Voice session opened.'); + setStatus('listening'); + + // Start streaming microphone audio to the model + audioContextRef.current = new (window.AudioContext)({ sampleRate: 16000 }); + const source = audioContextRef.current.createMediaStreamSource(stream); + const scriptProcessor = audioContextRef.current.createScriptProcessor(4096, 1, 1); + scriptProcessorRef.current = scriptProcessor; + + scriptProcessor.onaudioprocess = (audioProcessingEvent) => { + const inputData = audioProcessingEvent.inputBuffer.getChannelData(0); + const pcmBlob: Blob = { + data: encode(new Uint8Array(new Int16Array(inputData.map(x => x * 32768)).buffer)), + 
mimeType: 'audio/pcm;rate=16000', + }; + sessionPromiseRef.current?.then((session) => { + session.sendRealtimeInput({ media: pcmBlob }); + }); + }; + source.connect(scriptProcessor); + scriptProcessor.connect(audioContextRef.current.destination); + }, + onmessage: (message: LiveServerMessage) => { + // NOTE: This stub doesn't play audio, just displays transcripts. + // A full implementation would use the audioUtils to decode and play audio. + + if (message.serverContent?.inputTranscription) { + setUserTranscript(prev => prev + message.serverContent.inputTranscription.text); + } + if (message.serverContent?.outputTranscription) { + setModelTranscript(prev => prev + message.serverContent.outputTranscription.text); + } + if (message.serverContent?.turnComplete) { + setHistory(prev => [...prev, + { speaker: 'user', text: userTranscript }, + { speaker: 'model', text: modelTranscript } + ]); + setUserTranscript(''); + setModelTranscript(''); + } + }, + onerror: (e: ErrorEvent) => { + console.error('Voice session error:', e); + setStatus('error'); + stopRecording(); + }, + onclose: () => { + console.log('Voice session closed.'); + stopRecording(); + setStatus('idle'); + }, + }; + + sessionPromiseRef.current = startVoiceSession(callbacks); + + } catch (e: any) { + console.error("Failed to start voice session:", e); + setStatus('error'); + } + + }, [status, stopRecording, userTranscript, modelTranscript]); + + + useEffect(() => { + // Cleanup on unmount + return () => { + handleClose(); + }; + }, [handleClose]); + + + if (!isOpen) return null; + + const getStatusText = () => { + switch (status) { + case 'idle': return 'Click the mic to start'; + case 'connecting': return 'Connecting...'; + case 'listening': return 'Listening...'; + case 'speaking': return 'Thinking...'; + case 'error': return 'Connection error. Please try again.'; + } + } + + return ( +
+
e.stopPropagation()} + > +
+

Voice Assistant

+ +
+ +
+ {history.map((entry, index) => ( +
+

{entry.text}

+
+ ))} + {userTranscript &&

{userTranscript}

} + {modelTranscript &&

{modelTranscript}

} +
+ +
+ +

{getStatusText()}

+
+
+
+ ); +}; diff --git a/components/WatchedItemsList.tsx b/components/WatchedItemsList.tsx new file mode 100644 index 0000000..23aa407 --- /dev/null +++ b/components/WatchedItemsList.tsx @@ -0,0 +1,186 @@ +import React, { useState, useMemo } from 'react'; +import type { MasterGroceryItem } from '../types'; +import { EyeIcon } from './icons/EyeIcon'; +import { LoadingSpinner } from './LoadingSpinner'; +import { SortAscIcon } from './icons/SortAscIcon'; +import { SortDescIcon } from './icons/SortDescIcon'; +import { CATEGORIES } from '../types'; +import { Session } from '@supabase/supabase-js'; +import { TrashIcon } from './icons/TrashIcon'; +import { UserIcon } from './icons/UserIcon'; +import { PlusCircleIcon } from './icons/PlusCircleIcon'; + +interface WatchedItemsListProps { + items: MasterGroceryItem[]; + onAddItem: (itemName: string, category: string) => Promise; + onRemoveItem: (masterItemId: number) => Promise; + session: Session | null; + activeListId: number | null; + onAddItemToList: (masterItemId: number) => void; +} + +export const WatchedItemsList: React.FC = ({ items, onAddItem, onRemoveItem, session, activeListId, onAddItemToList }) => { + const [newItemName, setNewItemName] = useState(''); + const [newCategory, setNewCategory] = useState(''); + const [isAdding, setIsAdding] = useState(false); + const [sortOrder, setSortOrder] = useState<'asc' | 'desc'>('asc'); + const [categoryFilter, setCategoryFilter] = useState('all'); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + if (!newItemName.trim() || !newCategory) return; + + setIsAdding(true); + try { + await onAddItem(newItemName, newCategory); + setNewItemName(''); + setNewCategory(''); + } catch (error) { + // Error is handled in the parent component + console.error(error); + } finally { + setIsAdding(false); + } + }; + + const handleSortToggle = () => { + setSortOrder(prev => (prev === 'asc' ? 
'desc' : 'asc')); + }; + + const availableCategories = useMemo(() => { + const cats = new Set(items.map(i => i.category_name).filter((c): c is string => !!c)); + return Array.from(cats).sort(); + }, [items]); + + const sortedAndFilteredItems = useMemo(() => { + const filteredItems = categoryFilter === 'all' + ? items + : items.filter(item => item.category_name === categoryFilter); + + return [...filteredItems].sort((a, b) => { + if (sortOrder === 'asc') { + return a.name.localeCompare(b.name); + } else { + return b.name.localeCompare(a.name); + } + }); + }, [items, sortOrder, categoryFilter]); + + if (!session) { + return ( +
+
+ +

Personalize Your Deals

+

+ Please log in to create and manage your personal watchlist. +

+
+
+ ); + } + + return ( +
+
+

+ + Your Watched Items +

+
+ {items.length > 0 && ( + + )} + {items.length > 1 && ( + + )} +
+
+ +
+ setNewItemName(e.target.value)} + placeholder="Add item (e.g., Avocados)" + className="flex-grow block w-full px-3 py-2 bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded-md shadow-sm placeholder-gray-400 focus:outline-none focus:ring-brand-primary focus:border-brand-primary sm:text-sm" + disabled={isAdding} + /> +
+ + +
+
+ + {sortedAndFilteredItems.length > 0 ? ( +
    + {sortedAndFilteredItems.map(item => ( +
  • +
    + {item.name} + {item.category_name} +
    +
    + + +
    +
  • + ))} +
+ ) : ( +

+ {categoryFilter === 'all' + ? 'Your watchlist is empty. Add items above to start tracking prices.' + : `No watched items in the "${categoryFilter}" category.`} +

+ )} +
+ ); +}; \ No newline at end of file diff --git a/components/icons/BeakerIcon.tsx b/components/icons/BeakerIcon.tsx new file mode 100644 index 0000000..292ee7e --- /dev/null +++ b/components/icons/BeakerIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const BeakerIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/BrainIcon.tsx b/components/icons/BrainIcon.tsx new file mode 100644 index 0000000..8702d64 --- /dev/null +++ b/components/icons/BrainIcon.tsx @@ -0,0 +1,9 @@ + +import React from 'react'; + +export const BrainIcon: React.FC> = (props) => ( + + + +); + \ No newline at end of file diff --git a/components/icons/CheckCircleIcon.tsx b/components/icons/CheckCircleIcon.tsx new file mode 100644 index 0000000..1f83125 --- /dev/null +++ b/components/icons/CheckCircleIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const CheckCircleIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/CogIcon.tsx b/components/icons/CogIcon.tsx new file mode 100644 index 0000000..228e570 --- /dev/null +++ b/components/icons/CogIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const CogIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/DatabaseIcon.tsx b/components/icons/DatabaseIcon.tsx new file mode 100644 index 0000000..2c0cbd2 --- /dev/null +++ b/components/icons/DatabaseIcon.tsx @@ -0,0 +1,8 @@ + +import React from 'react'; + +export const DatabaseIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/DocumentTextIcon.tsx b/components/icons/DocumentTextIcon.tsx new file mode 100644 index 0000000..00ed5af --- /dev/null +++ b/components/icons/DocumentTextIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const DocumentTextIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/ExclamationTriangleIcon.tsx b/components/icons/ExclamationTriangleIcon.tsx new file mode 100644 index 
0000000..43bd6a9 --- /dev/null +++ b/components/icons/ExclamationTriangleIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const ExclamationTriangleIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/EyeIcon.tsx b/components/icons/EyeIcon.tsx new file mode 100644 index 0000000..dc02b09 --- /dev/null +++ b/components/icons/EyeIcon.tsx @@ -0,0 +1,8 @@ +import React from 'react'; + +export const EyeIcon: React.FC> = (props) => ( + + + + +); diff --git a/components/icons/GithubIcon.tsx b/components/icons/GithubIcon.tsx new file mode 100644 index 0000000..825ae7d --- /dev/null +++ b/components/icons/GithubIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const GithubIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/GoogleIcon.tsx b/components/icons/GoogleIcon.tsx new file mode 100644 index 0000000..d27c034 --- /dev/null +++ b/components/icons/GoogleIcon.tsx @@ -0,0 +1,10 @@ +import React from 'react'; + +export const GoogleIcon: React.FC> = (props) => ( + + + + + + +); diff --git a/components/icons/InformationCircleIcon.tsx b/components/icons/InformationCircleIcon.tsx new file mode 100644 index 0000000..04b3b5e --- /dev/null +++ b/components/icons/InformationCircleIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const InformationCircleIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/LightbulbIcon.tsx b/components/icons/LightbulbIcon.tsx new file mode 100644 index 0000000..f2c2488 --- /dev/null +++ b/components/icons/LightbulbIcon.tsx @@ -0,0 +1,9 @@ + +import React from 'react'; + +export const LightbulbIcon: React.FC> = (props) => ( + + + +); + \ No newline at end of file diff --git a/components/icons/ListBulletIcon.tsx b/components/icons/ListBulletIcon.tsx new file mode 100644 index 0000000..c0b973c --- /dev/null +++ b/components/icons/ListBulletIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const 
ListBulletIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/MapPinIcon.tsx b/components/icons/MapPinIcon.tsx new file mode 100644 index 0000000..6155784 --- /dev/null +++ b/components/icons/MapPinIcon.tsx @@ -0,0 +1,8 @@ +import React from 'react'; + +export const MapPinIcon: React.FC> = (props) => ( + + + + +); \ No newline at end of file diff --git a/components/icons/MicrophoneIcon.tsx b/components/icons/MicrophoneIcon.tsx new file mode 100644 index 0000000..991cac8 --- /dev/null +++ b/components/icons/MicrophoneIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const MicrophoneIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/MoonIcon.tsx b/components/icons/MoonIcon.tsx new file mode 100644 index 0000000..78a1231 --- /dev/null +++ b/components/icons/MoonIcon.tsx @@ -0,0 +1,8 @@ + +import React from 'react'; + +export const MoonIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/PdfIcon.tsx b/components/icons/PdfIcon.tsx new file mode 100644 index 0000000..669b518 --- /dev/null +++ b/components/icons/PdfIcon.tsx @@ -0,0 +1,8 @@ + +import React from 'react'; + +export const PdfIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/PhotoIcon.tsx b/components/icons/PhotoIcon.tsx new file mode 100644 index 0000000..9646698 --- /dev/null +++ b/components/icons/PhotoIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const PhotoIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/PlugIcon.tsx b/components/icons/PlugIcon.tsx new file mode 100644 index 0000000..5090c6d --- /dev/null +++ b/components/icons/PlugIcon.tsx @@ -0,0 +1,8 @@ +import React from 'react'; + +export const PlugIcon: React.FC> = (props) => ( + + + + +); diff --git a/components/icons/PlusCircleIcon.tsx b/components/icons/PlusCircleIcon.tsx new file mode 100644 index 0000000..c927a6e --- /dev/null +++ 
b/components/icons/PlusCircleIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const PlusCircleIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/SearchIcon.tsx b/components/icons/SearchIcon.tsx new file mode 100644 index 0000000..3aa6807 --- /dev/null +++ b/components/icons/SearchIcon.tsx @@ -0,0 +1,9 @@ + +import React from 'react'; + +export const SearchIcon: React.FC> = (props) => ( + + + +); + \ No newline at end of file diff --git a/components/icons/ServerIcon.tsx b/components/icons/ServerIcon.tsx new file mode 100644 index 0000000..10bfdb2 --- /dev/null +++ b/components/icons/ServerIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const ServerIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/ShieldCheckIcon.tsx b/components/icons/ShieldCheckIcon.tsx new file mode 100644 index 0000000..8eb92ed --- /dev/null +++ b/components/icons/ShieldCheckIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const ShieldCheckIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/ShoppingCartIcon.tsx b/components/icons/ShoppingCartIcon.tsx new file mode 100644 index 0000000..a44d0d5 --- /dev/null +++ b/components/icons/ShoppingCartIcon.tsx @@ -0,0 +1,9 @@ + +import React from 'react'; + +export const ShoppingCartIcon: React.FC> = (props) => ( + + + +); + \ No newline at end of file diff --git a/components/icons/SortAscIcon.tsx b/components/icons/SortAscIcon.tsx new file mode 100644 index 0000000..1a469fd --- /dev/null +++ b/components/icons/SortAscIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const SortAscIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/SortDescIcon.tsx b/components/icons/SortDescIcon.tsx new file mode 100644 index 0000000..e38b834 --- /dev/null +++ b/components/icons/SortDescIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const 
SortDescIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/SparklesIcon.tsx b/components/icons/SparklesIcon.tsx new file mode 100644 index 0000000..4069707 --- /dev/null +++ b/components/icons/SparklesIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const SparklesIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/SpeakerWaveIcon.tsx b/components/icons/SpeakerWaveIcon.tsx new file mode 100644 index 0000000..5d67afb --- /dev/null +++ b/components/icons/SpeakerWaveIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const SpeakerWaveIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/SunIcon.tsx b/components/icons/SunIcon.tsx new file mode 100644 index 0000000..15d2871 --- /dev/null +++ b/components/icons/SunIcon.tsx @@ -0,0 +1,8 @@ + +import React from 'react'; + +export const SunIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/TagIcon.tsx b/components/icons/TagIcon.tsx new file mode 100644 index 0000000..fac928d --- /dev/null +++ b/components/icons/TagIcon.tsx @@ -0,0 +1,9 @@ + +import React from 'react'; + +export const TagIcon: React.FC> = (props) => ( + + + + +); diff --git a/components/icons/TrashIcon.tsx b/components/icons/TrashIcon.tsx new file mode 100644 index 0000000..33ee3dc --- /dev/null +++ b/components/icons/TrashIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const TrashIcon: React.FC> = (props) => ( + + + +); \ No newline at end of file diff --git a/components/icons/TrophyIcon.tsx b/components/icons/TrophyIcon.tsx new file mode 100644 index 0000000..690384c --- /dev/null +++ b/components/icons/TrophyIcon.tsx @@ -0,0 +1,8 @@ +import React from 'react'; + +export const TrophyIcon: React.FC> = (props) => ( + + + + +); diff --git a/components/icons/UploadIcon.tsx b/components/icons/UploadIcon.tsx new file mode 100644 index 0000000..e464d9f --- /dev/null +++ 
b/components/icons/UploadIcon.tsx @@ -0,0 +1,9 @@ + +import React from 'react'; + +export const UploadIcon: React.FC> = (props) => ( + + + +); + \ No newline at end of file diff --git a/components/icons/UserIcon.tsx b/components/icons/UserIcon.tsx new file mode 100644 index 0000000..fe62d0f --- /dev/null +++ b/components/icons/UserIcon.tsx @@ -0,0 +1,8 @@ + +import React from 'react'; + +export const UserIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/XCircleIcon.tsx b/components/icons/XCircleIcon.tsx new file mode 100644 index 0000000..60f8f3a --- /dev/null +++ b/components/icons/XCircleIcon.tsx @@ -0,0 +1,7 @@ +import React from 'react'; + +export const XCircleIcon: React.FC> = (props) => ( + + + +); diff --git a/components/icons/XMarkIcon.tsx b/components/icons/XMarkIcon.tsx new file mode 100644 index 0000000..5e89aef --- /dev/null +++ b/components/icons/XMarkIcon.tsx @@ -0,0 +1,8 @@ + +import React from 'react'; + +export const XMarkIcon: React.FC> = (props) => ( + + + +); diff --git a/ideas.txt b/ideas.txt new file mode 100644 index 0000000..8389fd5 --- /dev/null +++ b/ideas.txt @@ -0,0 +1,5 @@ +Problem: Graphical Flyers are used by grocery stores to obfuscate their sales - prices and quantity are hard to determine - use AI + OCR to get this data + +Solution: take in grocery and related ad flyers, from store sites and sites like flipp.com, process for item, price and quantity and store in database. Eventually allow user to store common purchases, get alerts to useful sales + prices, and make graphs for over-time understanding of cyclical prices + +Target Market: People who eat food who want to save money \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 0000000..2bb2777 --- /dev/null +++ b/index.html @@ -0,0 +1,54 @@ + + + + + + Grocery Flyer AI Analyzer + + + + + + + + + +
+ + + + \ No newline at end of file diff --git a/index.tsx b/index.tsx new file mode 100644 index 0000000..afee0d1 --- /dev/null +++ b/index.tsx @@ -0,0 +1,17 @@ + +import React from 'react'; +import ReactDOM from 'react-dom/client'; +import App from './App'; + +const rootElement = document.getElementById('root'); +if (!rootElement) { + throw new Error("Could not find root element to mount to"); +} + +const root = ReactDOM.createRoot(rootElement); +root.render( + + + +); + \ No newline at end of file diff --git a/metadata.json b/metadata.json new file mode 100644 index 0000000..cedc0a4 --- /dev/null +++ b/metadata.json @@ -0,0 +1,8 @@ +{ + "name": "Flyer Crawler", + "description": "Upload a grocery store flyer image to extract item details, prices, and quantities using AI. Get insights, meal plans, and compare prices to save money on your shopping.", + "requestFramePermissions": [ + "geolocation", + "microphone" + ] +} \ No newline at end of file diff --git a/notes-for-ai.txt b/notes-for-ai.txt new file mode 100644 index 0000000..3f83e95 --- /dev/null +++ b/notes-for-ai.txt @@ -0,0 +1,8 @@ +First, some rules: +1) if you ever do not have a file that you need, stop, and request it right then. STOP AND REQUEST IT AND THEN DO NOT OUTPUT CODE! +2) never remove logging or comments +3) when creating new files, output there entire path in your explanation, to make it easier to know where to save those new files and directories to +4) add comments when you can, as that will help ensure ideas persist into the app +5) Before you make any destructive changes, you must stop and ask for my explicit confirmation. A 'destructive change' includes: Deleting more than 20 lines of code at once. Deleting an entire file. Deleting a major, self-contained block of code like a schema definition, a component, or a large function. When you identify a need for such a change, you must first state exactly what you intend to delete and why. 
Then, you must wait for me to reply with 'Confirm' or 'Proceed' before you generate the code." +6) Operate in 'conservative mode'. Your primary task is to add or modify code. Do not remove any existing functions, components, files, or large code blocks unless I have explicitly and unambiguously instructed you to do so in the current prompt. If you believe a piece of code is redundant or should be refactored, you may suggest it, but you must not perform the deletion or move yourself without my prior instruction. +7) When you are refactoring by moving a significant amount of code from an existing file to a new file, you are required to provide the complete contents of both files in your response: the newly created file and the original file from which the code was removed. Do not just show me the new file and an empty old file. \ No newline at end of file diff --git a/package copy.json b/package copy.json new file mode 100644 index 0000000..cfd8156 --- /dev/null +++ b/package copy.json @@ -0,0 +1,5 @@ +{ + "devDependencies": { + "supabase": "^2.54.11" + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..7815143 --- /dev/null +++ b/package.json @@ -0,0 +1,24 @@ +{ + "name": "flyer-crawler", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "react": "^19.2.0", + "react-dom": "^19.2.0", + "@google/genai": "^1.28.0", + "recharts": "^3.3.0", + "@supabase/supabase-js": "^2.78.0" + }, + "devDependencies": { + "@types/node": "^22.14.0", + "@vitejs/plugin-react": "^5.0.0", + "typescript": "~5.8.2", + "vite": "^6.2.0" + } +} diff --git a/services/geminiService.ts b/services/geminiService.ts new file mode 100644 index 0000000..072685a --- /dev/null +++ b/services/geminiService.ts @@ -0,0 +1,429 @@ + + +import { GoogleGenAI, Type, Modality } from "@google/genai"; +import type { FlyerItem, MasterGroceryItem, UnitPrice, Store } 
from '../types'; +import { CATEGORIES } from '../types'; +import { parsePriceToCents } from '../utils/priceParser'; + +/* +NOTE ON THE GOOGLE AI API KEY: +This project uses a Google AI (Gemini) API key. In this environment, you do not need to manually create one. +You may see a "Choose a key" dialog. If it mentions a "free tier", you can simply close or ignore that dialog. +The environment will automatically provide a free-tier API key as `process.env.API_KEY` for the AI to work. +*/ + +if (!process.env.API_KEY) { + throw new Error("API_KEY environment variable not set"); +} + +const ai = new GoogleGenAI({ apiKey: process.env.API_KEY }); + +/** + * Parses a JSON string from a Gemini response, robustly handling markdown fences. + * @param responseText The raw text from the AI response. + * @returns The parsed JSON object. + */ +function parseGeminiJson<T>(responseText: string): T { + let cleanedText = responseText.trim(); + + // Remove markdown fences ` ```json ... ``` ` + const jsonRegex = /```json\s*([\s\S]*?)\s*```/; + const match = cleanedText.match(jsonRegex); + if (match && match[1]) { + cleanedText = match[1]; + } + + try { + return JSON.parse(cleanedText) as T; + } catch (e: any) { + console.error("Failed to parse JSON response from AI.", { + originalResponse: responseText, + cleanedJSON: cleanedText, + error: e.message, + }); + + // Re-throw with more context. + throw new Error(`Failed to parse JSON response from AI. Error: ${e.message}. 
The AI may have returned malformed data.`); + } +} + +const fileToGenerativePart = async (file: File) => { + const base64EncodedDataPromise = new Promise<string>((resolve) => { + const reader = new FileReader(); + reader.onloadend = () => resolve((reader.result as string).split(',')[1]); + reader.readAsDataURL(file); + }); + return { + inlineData: { data: await base64EncodedDataPromise, mimeType: file.type }, + }; +}; + +export const isImageAFlyer = async (imageFile: File): Promise<boolean> => { + const imagePart = await fileToGenerativePart(imageFile); + try { + const response = await ai.models.generateContent({ + model: 'gemini-flash-lite-latest', + contents: { + parts: [ + imagePart, + { text: `Is this a grocery store flyer or advertisement? Analyze the image and answer with only a JSON object: {"is_flyer": true} or {"is_flyer": false}.` } + ] + }, + config: { + responseMimeType: "application/json", + responseSchema: { + type: Type.OBJECT, + properties: { + is_flyer: { type: Type.BOOLEAN } + }, + required: ['is_flyer'] + } + } + }); + const parsedJson = parseGeminiJson<{ is_flyer: boolean }>(response.text); + return parsedJson.is_flyer; + } catch(e) { + console.error("Flyer check failed:", e); + return false; + } +} + +export const extractAddressFromImage = async (imageFile: File): Promise<string | null> => { + const imagePart = await fileToGenerativePart(imageFile); + const response = await ai.models.generateContent({ + model: 'gemini-flash-lite-latest', + contents: { + parts: [ + imagePart, + { text: `Is there a physical store address visible in this image? If so, extract the full address. If not, return null. Return ONLY a JSON object: {"address": "123 Main St, Anytown, USA"} or {"address": null}.` } + ] + }, + config: { + responseMimeType: "application/json", + responseSchema: { + type: Type.OBJECT, + properties: { + address: { type: Type.STRING, nullable: true, description: "The full store address found in the image, or null if not present." 
}, + }, + required: ['address'] + } + } + }); + const parsedJson = parseGeminiJson<{ address: string | null }>(response.text); + return parsedJson.address; +}; + + +// Raw item structure as returned by the AI model +interface RawFlyerItem { + item: string; + price: string; + quantity: string; + category: string; + quantity_num: number | null; + master_item_id: number | null; + unit_price: UnitPrice | null; +} + +interface ExtractedCoreData { + store_name: string; + valid_from: string | null; + valid_to: string | null; + items: Omit[]; +} + +interface ExtractedLogoData { + store_logo_base_64: string | null; +} + + +export const extractCoreDataFromImage = async (imageFiles: File[], masterItems: MasterGroceryItem[]): Promise => { + const imageParts = await Promise.all(imageFiles.map(fileToGenerativePart)); + + // Create a special "unmatched" item to act as a fallback for the AI. + // This gives the AI a valid, required choice when no other match is suitable. + const UNMATCHED_ITEM_ID = 0; + const unmatchedMasterItem = { id: UNMATCHED_ITEM_ID, name: '_UNMATCHED_' }; + + const masterItemsForPrompt = [ + ...masterItems.map(item => ({ id: item.id, name: item.name })), + unmatchedMasterItem + ]; + + const response = await ai.models.generateContent({ + model: 'gemini-2.5-flash', + contents: { + parts: [ + ...imageParts, + { text: `You are an expert data extraction and matching system for grocery store flyers. Analyze the provided flyer images (which may be multiple pages of the same flyer). +1. Identify the name of the grocery store/company. +2. Identify the date range for which the flyer's deals are valid. Extract a 'valid_from' and 'valid_to' date. You MUST return dates in 'YYYY-MM-DD' format. If no date range is clearly visible, you MUST return 'null' for both date fields. +3. Extract all distinct sale items from all pages. For each item, extract its name, price, and quantity/deal description. +4. 
**Categorization**: For each item, determine its category from the provided list. You MUST select one of the exact category names from the list. +5. **Extract Numeric Quantity**: From the quantity string, extract the primary numeric value if one is present (e.g., for "500g bag", extract 500; for "2L bottle", extract 2; for "per lb", return null). This should be a number, not a string. If no clear number is present, you MUST return 'null'. Store this in the 'quantity_num' field. +6. **CRITICAL ITEM MATCHING (STRICTLY ENFORCED)**: For each extracted item, you MUST match it to its corresponding canonical item from the 'Master Items List'. This is the most important task. + * **RULE 1: BE EXTREMELY STRICT.** The goal is to link an item to its *exact* canonical counterpart (e.g., 'Granny Smith Apples' -> 'apples'), not a vaguely related item. + * **RULE 2: NEW FALLBACK RULE.** If you are not 100% certain of a perfect match, you MUST assign the \`master_item_id\` of the special \`_UNMATCHED_\` item (ID: ${UNMATCHED_ITEM_ID}). This is not optional. Assigning a wrong item is a critical failure. + * **RULE 3: FOCUS ON THE CORE PRODUCT.** Ignore brands unless the master item is brand-specific. For example, 'Compliments Organic Mushrooms' should match a master item named 'mushrooms'. + * **NEGATIVE EXAMPLES (WHAT NOT TO DO):** + * 'Sunrise Salmon Fillets' should NOT be matched to 'chicken thighs'. + * 'Large Avocados' should NOT be matched to 'chicken thighs'. + * If the flyer item is 'bananas' and the only master items are 'apples' and 'oranges', you MUST assign the \`_UNMATCHED_\` ID. + * **FINAL CHECK:** Before assigning an ID, ask: "Is '[flyer item name]' a type of '[master item name]'?". If the answer is no, and no other item fits, you MUST use the \`_UNMATCHED_\` ID. +7. **Unit Price Calculation**: For each item, calculate and provide a 'unit_price'. This is CRITICAL for price comparison. Your goal is to standardize the price into a common unit. 
+ * **Multi-buy deals**: If the price is '2 for $5.00', the unit price object should reflect a value of 2.50 and a unit of 'each'. + * **Price by weight**: If the price is '$3.99/lb', the unit price object should be { "value": 3.99, "unit": "lb" }. + * The unit price MUST be a JSON object: \`{ "value": , "unit": "" }\`. Use standard units: 'g', 'kg', 'ml', 'l' for metric, and 'oz', 'lb', 'fl oz' for imperial. Use 'each' for items sold individually. + * If a unit price cannot be determined or is not applicable, you MUST return \`null\` for this field. + +Return the result as a single JSON object, strictly following the provided schema. It is critical that all string values within the JSON are correctly escaped. + +Category List (for categorization): ${JSON.stringify(CATEGORIES)} +Master Items List (for matching): ${JSON.stringify(masterItemsForPrompt)} +` } + ] + }, + config: { + responseMimeType: "application/json", + responseSchema: { + type: Type.OBJECT, + properties: { + store_name: { type: Type.STRING, description: 'The name of the grocery store (e.g., "Safeway", "Kroger").' }, + valid_from: { type: Type.STRING, description: "The start date of the sale in YYYY-MM-DD format, or null if not found." }, + valid_to: { type: Type.STRING, description: "The end date of the sale in YYYY-MM-DD format, or null if not found." }, + items: { + type: Type.ARRAY, + items: { + type: Type.OBJECT, + properties: { + item: { type: Type.STRING, description: 'The name of the grocery item as it appears in the flyer.' }, + price: { type: Type.STRING, description: 'The price of the item, including currency symbols.' }, + quantity: { type: Type.STRING, description: 'The quantity, unit, or deal description (e.g., "per lb", "5 oz").' }, + category: { type: Type.STRING, description: "The category of the item from the predefined list." 
}, + quantity_num: { type: Type.NUMBER, nullable: true, description: 'The parsed numeric value from the quantity string, or null.'}, + master_item_id: { type: Type.INTEGER, description: "The integer ID of the matching item from the master list, or null if no match is found." }, + unit_price: { + type: Type.OBJECT, + nullable: true, + description: "A structured object for the calculated price per single standard unit, or null if not applicable.", + properties: { + value: { type: Type.NUMBER, description: "The numeric value of the unit price." }, + unit: { type: Type.STRING, description: "The standard unit of measurement (e.g., 'g', 'ml', 'lb', 'oz', 'each')." } + }, + required: ["value", "unit"] + } + }, + required: ['item', 'price', 'quantity', 'category', 'quantity_num', 'master_item_id', 'unit_price'] + } + } + }, + required: ['store_name', 'valid_from', 'valid_to', 'items'] + } + } + }); + + const parsedJson = parseGeminiJson<{ + store_name: string; + valid_from: string | null; + valid_to: string | null; + items: RawFlyerItem[]; + }>(response.text); + + const processedItems: Omit[] = parsedJson.items.map(rawItem => ({ + item: rawItem.item, + price_display: rawItem.price, + price_in_cents: parsePriceToCents(rawItem.price), + quantity: rawItem.quantity, + category_name: rawItem.category, + quantity_num: rawItem.quantity_num, + // Convert the special _UNMATCHED_ ID back to null for the database. + master_item_id: rawItem.master_item_id === UNMATCHED_ITEM_ID ? null : rawItem.master_item_id, + unit_price: rawItem.unit_price, + })); + + const today = new Date().toISOString().split('T')[0]; + + const finalData: ExtractedCoreData = { + store_name: parsedJson.store_name, + // Per user instruction, a date is mandatory. If the AI cannot find one, + // we must use today's date as a fallback. 
+ valid_from: parsedJson.valid_from || today, + valid_to: parsedJson.valid_to || today, + items: processedItems, + }; + + return finalData; +}; + +export const extractLogoFromImage = async (imageFiles: File[]): Promise => { + const imageParts = await Promise.all(imageFiles.map(fileToGenerativePart)); + + const response = await ai.models.generateContent({ + model: 'gemini-2.5-flash', + contents: { + parts: [ + ...imageParts, + { text: `You are a specialized image analysis tool. Your only task is to identify the main store logo in the provided flyer image. Crop it from the image into a small square (approx 64x64 pixels). Return it as a base64-encoded PNG string. If no logo is found, return null. Return ONLY a JSON object matching the specified schema. It is critical that all string values within the JSON are correctly escaped.` } + ] + }, + config: { + responseMimeType: "application/json", + responseSchema: { + type: Type.OBJECT, + properties: { + store_logo_base_64: { type: Type.STRING, description: "A small, cropped, base64-encoded PNG string of the store's logo, or null if not found." }, + }, + required: ['store_logo_base_64'] + } + } + }); + return parseGeminiJson(response.text); +}; + +export const getQuickInsights = async (items: FlyerItem[]): Promise => { + const prompt = `Based on this list of grocery items on sale, provide some quick insights, simple meal ideas, or shopping tips. Keep it concise and easy to read.\n\nItems:\n${JSON.stringify(items, null, 2)}`; + + const response = await ai.models.generateContent({ + model: 'gemini-flash-lite-latest', + contents: prompt + }); + + return response.text; +}; + +export const getDeepDiveAnalysis = async (items: FlyerItem[]): Promise => { + const prompt = `Perform a detailed analysis of these grocery sale items. Create a comprehensive weekly meal plan to maximize savings. Identify the best value-for-money deals, considering unit prices if possible. 
Point out any potential purchasing traps (e.g., items that seem cheap but have a high cost per unit or are near expiration). Format the output in clear, well-structured markdown.\n\nItems:\n${JSON.stringify(items, null, 2)}`; + + const response = await ai.models.generateContent({ + model: 'gemini-2.5-pro', + contents: prompt, + config: { + thinkingConfig: { thinkingBudget: 32768 } + } + }); + + return response.text; +}; + +export const searchWeb = async (items: FlyerItem[]): Promise<{text: string; sources: any[]}> => { + const topItems = items.slice(0, 3).map(i => i.item).join(', '); + const prompt = `Find recipes, nutritional information, or price comparisons for these items: ${topItems}. Provide a summary and the sources you used.`; + + const response = await ai.models.generateContent({ + model: 'gemini-2.5-flash', + contents: prompt, + config: { + tools: [{googleSearch: {}}] + } + }); + + const sources = response.candidates?.[0]?.groundingMetadata?.groundingChunks || []; + return { text: response.text, sources }; +}; + +// ============================================================================ +// STUBS FOR FUTURE AI FEATURES +// ============================================================================ + +/** + * [STUB] Uses Google Maps grounding to find nearby stores and plan a shopping trip. + * @param items The items from the flyer. + * @param store The store associated with the flyer. + * @param userLocation The user's current geographic coordinates. + * @returns A text response with trip planning advice and a list of map sources. 
+ */ +export const planTripWithMaps = async (items: FlyerItem[], store: Store | undefined, userLocation: GeolocationCoordinates): Promise<{text: string; sources: any[]}> => { + console.log("Stub: planTripWithMaps called with location:", userLocation); + const topItems = items.slice(0, 5).map(i => i.item).join(', '); + const storeName = store?.name || 'the grocery store'; + + const response = await ai.models.generateContent({ + model: "gemini-2.5-flash", + contents: `I have a shopping list with items like ${topItems}. Find the nearest ${storeName} to me and suggest the best route. Also, are there any other specialty stores nearby (like a bakery or butcher) that might have good deals on related items?`, + config: { + tools: [{googleMaps: {}}], + toolConfig: { + retrievalConfig: { + latLng: { + latitude: userLocation.latitude, + longitude: userLocation.longitude + } + } + } + }, + }); + + // In a real implementation, you would render the map URLs from the sources. + const sources = response.candidates?.[0]?.groundingMetadata?.groundingChunks || []; + return { text: response.text, sources }; +}; + +/** + * [STUB] Generates an image based on a text prompt using the Imagen model. + * @param prompt A description of the image to generate (e.g., a meal plan). + * @returns A base64-encoded string of the generated PNG image. + */ +export const generateImageFromText = async (prompt: string): Promise => { + console.log("Stub: generateImageFromText called with prompt:", prompt); + const response = await ai.models.generateImages({ + model: 'imagen-4.0-generate-001', + prompt: `A vibrant, appetizing flat-lay photo of a meal plan featuring: ${prompt}. Studio lighting, high detail.`, + config: { + numberOfImages: 1, + outputMimeType: 'image/png', + aspectRatio: '16:9', + }, + }); + + const base64ImageBytes: string = response.generatedImages[0].image.imageBytes; + return base64ImageBytes; +}; + +/** + * [STUB] Converts a string of text into speech audio data. 
+ * @param text The text to be spoken. + * @returns A base64-encoded string of the raw audio data. + */ +export const generateSpeechFromText = async (text: string): Promise => { + console.log("Stub: generateSpeechFromText called with text:", text); + const response = await ai.models.generateContent({ + model: "gemini-2.5-flash-preview-tts", + contents: [{ parts: [{ text: `Say cheerfully: ${text}` }] }], + config: { + responseModalities: [Modality.AUDIO], + speechConfig: { + voiceConfig: { + prebuiltVoiceConfig: { voiceName: 'Kore' }, + }, + }, + }, + }); + const base64Audio = response.candidates?.[0]?.content?.parts?.[0]?.inlineData?.data; + if (!base64Audio) { + throw new Error("No audio data returned from TTS API."); + } + return base64Audio; +}; + +/** + * [STUB] Initiates a real-time voice conversation session using the Live API. + * @param callbacks An object containing onopen, onmessage, onerror, and onclose handlers. + * @returns A promise that resolves to the live session object. + */ +export const startVoiceSession = (callbacks: any) => { + console.log("Stub: startVoiceSession called."); + // This returns the promise that the UI will use to send data once the connection is open. + return ai.live.connect({ + model: 'gemini-2.5-flash-native-audio-preview-09-2025', + callbacks: callbacks, + config: { + responseModalities: [Modality.AUDIO], + speechConfig: { + voiceConfig: { prebuiltVoiceConfig: { voiceName: 'Zephyr' } }, + }, + inputAudioTranscription: {}, + outputAudioTranscription: {}, + systemInstruction: 'You are a friendly and helpful grocery shopping assistant. 
Keep your answers concise.', + }, + }); +}; \ No newline at end of file diff --git a/services/supabaseClient.ts b/services/supabaseClient.ts new file mode 100644 index 0000000..d7e032c --- /dev/null +++ b/services/supabaseClient.ts @@ -0,0 +1,806 @@ +import { createClient, SupabaseClient } from '@supabase/supabase-js'; +import type { Flyer, FlyerItem, MasterGroceryItem, Profile, ShoppingList, ShoppingListItem } from '../types'; + +export let supabase: SupabaseClient | null = null; + +// Attempt to initialize from environment variables +const supabaseUrl = process.env.REACT_APP_SUPABASE_URL; +const supabaseAnonKey = process.env.REACT_APP_SUPABASE_ANON_KEY; + +if (supabaseUrl && supabaseAnonKey) { + try { + supabase = createClient(supabaseUrl, supabaseAnonKey); + } catch (e) { + console.error("Failed to initialize Supabase from env vars:", e); + supabase = null; + } +} + +/** + * Initializes the Supabase client. Can be called with user-provided credentials. + * @param url - The Supabase project URL. + * @param key - The Supabase anon key. + * @returns The Supabase client instance. + */ +export const initializeSupabase = (url: string, key: string): SupabaseClient => { + if (!supabase) { + supabase = createClient(url, key); + } + return supabase; +}; + +/** + * Disconnects the Supabase client by setting the instance to null. + */ +export const disconnectSupabase = () => { + supabase = null; + // Clear stored credentials on explicit disconnect + localStorage.removeItem('supabaseUrl'); + localStorage.removeItem('supabaseAnonKey'); +}; + +/** + * Tests basic read access to the database. + * @returns An object indicating success and any error message. + */ +export const testDatabaseConnection = async (): Promise<{ success: boolean; error: string | null }> => { + if (!supabase) return { success: false, error: 'Supabase client not initialized.' 
}; + try { + const { error } = await supabase.from('stores').select('id').limit(1); + if (error) throw error; + return { success: true, error: null }; + } catch (error: any) { + return { success: false, error: `Database connection test failed: ${error.message}. Check RLS policies.` }; + } +}; + +/** + * Performs a full CRUD (Create, Read, Update, Delete) test on a table. + * @returns An object indicating success and any error message. + */ +export const runDatabaseSelfTest = async (): Promise<{ success: boolean; error: string | null }> => { + if (!supabase) return { success: false, error: 'Supabase client not initialized.' }; + + const testItem = { + item: `DB_SELF_TEST_ITEM_${Date.now()}`, + price_display: '$0.00', + quantity: 'test', + }; + + try { + // 1. Insert + const { data: insertData, error: insertError } = await supabase + .from('flyer_items') + .insert(testItem) + .select() + .single(); + if (insertError) throw new Error(`Insert failed: ${insertError.message}`); + if (!insertData) throw new Error('Insert did not return data.'); + + // 2. Select (implicit in insert's .select()) + const testItemId = insertData.id; + + // 3. Update + const { error: updateError } = await supabase + .from('flyer_items') + .update({ item: 'DB_SELF_TEST_ITEM_UPDATED' }) + .eq('id', testItemId); + if (updateError) throw new Error(`Update failed: ${updateError.message}`); + + // 4. Delete + const { error: deleteError } = await supabase + .from('flyer_items') + .delete() + .eq('id', testItemId); + if (deleteError) throw new Error(`Delete failed: ${deleteError.message}`); + + return { success: true, error: null }; + } catch (error: any) { + return { success: false, error: `Database self-test failed: ${error.message}. Check table permissions (select, insert, update, delete) and RLS policies for 'flyer_items'.` }; + } +}; + +/** + * Tests storage by uploading and deleting a file. + * @returns An object indicating success and any error message. 
+ */ +export const testStorageConnection = async (): Promise<{ success: boolean; error: string | null }> => { + if (!supabase) return { success: false, error: 'Supabase client not initialized.' }; + + const bucketName = 'flyers'; + const testFileName = `storage-self-test-${Date.now()}.txt`; + const testFileContent = 'test'; + + try { + // 1. Upload + const { error: uploadError } = await supabase.storage + .from(bucketName) + .upload(testFileName, testFileContent); + if (uploadError) throw new Error(`Upload to storage failed: ${uploadError.message}`); + + // 2. Delete + const { error: deleteError } = await supabase.storage + .from(bucketName) + .remove([testFileName]); + if (deleteError) throw new Error(`Deleting from storage failed: ${deleteError.message}`); + + return { success: true, error: null }; + } catch (error: any) { + return { success: false, error: `Storage connection test failed: ${error.message}. Check bucket permissions (select, insert, delete) and RLS policies for bucket '${bucketName}'.` }; + } +}; + +/** + * Uploads a flyer image to Supabase storage. + * @param file The image file to upload. + * @returns The public URL of the uploaded image. + */ +export const uploadFlyerImage = async (file: File): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + + const fileName = `${Date.now()}-${file.name.replace(/[^a-zA-Z0-9.-]/g, '_')}`; + const { data, error } = await supabase.storage + .from('flyers') + .upload(fileName, file); + if (error) throw new Error(`Failed to upload flyer image: ${error.message}`); + + const { data: { publicUrl } } = supabase.storage.from('flyers').getPublicUrl(data.path); + if (!publicUrl) throw new Error("Could not get public URL for uploaded flyer image."); + + return publicUrl; +}; + +/** + * Creates a record for a new flyer in the database, handling store creation if needed. + * @returns The newly created flyer object, joined with its store. 
+ */ +export const createFlyerRecord = async ( + fileName: string, + imageUrl: string, + checksum: string, + storeName: string, + validFrom: string | null, + validTo: string | null, + storeAddress: string | null +): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + + let { data: store } = await supabase + .from('stores') + .select('*') + .ilike('name', storeName) + .single(); + + if (!store) { + const { data: newStore, error: newStoreError } = await supabase + .from('stores') + .insert({ name: storeName }) + .select() + .single(); + if (newStoreError) throw new Error(`Error creating store: ${newStoreError.message}`); + store = newStore; + } + + const { data: newFlyer, error: flyerError } = await supabase + .from('flyers') + .insert({ + file_name: fileName, + image_url: imageUrl, + checksum: checksum, + store_id: store.id, + valid_from: validFrom, + valid_to: validTo, + store_address: storeAddress, + }) + .select('*, store:stores(*)') + .single(); + + if (flyerError) throw new Error(`Failed to create flyer record: ${flyerError.message}`); + if (!newFlyer) throw new Error("Flyer record creation did not return data."); + + return newFlyer as Flyer; +}; + +/** + * Saves a list of extracted items to the database. + * @param items The items to save. + * @param flyerId The ID of the flyer these items belong to. + * @returns The array of saved items with their new IDs. + */ +export const saveFlyerItems = async (items: Omit[], flyerId: number): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + if (items.length === 0) return []; + + const itemsToInsert = items.map(item => ({ ...item, flyer_id: flyerId })); + + const { data: savedItems, error } = await supabase + .from('flyer_items') + .insert(itemsToInsert) + .select(); + if (error) throw new Error(`Failed to save flyer items: ${error.message}`); + + return savedItems; +}; + +/** + * Retrieves all flyers from the database, ordered by most recent. 
+ * @returns An array of flyer objects. + */ +export const getFlyers = async (): Promise => { + if (!supabase) return []; + + const { data, error } = await supabase + .from('flyers') + .select('*, store:stores(*)') + .order('created_at', { ascending: false }); + if (error) throw new Error(`Failed to get flyers: ${error.message}`); + + return data || []; +}; + +/** + * Retrieves all items for a specific flyer. + * @param flyerId The ID of the flyer. + * @returns An array of flyer item objects. + */ +export const getFlyerItems = async (flyerId: number): Promise => { + if (!supabase) return []; + + const { data, error } = await supabase + .from('flyer_items') + .select('*') + .eq('flyer_id', flyerId) + .order('item', { ascending: true }); + if (error) throw new Error(`Failed to get flyer items: ${error.message}`); + + return data || []; +}; + +/** + * Looks for an existing flyer with a matching checksum to prevent duplicates. + * @param checksum The SHA-256 checksum of the file. + * @returns The found flyer object or null. + */ +export const findFlyerByChecksum = async (checksum: string): Promise => { + if (!supabase) return null; + + const { data, error } = await supabase + .from('flyers') + .select('*') + .eq('checksum', checksum) + .single(); + + if (error && error.code !== 'PGRST116') throw new Error(`Error finding flyer by checksum: ${error.message}`); + + return data; +}; + +/** + * Uploads a store logo and updates the store record. + * This is designed to be a non-critical step. It won't throw an error but will log warnings. + * It will only update the store if no logo_url is currently set. + * @param storeId The ID of the store to update. + * @param logoBase64 The base64 encoded logo string. + * @returns The public URL of the logo if successful, otherwise null. 
+ */ +export const uploadLogoAndUpdateStore = async (storeId: number, logoBase64: string): Promise => { + if (!supabase) { + console.warn("Cannot upload logo: Supabase client not initialized."); + return null; + } + + try { + // Helper function to convert base64 to a Blob for uploading. + const base64ToBlob = (base64: string, mimeType: string): Blob => { + const byteCharacters = atob(base64); + const byteNumbers = new Array(byteCharacters.length); + for (let i = 0; i < byteCharacters.length; i++) { + byteNumbers[i] = byteCharacters.charCodeAt(i); + } + const byteArray = new Uint8Array(byteNumbers); + return new Blob([byteArray], { type: mimeType }); + }; + + const logoBlob = base64ToBlob(logoBase64, 'image/png'); + const filePath = `logos/store_logo_${storeId}.png`; + + const { data, error: uploadError } = await supabase.storage + .from('flyers') + .upload(filePath, logoBlob, { + cacheControl: '3600', + upsert: true, // Overwrite if it exists, simplifies logic + }); + + if (uploadError) { + console.warn(`Failed to upload logo image: ${uploadError.message}`); + return null; + } + + const { data: { publicUrl } } = supabase.storage.from('flyers').getPublicUrl(data.path); + if (!publicUrl) { + console.warn("Could not get public URL for uploaded logo."); + return null; + } + + // Update the store record with the new URL, but only if it's currently null. + const { error: updateError } = await supabase + .from('stores') + .update({ logo_url: publicUrl }) + .eq('id', storeId) + .is('logo_url', null); + + if (updateError) { + console.warn(`Failed to update store with new logo URL: ${updateError.message}`); + } + + return publicUrl; + } catch (e: any) { + console.warn(`An error occurred during logo processing: ${e.message}`); + return null; + } +}; + +/** + * Retrieves all items a specific user is watching. + * @param userId The UUID of the user. + * @returns An array of master grocery item objects. 
+ */ +export const getWatchedItems = async (userId: string): Promise => { + if (!supabase) return []; + + const { data, error } = await supabase + .from('user_watched_items') + .select('master_grocery_items(*, category_name:categories(name))') + .eq('user_id', userId) + .order('name', { ascending: true, referencedTable: 'master_grocery_items' }); + + if (error) throw new Error(`Error fetching watched items: ${error.message}`); + + return (data || []).map((item: any) => ({ + ...item.master_grocery_items, + category_name: item.master_grocery_items.category_name?.name, + })); +}; + +/** + * Retrieves all master grocery items. This is used for matching during extraction. + * @returns An array of master grocery item objects. + */ +export const getAllMasterItems = async (): Promise => { + if (!supabase) return []; + + const { data, error } = await supabase + .from('master_grocery_items') + .select('*, category_name:categories(name)') + .order('name', { ascending: true }); + + if (error) throw new Error(`Error fetching master items: ${error.message}`); + + return (data || []).map(item => ({ + ...item, + category_name: (item.category_name as any)?.name, + })); +}; + +/** + * Adds a new item to a user's watchlist. + * It first ensures the master item exists, then creates the user-item link. + * @param userId The UUID of the user. + * @param itemName The name of the item to add. + * @param category The category of the item. + * @returns The master item object that was added to the watchlist. + */ +export const addWatchedItem = async (userId: string, itemName: string, category: string): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + + // 1. 
Find or create the category + let { data: categoryData } = await supabase + .from('categories') + .select('id') + .eq('name', category) + .single(); + + if (!categoryData) { + const { data: newCategoryData, error: newCategoryError } = await supabase + .from('categories') + .insert({ name: category }) + .select('id') + .single(); + if (newCategoryError) throw new Error(`Error creating category: ${newCategoryError.message}`); + categoryData = newCategoryData; + } + + // 2. Upsert the master item to ensure it exists and get its ID + const { data: masterItem, error: masterItemError } = await supabase + .from('master_grocery_items') + .upsert({ name: itemName.trim(), category_id: categoryData.id }, { onConflict: 'name', ignoreDuplicates: false }) + .select('*, category_name:categories(name)') + .single(); + if (masterItemError) throw new Error(`Failed to upsert master item: ${masterItemError.message}`); + if (!masterItem) throw new Error("Master item operation did not return data."); + + // 3. Create the link in user_watched_items + const { error: watchLinkError } = await supabase + .from('user_watched_items') + .insert({ user_id: userId, master_item_id: masterItem.id }); + + // Ignore duplicate errors (user already watching), throw others + if (watchLinkError && watchLinkError.code !== '23505') { + throw new Error(`Failed to add item to watchlist: ${watchLinkError.message}`); + } + + // 4. Return the full master item object for UI update + return { + ...masterItem, + category_name: (masterItem.category_name as any)?.name, + }; +}; + +/** + * Removes an item from a user's watchlist. + * @param userId The UUID of the user. + * @param masterItemId The ID of the master item to remove. 
+ */ +export const removeWatchedItem = async (userId: string, masterItemId: number): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + + const { error } = await supabase + .from('user_watched_items') + .delete() + .eq('user_id', userId) + .eq('master_item_id', masterItemId); + + if (error) { + throw new Error(`Failed to remove watched item: ${error.message}`); + } +}; + +/** + * Fetches all flyer items for a given list of flyer IDs. + * @param flyerIds An array of flyer IDs. + * @returns An array of flyer item objects. + */ +export const getFlyerItemsForFlyers = async (flyerIds: number[]): Promise => { + if (!supabase || flyerIds.length === 0) return []; + + const { data, error } = await supabase + .from('flyer_items') + .select('*') + .in('flyer_id', flyerIds); + if (error) throw new Error(`Error fetching items for flyers: ${error.message}`); + + return data || []; +}; + +/** + * Counts the total number of items across a list of flyers. + * @param flyerIds An array of flyer IDs. + * @returns The total count of items. + */ +export const countFlyerItemsForFlyers = async (flyerIds: number[]): Promise => { + if (!supabase || flyerIds.length === 0) return 0; + + const { count, error } = await supabase + .from('flyer_items') + .select('*', { count: 'exact', head: true }) + .in('flyer_id', flyerIds); + if (error) throw new Error(`Error counting items for flyers: ${error.message}`); + + return count || 0; +}; + +/** + * Loads historical price data for watched items. + * @param watchedItems An array of master grocery items. + * @returns An array of historical price data points. 
+ */ +export const loadAllHistoricalItems = async (watchedItems: MasterGroceryItem[]): Promise[]> => { + if (!supabase || watchedItems.length === 0) return []; + + const watchedItemIds = watchedItems.map(item => item.id); + + const { data, error } = await supabase + .from('flyer_items') + .select('master_item_id, price_in_cents, created_at') + .in('master_item_id', watchedItemIds) + .not('price_in_cents', 'is', null) + .order('created_at', { ascending: true }); + if (error) throw new Error(`Error loading historical items: ${error.message}`); + + return data || []; +}; + +/** + * Fetches a user's profile from the database. + * @param userId The UUID of the user. + * @returns The user's profile object. + */ +export const getUserProfile = async (userId: string): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { data, error } = await supabase + .from('profiles') + .select('*') + .eq('id', userId) + .single(); + + if (error) { + console.error("Error fetching user profile:", error.message); + return null; + } + return data; +}; + +/** + * Updates a user's profile information. + * @param userId The UUID of the user. + * @param updates The profile fields to update. + * @returns The updated profile object. + */ +export const updateUserProfile = async (userId: string, updates: { full_name?: string; avatar_url?: string }): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { data, error } = await supabase + .from('profiles') + .update(updates) + .eq('id', userId) + .select() + .single(); + if (error) throw new Error(`Error updating profile: ${error.message}`); + return data; +}; + +/** + * Updates a user's preferences. + * @param userId The UUID of the user. + * @param preferences The preferences object to save. + * @returns The updated profile object. 
+ */ +export const updateUserPreferences = async (userId: string, preferences: Profile['preferences']): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { data, error } = await supabase + .from('profiles') + .update({ preferences }) + .eq('id', userId) + .select() + .single(); + if (error) throw new Error(`Error updating preferences: ${error.message}`); + return data; +}; + +/** + * Updates the authenticated user's password. + * @param newPassword The new password. + */ +export const updateUserPassword = async (newPassword: string): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { error } = await supabase.auth.updateUser({ password: newPassword }); + if (error) throw new Error(`Error updating password: ${error.message}`); +}; + +/** + * Gathers all data for a specific user for export. + * @param userId The UUID of the user. + * @returns An object containing all of the user's data. + */ +export const exportUserData = async (userId: string): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + + const { data: profile, error: profileError } = await supabase + .from('profiles') + .select('*') + .eq('id', userId) + .single(); + if (profileError) throw new Error(`Could not fetch profile: ${profileError.message}`); + + const { data: watchedItems, error: watchedItemsError } = await supabase + .from('user_watched_items') + .select('created_at, item:master_grocery_items(name, category:categories(name))') + .eq('user_id', userId); + if (watchedItemsError) throw new Error(`Could not fetch watched items: ${watchedItemsError.message}`); + + const { data: shoppingLists, error: shoppingListsError } = await supabase + .from('shopping_lists') + .select('name, created_at, items:shopping_list_items(custom_item_name, quantity, is_purchased, master_item:master_grocery_items(name))') + .eq('user_id', userId); + if (shoppingListsError) throw new Error(`Could not fetch 
shopping lists: ${shoppingListsError.message}`); + + return { + profile, + watchedItems, + shoppingLists, + }; +}; + + +/** + * Deletes the current user's account by invoking a secure Edge Function. + * @param password The user's current password for verification. + */ +export const deleteUserAccount = async (password: string): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + + const { data, error } = await supabase.functions.invoke('delete-user', { + body: { password }, + }); + + if (error) { + let errorDetails = `Edge Function returned an error: ${error.message}.`; + try { + const errorBody = await error.context.json(); + const message = errorBody.error || 'No error message in body.'; + const stack = errorBody.stack || 'No stack trace in body.'; + errorDetails = `Error: ${message}\n\nStack Trace:\n${stack}`; + } catch (parseError) { + errorDetails += `\nCould not parse error response body.`; + } + throw new Error(errorDetails); + } + if (data.error) throw new Error(data.error); +}; + +/** + * Calls the `system-check` Edge Function to verify the backend setup. + * @returns The results of the system checks. + */ +export const invokeSystemCheckFunction = async (): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { data, error } = await supabase.functions.invoke('system-check'); + + if (error) { + let errorDetails = `System check function failed: ${error.message}.`; + if (error.message.includes("Not found")) { + errorDetails = "The 'system-check' Edge Function is not deployed. 
Please follow the instructions in the README to deploy it."; + } else { + try { + const errorBody = await error.context.json(); + errorDetails += `\nDetails: ${errorBody.error || 'Unknown error'}`; + } catch (e) { /* ignore */ } + } + throw new Error(errorDetails); + } + if (data.error) throw new Error(data.error); + return data.results; +}; + + +/** + * Creates the initial development users by invoking a secure Edge Function. + */ +export const invokeSeedDatabaseFunction = async (): Promise<{ message: string }> => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { data, error } = await supabase.functions.invoke('seed-database'); + + if (error) { + let errorDetails = `Edge Function returned a non-2xx status code: ${error.message}.`; + try { + const errorBody = await error.context.json(); + const message = errorBody.error || 'No error message in body.'; + const stack = errorBody.stack || 'No stack trace in body.'; + errorDetails = `Error: ${message}\n\nStack Trace:\n${stack}`; + } catch (parseError) { + errorDetails += `\nCould not parse error response body. Raw response might be in browser network tab.`; + } + throw new Error(errorDetails); + } + + if (data.error) throw new Error(data.error); + return data; +}; + + +// ============================================= +// SHOPPING LIST FUNCTIONS +// ============================================= + +/** + * Fetches all shopping lists for a user, including their items. + * @param userId The UUID of the user. + * @returns An array of shopping list objects. 
+ */ +export const getShoppingLists = async (userId: string): Promise => { + if (!supabase) return []; + const { data, error } = await supabase + .from('shopping_lists') + .select(` + *, + items:shopping_list_items ( + *, + master_item:master_grocery_items (name) + ) + `) + .eq('user_id', userId) + .order('created_at', { ascending: true }) + .order('added_at', { ascending: true, referencedTable: 'shopping_list_items' }); + + if (error) throw new Error(`Error fetching shopping lists: ${error.message}`); + return data || []; +}; + +/** + * Creates a new shopping list for a user. + * @param userId The UUID of the user. + * @param name The name of the new list. + * @returns The newly created shopping list object. + */ +export const createShoppingList = async (userId: string, name: string): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { data, error } = await supabase + .from('shopping_lists') + .insert({ user_id: userId, name }) + .select() + .single(); + if (error) throw new Error(`Error creating shopping list: ${error.message}`); + return { ...data, items: [] }; // Return with empty items array +}; + +/** + * Deletes a shopping list. + * @param listId The ID of the list to delete. + */ +export const deleteShoppingList = async (listId: number): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { error } = await supabase + .from('shopping_lists') + .delete() + .eq('id', listId); + if (error) throw new Error(`Error deleting shopping list: ${error.message}`); +}; + +/** + * Adds an item to a shopping list. + * @param listId The ID of the list. + * @param masterItemId Optional ID of the master grocery item. + * @param customItemName Optional name for a custom item. + * @returns The newly created shopping list item. 
+ */ +export const addShoppingListItem = async (listId: number, { masterItemId, customItemName }: { masterItemId?: number; customItemName?: string }): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + if (!masterItemId && !customItemName) throw new Error("Either masterItemId or customItemName must be provided."); + + const itemToInsert = { + shopping_list_id: listId, + master_item_id: masterItemId, + custom_item_name: customItemName, + quantity: 1 + }; + + // Use upsert to handle potential duplicates of master items gracefully + const query = supabase.from('shopping_list_items').upsert(itemToInsert, { + onConflict: 'shopping_list_id, master_item_id', + ignoreDuplicates: masterItemId ? false : true // Only upsert quantity for master items + }).select('*, master_item:master_grocery_items(name)').single(); + + const { data, error } = await query; + if (error) throw new Error(`Error adding shopping list item: ${error.message}`); + return data; +}; + +/** + * Updates a shopping list item. + * @param itemId The ID of the item to update. + * @param updates The fields to update (e.g., is_purchased, quantity). + * @returns The updated shopping list item. + */ +export const updateShoppingListItem = async (itemId: number, updates: Partial): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { data, error } = await supabase + .from('shopping_list_items') + .update(updates) + .eq('id', itemId) + .select('*, master_item:master_grocery_items(name)') + .single(); + if (error) throw new Error(`Error updating shopping list item: ${error.message}`); + return data; +}; + +/** + * Removes an item from a shopping list. + * @param itemId The ID of the item to remove. 
+ */ +export const removeShoppingListItem = async (itemId: number): Promise => { + if (!supabase) throw new Error("Supabase client not initialized"); + const { error } = await supabase + .from('shopping_list_items') + .delete() + .eq('id', itemId); + if (error) throw new Error(`Error removing shopping list item: ${error.message}`); +}; diff --git a/sql/2025-11-05 b/sql/2025-11-05 new file mode 100644 index 0000000..325f419 --- /dev/null +++ b/sql/2025-11-05 @@ -0,0 +1,1215 @@ +-- ============================================================================ +-- PERMISSION FIX (v3 - Ownership) +-- ============================================================================ +-- The following command addresses "permission denied" errors by setting the +-- owner of the 'public' schema to 'postgres'. This user is typically the one +-- running scripts in the Supabase SQL Editor. The schema owner has all +-- necessary privileges, including creating extensions and tables. +ALTER SCHEMA public OWNER TO postgres; + +-- Grant the service_role to the postgres user, who is running this script. +-- This is necessary for a later step where we change the ownership of a +-- trigger function to the service_role. +GRANT service_role TO postgres; + +-- ============================================================================ +-- MASTER SCHEMA SCRIPT +-- ============================================================================ +-- Purpose: +-- This file contains the master SQL schema for the entire Supabase database. +-- It is designed to be a "one-click" script that can be run in the Supabase +-- SQL Editor to set up the entire backend from scratch, including: +-- 1. Enabling required Postgres extensions. +-- 2. Creating all tables with relationships and constraints. +-- 3. Seeding essential initial data (categories, master items). +-- 4. Creating the necessary storage buckets. +-- 5. Applying comprehensive Row Level Security (RLS) policies. +-- 6. 
-- Defining database functions for business logic.
-- 7. Setting up triggers for automation (e.g., creating user profiles).
--
-- Usage:
-- For a fresh setup, run this entire script. To reset the environment,
-- first run 'delete_all_tables.sql.txt' and then run this script.

-- ============================================================================
-- PART 0: EXTENSIONS
-- ============================================================================
-- Enable necessary PostgreSQL extensions.
-- postgis: For storing and querying geographic data (store locations).
-- pg_trgm: For trigram-based fuzzy string matching (improving item searches).
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS pg_trgm;

-- ============================================================================
-- PART 1: TABLE CREATION
-- ============================================================================
-- NOTE(review): the numbered step comments below (1, 2, 4, 3, 6, 0, 5, ...) are
-- out of order relative to the statements; creation order is what matters for
-- foreign keys, so the numbers are informational only.

-- 1. Create the 'stores' table for normalized store data.
CREATE TABLE IF NOT EXISTS public.stores (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    name TEXT NOT NULL UNIQUE,
    logo_url TEXT
);
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';

-- 2. Create the 'categories' table for normalized category data.
CREATE TABLE IF NOT EXISTS public.categories (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    name TEXT NOT NULL UNIQUE
);
COMMENT ON TABLE public.categories IS 'Stores a predefined list of grocery item categories (e.g., ''Fruits & Vegetables'', ''Dairy & Eggs'').';

-- 4. Create the 'master_grocery_items' table. This is the master dictionary.
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    name TEXT NOT NULL UNIQUE,
    category_id BIGINT REFERENCES public.categories(id)
);
COMMENT ON TABLE public.master_grocery_items IS 'The master dictionary of canonical grocery items. Each item has a unique name and is linked to a category.';

-- 3. Create the 'flyers' table with its full, final schema.
CREATE TABLE IF NOT EXISTS public.flyers (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    file_name TEXT NOT NULL,
    image_url TEXT NOT NULL,
    checksum TEXT UNIQUE,
    store_id BIGINT REFERENCES public.stores(id),
    valid_from DATE,
    valid_to DATE,
    store_address TEXT
);
COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.';
COMMENT ON COLUMN public.flyers.file_name IS 'The original name of the uploaded flyer file (e.g., "flyer_week_1.pdf").';
COMMENT ON COLUMN public.flyers.image_url IS 'The public URL of the primary flyer image stored in Supabase Storage.';
COMMENT ON COLUMN public.flyers.checksum IS 'A SHA-256 hash of the original file content to prevent duplicate processing.';
COMMENT ON COLUMN public.flyers.store_id IS 'Foreign key linking this flyer to a specific store in the `stores` table.';
COMMENT ON COLUMN public.flyers.valid_from IS 'The start date of the sale period for this flyer, extracted by the AI.';
COMMENT ON COLUMN public.flyers.valid_to IS 'The end date of the sale period for this flyer, extracted by the AI.';
COMMENT ON COLUMN public.flyers.store_address IS 'The physical store address if it was successfully extracted from the flyer image.';


-- 6. Create the 'flyer_items' table with its full, final schema.
CREATE TABLE IF NOT EXISTS public.flyer_items (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    flyer_id BIGINT REFERENCES public.flyers(id) ON DELETE CASCADE,
    item TEXT NOT NULL,
    price_display TEXT NOT NULL,
    price_in_cents INTEGER,
    quantity TEXT NOT NULL,
    quantity_num NUMERIC,
    master_item_id BIGINT REFERENCES public.master_grocery_items(id),
    category_id BIGINT REFERENCES public.categories(id),
    category_name TEXT, -- Denormalized for easier display
    unit_price JSONB, -- {"value": 1.99, "unit": "lb"}
    product_id BIGINT -- Future use for specific product linking
);
COMMENT ON TABLE public.flyer_items IS 'Stores individual items extracted from a specific flyer.';
COMMENT ON COLUMN public.flyer_items.flyer_id IS 'Foreign key linking this item to its parent flyer in the `flyers` table.';
COMMENT ON COLUMN public.flyer_items.item IS 'The raw item name as it appears in the flyer (e.g., "Granny Smith Apples").';
COMMENT ON COLUMN public.flyer_items.price_display IS 'The raw price string from the flyer (e.g., "$3.99", "2 for $5.00").';
COMMENT ON COLUMN public.flyer_items.price_in_cents IS 'The normalized price for a single item, in cents, for easier sorting and comparison.';
COMMENT ON COLUMN public.flyer_items.quantity IS 'The raw quantity or deal description string (e.g., "per lb", "500g bag").';
COMMENT ON COLUMN public.flyer_items.quantity_num IS 'The parsed primary numeric value from the quantity string (e.g., 500 from "500g").';
COMMENT ON COLUMN public.flyer_items.master_item_id IS 'Foreign key linking this flyer item to its canonical entry in `master_grocery_items`. Null if no match was found.';
COMMENT ON COLUMN public.flyer_items.category_id IS 'Foreign key to the item''s category. Can be redundant if master_item_id is set, but useful.';
COMMENT ON COLUMN public.flyer_items.category_name IS 'The denormalized name of the category for faster UI display without extra joins.';
COMMENT ON COLUMN public.flyer_items.unit_price IS 'A JSONB object storing the calculated unit price (e.g., {"value": 1.99, "unit": "lb"}) for standardized price comparisons.';
COMMENT ON COLUMN public.flyer_items.product_id IS 'A foreign key for future use, to link to a specific product with a UPC code in the `products` table.';

-- Add a GIN index to the 'item' column for fast fuzzy text searching.
-- This requires the pg_trgm extension.
CREATE INDEX IF NOT EXISTS flyer_items_item_trgm_idx ON public.flyer_items USING GIN (item gin_trgm_ops);

-- 0. Create a table for public user profiles.
-- This table is linked to the auth.users table and stores non-sensitive user data.
CREATE TABLE IF NOT EXISTS public.profiles (
    id UUID PRIMARY KEY REFERENCES auth.users(id) ON DELETE CASCADE,
    updated_at TIMESTAMPTZ,
    full_name TEXT,
    avatar_url TEXT,
    preferences JSONB
);
COMMENT ON TABLE public.profiles IS 'Stores public-facing user data, linked to the private auth.users table.';

-- 5. Create the 'user_watched_items' table. This links to the master list.
CREATE TABLE IF NOT EXISTS public.user_watched_items (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE,
    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    -- A user can watch a given master item at most once.
    UNIQUE(user_id, master_item_id)
);
COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';

-- 7. Create a table for user-defined alerts on watched items.
CREATE TABLE IF NOT EXISTS public.user_alerts (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    user_watched_item_id BIGINT NOT NULL REFERENCES public.user_watched_items(id) ON DELETE CASCADE,
    -- Constrain alert_type to the two supported rule kinds.
    alert_type TEXT NOT NULL CHECK (alert_type IN ('PRICE_BELOW', 'PERCENT_OFF_AVERAGE')),
    threshold_value NUMERIC NOT NULL,
    is_active BOOLEAN DEFAULT true NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
COMMENT ON TABLE public.user_alerts IS 'Stores user-configured alert rules for their watched items.';
COMMENT ON COLUMN public.user_alerts.alert_type IS 'The condition that triggers the alert, e.g., ''PRICE_BELOW''.';
COMMENT ON COLUMN public.user_alerts.threshold_value IS 'The numeric threshold for the alert condition (e.g., price in cents, or percentage).';


-- 8. Create a table to store notifications for users.
CREATE TABLE IF NOT EXISTS public.notifications (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE,
    content TEXT NOT NULL,
    link_url TEXT,
    is_read BOOLEAN DEFAULT false NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
COMMENT ON TABLE public.notifications IS 'A central log of notifications generated for users, such as price alerts.';
COMMENT ON COLUMN public.notifications.content IS 'The notification message displayed to the user.';
COMMENT ON COLUMN public.notifications.link_url IS 'A URL to navigate to when the notification is clicked.';


-- 9. Create a table for aggregated, historical price data for master items.
CREATE TABLE IF NOT EXISTS public.item_price_history (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
    summary_date DATE NOT NULL,
    min_price_in_cents INTEGER,
    max_price_in_cents INTEGER,
    avg_price_in_cents INTEGER,
    data_points_count INTEGER DEFAULT 0 NOT NULL,
    -- One summary row per item per day.
    UNIQUE(master_item_id, summary_date)
);
COMMENT ON TABLE public.item_price_history IS 'Serves as a summary table to speed up charting and analytics.';
COMMENT ON COLUMN public.item_price_history.summary_date IS 'The date for which the price data is summarized.';


-- 10. Create a table to map various names to a single master grocery item.
CREATE TABLE IF NOT EXISTS public.master_item_aliases (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
    alias TEXT NOT NULL UNIQUE
);
COMMENT ON TABLE public.master_item_aliases IS 'Stores synonyms or alternative names for master items to improve matching.';
COMMENT ON COLUMN public.master_item_aliases.alias IS 'An alternative name, e.g., "Ground Chuck" for the master item "Ground Beef".';


-- 11. Create tables for user shopping lists.
CREATE TABLE IF NOT EXISTS public.shopping_lists (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE,
    name TEXT NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
COMMENT ON TABLE public.shopping_lists IS 'Stores user-created shopping lists, e.g., "Weekly Groceries".';


CREATE TABLE IF NOT EXISTS public.shopping_list_items (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(id) ON DELETE CASCADE,
    master_item_id BIGINT REFERENCES public.master_grocery_items(id),
    custom_item_name TEXT,
    quantity NUMERIC DEFAULT 1 NOT NULL,
    is_purchased BOOLEAN DEFAULT false NOT NULL,
    added_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    -- A master item may appear at most once per list; this also backs the
    -- client-side upsert on (shopping_list_id, master_item_id).
    UNIQUE(shopping_list_id, master_item_id),
    -- Every row must identify an item one way or the other.
    CONSTRAINT must_have_item_identifier CHECK (master_item_id IS NOT NULL OR custom_item_name IS NOT NULL)
);
COMMENT ON TABLE public.shopping_list_items IS 'Contains individual items for a specific shopping list.';
COMMENT ON COLUMN public.shopping_list_items.custom_item_name IS 'For items not in the master list, e.g., "Grandma''s special spice mix".';
COMMENT ON COLUMN public.shopping_list_items.is_purchased IS 'Lets users check items off their list as they shop.';


-- 12. Create a table to store user-submitted corrections for flyer items.
CREATE TABLE IF NOT EXISTS public.suggested_corrections (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(id) ON DELETE CASCADE,
    user_id UUID NOT NULL REFERENCES auth.users(id),
    correction_type TEXT NOT NULL,
    suggested_value TEXT NOT NULL,
    status TEXT DEFAULT 'pending' NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    reviewed_notes TEXT,
    reviewed_at TIMESTAMPTZ
);
COMMENT ON TABLE public.suggested_corrections IS 'A queue for user-submitted data corrections, enabling crowdsourced data quality improvements.';
COMMENT ON COLUMN public.suggested_corrections.correction_type IS 'The type of error the user is reporting.';
COMMENT ON COLUMN public.suggested_corrections.suggested_value IS 'The corrected value proposed by the user (e.g., a new price or master_item_id).';
COMMENT ON COLUMN public.suggested_corrections.status IS 'The moderation status of the correction: pending, approved, or rejected.';


-- 13. Create a table for prices submitted directly by users from in-store.
CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    user_id UUID NOT NULL REFERENCES auth.users(id),
    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id),
    store_id BIGINT NOT NULL REFERENCES public.stores(id),
    price_in_cents INTEGER NOT NULL,
    photo_url TEXT,
    upvotes INTEGER DEFAULT 0 NOT NULL,
    downvotes INTEGER DEFAULT 0 NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
COMMENT ON TABLE public.user_submitted_prices IS 'Stores item prices submitted by users directly from physical stores.';
COMMENT ON COLUMN public.user_submitted_prices.photo_url IS 'URL to user-submitted photo evidence of the price.';
COMMENT ON COLUMN public.user_submitted_prices.upvotes IS 'Community validation score indicating accuracy.';


-- A table to store brand information.
CREATE TABLE IF NOT EXISTS public.brands (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    name TEXT NOT NULL UNIQUE
);
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';


-- A table for specific products, linking a master item with a brand and size.
CREATE TABLE IF NOT EXISTS public.products (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id),
    brand_id BIGINT REFERENCES public.brands(id),
    name TEXT NOT NULL,
    description TEXT,
    size TEXT,
    upc_code TEXT UNIQUE
);
COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';

-- Link flyer_items to the new products table.
-- This is done via ALTER TABLE because 'products' is created after 'flyer_items'.
-- NOTE(review): unlike the CREATE TABLE IF NOT EXISTS statements above, this
-- ADD CONSTRAINT has no existence guard, so re-running the script will fail
-- here with "constraint already exists" — confirm whether the script is meant
-- to be re-runnable.
ALTER TABLE public.flyer_items
ADD CONSTRAINT flyer_items_product_id_fkey
FOREIGN KEY (product_id) REFERENCES public.products(id);


-- A table to store individual store locations with geographic data.
CREATE TABLE IF NOT EXISTS public.store_locations (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    store_id BIGINT NOT NULL REFERENCES public.stores(id),
    address TEXT NOT NULL,
    city TEXT,
    province_state TEXT,
    postal_code TEXT,
    location GEOGRAPHY(Point, 4326)
);
COMMENT ON TABLE public.store_locations IS 'Stores physical locations of stores with geographic data for proximity searches.';
COMMENT ON COLUMN public.store_locations.location IS 'Geographic coordinates (longitude, latitude) of the store.';

-- Add a GIST index for efficient geographic queries.
-- This requires the postgis extension.
CREATE INDEX IF NOT EXISTS store_locations_geo_idx ON public.store_locations USING GIST (location);


-- A linking table if one flyer is valid for multiple locations.
CREATE TABLE IF NOT EXISTS public.flyer_locations (
    flyer_id BIGINT NOT NULL REFERENCES public.flyers(id) ON DELETE CASCADE,
    store_location_id BIGINT NOT NULL REFERENCES public.store_locations(id) ON DELETE CASCADE,
    PRIMARY KEY (flyer_id, store_location_id)
);
COMMENT ON TABLE public.flyer_locations IS 'A linking table associating a single flyer with multiple store locations where its deals are valid.';

-- A table to store recipes, which can be user-created or pre-populated.
CREATE TABLE IF NOT EXISTS public.recipes (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    -- Nullable: pre-populated (system) recipes have no owning user.
    user_id UUID REFERENCES auth.users(id),
    name TEXT NOT NULL,
    description TEXT,
    instructions TEXT,
    prep_time_minutes INTEGER,
    cook_time_minutes INTEGER,
    servings INTEGER,
    photo_url TEXT,
    calories_per_serving INTEGER,
    protein_grams NUMERIC,
    fat_grams NUMERIC,
    carb_grams NUMERIC,
    avg_rating NUMERIC(2,1) DEFAULT 0.0 NOT NULL,
    rating_count INTEGER DEFAULT 0 NOT NULL,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
COMMENT ON TABLE public.recipes IS 'Stores recipes that can be used to generate shopping lists.';
COMMENT ON COLUMN public.recipes.servings IS 'The number of servings this recipe yields.';


-- A linking table for ingredients required for each recipe.
CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE,
    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id),
    quantity NUMERIC NOT NULL,
    unit TEXT NOT NULL
);
COMMENT ON TABLE public.recipe_ingredients IS 'Defines the ingredients and quantities needed for a recipe.';


-- A table to store a predefined list of tags for recipes.
+CREATE TABLE IF NOT EXISTS public.tags ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + name TEXT NOT NULL UNIQUE +); +COMMENT ON TABLE public.tags IS 'Stores tags for categorizing recipes, e.g., "Vegetarian", "Quick & Easy".'; + + +-- A linking table to associate multiple tags with a recipe. +CREATE TABLE IF NOT EXISTS public.recipe_tags ( + recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE, + tag_id BIGINT NOT NULL REFERENCES public.tags(id) ON DELETE CASCADE, + PRIMARY KEY (recipe_id, tag_id) +); +COMMENT ON TABLE public.recipe_tags IS 'A linking table to associate multiple tags with a single recipe.'; + +-- A table to store individual user ratings for recipes. +CREATE TABLE IF NOT EXISTS public.recipe_ratings ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE, + user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, + rating INTEGER NOT NULL CHECK (rating >= 1 AND rating <= 5), + comment TEXT, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + UNIQUE(recipe_id, user_id) +); +COMMENT ON TABLE public.recipe_ratings IS 'Stores individual user ratings for recipes, ensuring a user can only rate a recipe once.'; + +-- A table to store a user's collection of planned meals for a date range. +CREATE TABLE IF NOT EXISTS public.menu_plans ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, + name TEXT NOT NULL, + start_date DATE NOT NULL, + end_date DATE NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.menu_plans IS 'Represents a user''s meal plan for a specific period, e.g., "Week of Oct 23".'; + + +-- A table to associate a recipe with a specific date and meal type within a menu plan. 
+CREATE TABLE IF NOT EXISTS public.planned_meals (
+    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    menu_plan_id BIGINT NOT NULL REFERENCES public.menu_plans(id) ON DELETE CASCADE,
+    recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE,
+    plan_date DATE NOT NULL,
+    meal_type TEXT NOT NULL, -- e.g., 'Breakfast', 'Lunch', 'Dinner'
+    servings_to_cook INTEGER
+);
+COMMENT ON TABLE public.planned_meals IS 'Assigns a recipe to a specific day and meal type within a user''s menu plan.';
+COMMENT ON COLUMN public.planned_meals.meal_type IS 'The designated meal for the recipe, e.g., ''Breakfast'', ''Lunch'', ''Dinner''.';
+
+
+-- A table to track the grocery items a user currently has in their pantry.
+-- NOTE(review): unit is nullable, and SQL UNIQUE treats NULLs as distinct, so multiple
+-- rows per (user_id, master_item_id) with a NULL unit are possible — confirm intended.
+CREATE TABLE IF NOT EXISTS public.pantry_items (
+    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE,
+    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
+    quantity NUMERIC NOT NULL,
+    unit TEXT,
+    best_before_date DATE,
+    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+    UNIQUE(user_id, master_item_id, unit)
+);
+COMMENT ON TABLE public.pantry_items IS 'Tracks a user''s personal inventory of grocery items to enable smart shopping lists.';
+COMMENT ON COLUMN public.pantry_items.quantity IS 'The current amount of the item. Convention: use grams for weight, mL for volume where applicable.';
+COMMENT ON COLUMN public.pantry_items.unit IS 'e.g., ''g'', ''ml'', ''items''. Should align with recipe_ingredients.unit and quantity convention.';
+
+
+
+-- ============================================================================
+-- PART 2: DATA SEEDING
+-- ============================================================================
+-- Pre-populate the categories table.
+INSERT INTO public.categories (name) VALUES
+    ('Fruits & Vegetables'), ('Meat & Seafood'), ('Dairy & Eggs'), ('Bakery & Bread'),
+    ('Pantry & Dry Goods'), ('Beverages'), ('Frozen Foods'), ('Snacks'), ('Household & Cleaning'),
+    ('Personal Care & Health'), ('Baby & Child'), ('Pet Supplies'), ('Deli & Prepared Foods'),
+    ('Canned Goods'), ('Condiments & Spices'), ('Breakfast & Cereal'), ('Organic'),
+    ('International Foods'), ('Other/Miscellaneous')
+ON CONFLICT (name) DO NOTHING;
+
+-- Pre-populate the master grocery items dictionary.
+-- NOTE(review): SELECT ... INTO leaves the variable NULL when no row matches, so if a
+-- category name above is ever renamed, the items below would be inserted with a NULL
+-- category_id (or fail, if that column is NOT NULL) — keep the two lists in sync.
+DO $$
+DECLARE
+    -- One category-id variable per category referenced by the seed items below.
+    fv_cat_id BIGINT; ms_cat_id BIGINT; de_cat_id BIGINT; bb_cat_id BIGINT; pdg_cat_id BIGINT;
+    bev_cat_id BIGINT; ff_cat_id BIGINT; snk_cat_id BIGINT; hc_cat_id BIGINT; pch_cat_id BIGINT;
+    bc_cat_id BIGINT; ps_cat_id BIGINT; dpf_cat_id BIGINT; cg_cat_id BIGINT; cs_cat_id BIGINT;
+    bkc_cat_id BIGINT;
+BEGIN
+    SELECT id INTO fv_cat_id FROM public.categories WHERE name = 'Fruits & Vegetables';
+    SELECT id INTO ms_cat_id FROM public.categories WHERE name = 'Meat & Seafood';
+    SELECT id INTO de_cat_id FROM public.categories WHERE name = 'Dairy & Eggs';
+    SELECT id INTO bb_cat_id FROM public.categories WHERE name = 'Bakery & Bread';
+    SELECT id INTO pdg_cat_id FROM public.categories WHERE name = 'Pantry & Dry Goods';
+    SELECT id INTO bev_cat_id FROM public.categories WHERE name = 'Beverages';
+    SELECT id INTO ff_cat_id FROM public.categories WHERE name = 'Frozen Foods';
+    SELECT id INTO snk_cat_id FROM public.categories WHERE name = 'Snacks';
+    SELECT id INTO hc_cat_id FROM public.categories WHERE name = 'Household & Cleaning';
+    SELECT id INTO pch_cat_id FROM public.categories WHERE name = 'Personal Care & Health';
+    SELECT id INTO bc_cat_id FROM public.categories WHERE name = 'Baby & Child';
+    SELECT id INTO ps_cat_id FROM public.categories WHERE name = 'Pet Supplies';
+    SELECT id INTO dpf_cat_id FROM public.categories WHERE name = 'Deli & Prepared Foods';
+    SELECT id INTO cg_cat_id FROM public.categories WHERE name = 'Canned Goods';
+    SELECT id INTO cs_cat_id FROM public.categories WHERE name = 'Condiments & Spices';
+    SELECT id INTO bkc_cat_id FROM public.categories WHERE name = 'Breakfast & Cereal';
+
+    -- One VALUES line per category; item names are lowercase by convention.
+    INSERT INTO public.master_grocery_items (name, category_id) VALUES
+    ('apples', fv_cat_id), ('bananas', fv_cat_id), ('oranges', fv_cat_id), ('grapes', fv_cat_id), ('strawberries', fv_cat_id), ('blueberries', fv_cat_id), ('raspberries', fv_cat_id), ('avocados', fv_cat_id), ('tomatoes', fv_cat_id), ('potatoes', fv_cat_id), ('onions', fv_cat_id), ('garlic', fv_cat_id), ('carrots', fv_cat_id), ('broccoli', fv_cat_id), ('spinach', fv_cat_id), ('lettuce', fv_cat_id), ('bell peppers', fv_cat_id), ('cucumbers', fv_cat_id), ('mushrooms', fv_cat_id), ('lemons', fv_cat_id), ('limes', fv_cat_id), ('celery', fv_cat_id), ('corn', fv_cat_id), ('sweet potatoes', fv_cat_id), ('zucchini', fv_cat_id), ('cauliflower', fv_cat_id), ('green beans', fv_cat_id), ('peas', fv_cat_id), ('asparagus', fv_cat_id),
+    ('chicken breast', ms_cat_id), ('chicken thighs', ms_cat_id), ('ground beef', ms_cat_id), ('steak', ms_cat_id), ('pork chops', ms_cat_id), ('bacon', ms_cat_id), ('sausage', ms_cat_id), ('salmon', ms_cat_id), ('shrimp', ms_cat_id), ('tilapia', ms_cat_id), ('cod', ms_cat_id), ('tuna', ms_cat_id), ('ham', ms_cat_id), ('turkey', ms_cat_id),
+    ('milk', de_cat_id), ('cheese', de_cat_id), ('yogurt', de_cat_id), ('butter', de_cat_id), ('eggs', de_cat_id), ('cream cheese', de_cat_id), ('sour cream', de_cat_id), ('cottage cheese', de_cat_id),
+    ('bread', bb_cat_id), ('bagels', bb_cat_id), ('tortillas', bb_cat_id), ('croissants', bb_cat_id), ('muffins', bb_cat_id), ('baguette', bb_cat_id), ('pita bread', bb_cat_id),
+    ('rice', pdg_cat_id), ('pasta', pdg_cat_id), ('flour', pdg_cat_id), ('sugar', pdg_cat_id), ('salt', pdg_cat_id), ('pepper', pdg_cat_id), ('olive oil', pdg_cat_id), ('vegetable oil', pdg_cat_id), ('canned tomatoes', pdg_cat_id), ('canned beans', pdg_cat_id), ('peanut butter', pdg_cat_id), ('jam', pdg_cat_id), ('honey', pdg_cat_id), ('syrup', pdg_cat_id), ('nuts', pdg_cat_id), ('dried fruit', pdg_cat_id), ('crackers', pdg_cat_id), ('quinoa', pdg_cat_id), ('lentils', pdg_cat_id),
+    ('water', bev_cat_id), ('juice', bev_cat_id), ('soda', bev_cat_id), ('coffee', bev_cat_id), ('tea', bev_cat_id), ('almond milk', bev_cat_id), ('soy milk', bev_cat_id), ('coconut water', bev_cat_id),
+    ('frozen pizza', ff_cat_id), ('frozen vegetables', ff_cat_id), ('frozen fruit', ff_cat_id), ('ice cream', ff_cat_id), ('frozen dinners', ff_cat_id), ('french fries', ff_cat_id), ('frozen fish', ff_cat_id),
+    ('chips', snk_cat_id), ('pretzels', snk_cat_id), ('popcorn', snk_cat_id), ('granola bars', snk_cat_id), ('cookies', snk_cat_id), ('chocolate', snk_cat_id), ('candy', snk_cat_id),
+    ('paper towels', hc_cat_id), ('toilet paper', hc_cat_id), ('trash bags', hc_cat_id), ('dish soap', hc_cat_id), ('laundry detergent', hc_cat_id), ('all-purpose cleaner', hc_cat_id), ('sponges', hc_cat_id),
+    ('soap', pch_cat_id), ('shampoo', pch_cat_id), ('conditioner', pch_cat_id), ('toothpaste', pch_cat_id), ('deodorant', pch_cat_id), ('vitamins', pch_cat_id), ('pain reliever', pch_cat_id),
+    ('diapers', bc_cat_id), ('baby wipes', bc_cat_id), ('baby food', bc_cat_id), ('formula', bc_cat_id),
+    ('dog food', ps_cat_id), ('cat food', ps_cat_id), ('cat litter', ps_cat_id),
+    ('deli meat', dpf_cat_id), ('deli cheese', dpf_cat_id), ('rotisserie chicken', dpf_cat_id), ('prepared salads', dpf_cat_id),
+    ('canned soup', cg_cat_id), ('canned corn', cg_cat_id), ('canned tuna', cg_cat_id), ('canned chicken', cg_cat_id),
+    ('ketchup', cs_cat_id), ('mustard', cs_cat_id), ('mayonnaise', cs_cat_id), ('soy sauce', cs_cat_id), ('hot sauce', cs_cat_id), ('bbq sauce', cs_cat_id), ('salad dressing', cs_cat_id), ('cinnamon', cs_cat_id), ('oregano', cs_cat_id), ('paprika', cs_cat_id), ('garlic powder', cs_cat_id),
+    ('cereal', bkc_cat_id), ('oatmeal', bkc_cat_id), ('granola', bkc_cat_id), ('pancake mix', bkc_cat_id)
+    ON CONFLICT (name) DO NOTHING;
+END $$;
+
+-- ============================================================================
+-- PART 3: STORAGE
+-- ============================================================================
+-- Create the storage bucket for flyers if it doesn't exist.
+INSERT INTO storage.buckets (id, name, public)
+VALUES ('flyers', 'flyers', true)
+ON CONFLICT (id) DO NOTHING;
+
+-- ============================================================================
+-- PART 4: ROW LEVEL SECURITY (RLS)
+-- ============================================================================
+-- Enable RLS on all tables.
+ALTER TABLE public.profiles ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.stores ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.categories ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.flyers ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.flyer_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.master_grocery_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.user_watched_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.user_alerts ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.notifications ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.item_price_history ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.master_item_aliases ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.shopping_lists ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.shopping_list_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.suggested_corrections ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.user_submitted_prices ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.brands ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.products ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.store_locations ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.flyer_locations ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipes ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipe_ingredients ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.menu_plans ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.planned_meals ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.tags ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipe_tags ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipe_ratings ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.pantry_items ENABLE ROW LEVEL SECURITY;
+
+-- Create policies.
+-- NOTE(review): per PostgreSQL CREATE POLICY semantics, a FOR ALL policy without an
+-- explicit WITH CHECK reuses its USING expression for writes; several policies below
+-- rely on that default.
+DROP POLICY IF EXISTS "Public profiles are viewable by everyone." ON public.profiles;
+CREATE POLICY "Public profiles are viewable by everyone." ON public.profiles FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Users can update their own profile." ON public.profiles;
+CREATE POLICY "Users can update their own profile." ON public.profiles FOR UPDATE USING (auth.uid() = id) WITH CHECK (auth.uid() = id);
+-- **FIX**: Add policy to allow service_role to insert new profiles via the handle_new_user trigger.
+DROP POLICY IF EXISTS "Allow service_role to insert new profiles" ON public.profiles;
+CREATE POLICY "Allow service_role to insert new profiles" ON public.profiles FOR INSERT TO service_role WITH CHECK (true);
+
+
+-- Public read access for most public data
+DROP POLICY IF EXISTS "Public read access" ON public.stores; CREATE POLICY "Public read access" ON public.stores FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.categories; CREATE POLICY "Public read access" ON public.categories FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.flyers; CREATE POLICY "Public read access" ON public.flyers FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.flyer_items; CREATE POLICY "Public read access" ON public.flyer_items FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.master_grocery_items; CREATE POLICY "Public read access" ON public.master_grocery_items FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access for price history" ON public.item_price_history; CREATE POLICY "Public read access for price history" ON public.item_price_history FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Aliases are publicly viewable." ON public.master_item_aliases; CREATE POLICY "Aliases are publicly viewable." ON public.master_item_aliases FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.brands; CREATE POLICY "Public read access" ON public.brands FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.products; CREATE POLICY "Public read access" ON public.products FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.store_locations; CREATE POLICY "Public read access" ON public.store_locations FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Public read access" ON public.flyer_locations; CREATE POLICY "Public read access" ON public.flyer_locations FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Submitted prices are publicly viewable." ON public.user_submitted_prices; CREATE POLICY "Submitted prices are publicly viewable." ON public.user_submitted_prices FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Recipes are publicly viewable." ON public.recipes; CREATE POLICY "Recipes are publicly viewable." ON public.recipes FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Recipe ingredients are publicly viewable." ON public.recipe_ingredients; CREATE POLICY "Recipe ingredients are publicly viewable." ON public.recipe_ingredients FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Tags are publicly viewable." ON public.tags; CREATE POLICY "Tags are publicly viewable." ON public.tags FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Recipe-tag links are publicly viewable." ON public.recipe_tags; CREATE POLICY "Recipe-tag links are publicly viewable." ON public.recipe_tags FOR SELECT USING (true);
+DROP POLICY IF EXISTS "Recipe ratings are publicly viewable." ON public.recipe_ratings; CREATE POLICY "Recipe ratings are publicly viewable." ON public.recipe_ratings FOR SELECT USING (true);
+
+-- Authenticated write access for core data tables
+DROP POLICY IF EXISTS "Allow full access for authenticated users" ON public.stores;
+CREATE POLICY "Allow full access for authenticated users" ON public.stores FOR ALL TO authenticated USING (true) WITH CHECK (true);
+
+DROP POLICY IF EXISTS "Allow full access for authenticated users" ON public.categories;
+CREATE POLICY "Allow full access for authenticated users" ON public.categories FOR ALL TO authenticated USING (true) WITH CHECK (true);
+
+DROP POLICY IF EXISTS "Allow full access for authenticated users" ON public.flyers;
+CREATE POLICY "Allow full access for authenticated users" ON public.flyers FOR ALL TO authenticated USING (true) WITH CHECK (true);
+
+DROP POLICY IF EXISTS "Allow full access for authenticated users" ON public.flyer_items;
+CREATE POLICY "Allow full access for authenticated users" ON public.flyer_items FOR ALL TO authenticated USING (true) WITH CHECK (true);
+
+-- Scoped anon access used by the DB self-test: only rows whose item name carries the test prefix.
+DROP POLICY IF EXISTS "Allow anon users to manage test items" ON public.flyer_items;
+CREATE POLICY "Allow anon users to manage test items" ON public.flyer_items
+FOR ALL TO anon
+USING (item LIKE 'DB_SELF_TEST_ITEM_%')
+WITH CHECK (item LIKE 'DB_SELF_TEST_ITEM_%');
+
+DROP POLICY IF EXISTS "Allow full access for authenticated users" ON public.master_grocery_items;
+CREATE POLICY "Allow full access for authenticated users" ON public.master_grocery_items FOR ALL TO authenticated USING (true) WITH CHECK (true);
+
+DROP POLICY IF EXISTS "Allow full access for authenticated users" ON public.brands;
+CREATE POLICY "Allow full access for authenticated users" ON public.brands FOR ALL TO authenticated USING (true) WITH CHECK (true);
+
+DROP POLICY IF EXISTS "Allow full access for authenticated users" ON public.products;
+CREATE POLICY "Allow full access for authenticated users" ON public.products FOR ALL TO authenticated USING (true) WITH CHECK (true);
+
+-- User-specific policies
+DROP POLICY IF EXISTS "Users can manage their own watched items." ON public.user_watched_items;
+CREATE POLICY "Users can manage their own watched items." ON public.user_watched_items FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage their own alerts" ON public.user_alerts;
+CREATE POLICY "Users can manage their own alerts" ON public.user_alerts FOR ALL USING (auth.uid() = (SELECT user_id FROM public.user_watched_items WHERE id = user_watched_item_id));
+
+DROP POLICY IF EXISTS "Users can manage their own notifications" ON public.notifications;
+CREATE POLICY "Users can manage their own notifications" ON public.notifications FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage their own shopping lists." ON public.shopping_lists;
+CREATE POLICY "Users can manage their own shopping lists." ON public.shopping_lists FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+-- **FIX**: Add policy to allow service_role to insert new shopping_lists via the handle_new_user trigger.
+DROP POLICY IF EXISTS "Allow service_role to insert new shopping lists" ON public.shopping_lists;
+CREATE POLICY "Allow service_role to insert new shopping lists" ON public.shopping_lists FOR INSERT TO service_role WITH CHECK (true);
+
+DROP POLICY IF EXISTS "Users can manage items in their own shopping lists." ON public.shopping_list_items;
+CREATE POLICY "Users can manage items in their own shopping lists." ON public.shopping_list_items FOR ALL USING (auth.uid() = (SELECT user_id FROM public.shopping_lists WHERE id = shopping_list_id));
+
+DROP POLICY IF EXISTS "Users can manage their own suggestions." ON public.suggested_corrections;
+CREATE POLICY "Users can manage their own suggestions." ON public.suggested_corrections FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage their own submitted prices." ON public.user_submitted_prices;
+CREATE POLICY "Users can manage their own submitted prices." ON public.user_submitted_prices FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage their own recipes." ON public.recipes;
+CREATE POLICY "Users can manage their own recipes." ON public.recipes FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage their own menu plans." ON public.menu_plans;
+CREATE POLICY "Users can manage their own menu plans." ON public.menu_plans FOR ALL USING (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage meals in their own menu plans." ON public.planned_meals;
+CREATE POLICY "Users can manage meals in their own menu plans." ON public.planned_meals FOR ALL USING (auth.uid() = (SELECT user_id FROM public.menu_plans WHERE id = menu_plan_id));
+
+DROP POLICY IF EXISTS "Users can manage their own recipe ratings." ON public.recipe_ratings;
+CREATE POLICY "Users can manage their own recipe ratings." ON public.recipe_ratings FOR ALL USING (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage their own pantry items." ON public.pantry_items;
+CREATE POLICY "Users can manage their own pantry items." ON public.pantry_items FOR ALL USING (auth.uid() = user_id);
+
+-- Policies for the 'flyers' storage bucket
+DROP POLICY IF EXISTS "Allow public access to flyers storage" ON storage.objects;
+CREATE POLICY "Allow public access to flyers storage" ON storage.objects FOR SELECT TO public USING (bucket_id = 'flyers');
+
+DROP POLICY IF EXISTS "Allow authenticated users to manage flyers" ON storage.objects;
+CREATE POLICY "Allow authenticated users to manage flyers" ON storage.objects FOR ALL TO authenticated USING (bucket_id = 'flyers');
+
+-- Scoped anon access used by the storage self-test: only objects with the test name prefix.
+DROP POLICY IF EXISTS "Allow anon users to manage test files" ON storage.objects;
+CREATE POLICY "Allow anon users to manage test files" ON storage.objects
+FOR ALL TO anon
+USING (bucket_id = 'flyers' AND name LIKE 'storage-self-test-%')
+WITH CHECK (bucket_id = 'flyers' AND name LIKE 'storage-self-test-%');
+
+
+-- ============================================================================
+-- PART 5: DATABASE FUNCTIONS
+-- ============================================================================
+-- Function to find the best current sale price for a user's watched items.
+-- This function queries all currently active flyers to find the lowest price
+-- for each item on a specific user's watchlist.
+CREATE OR REPLACE FUNCTION public.get_best_sale_prices_for_user(p_user_id UUID)
+RETURNS TABLE (
+    master_item_id BIGINT,
+    item_name TEXT,
+    best_price_in_cents INTEGER,
+    store_name TEXT,
+    flyer_id BIGINT,
+    flyer_image_url TEXT,
+    flyer_valid_from DATE,
+    flyer_valid_to DATE
+)
+LANGUAGE plpgsql
+SECURITY INVOKER -- Runs with the privileges of the calling user. RLS policies will apply.
+AS $$
+BEGIN
+    RETURN QUERY
+    WITH UserWatchedSales AS (
+        -- This CTE gathers all sales from active flyers that match the user's watched items.
+        SELECT
+            uwi.master_item_id,
+            mgi.name AS item_name,
+            fi.price_in_cents,
+            s.name AS store_name,
+            f.id AS flyer_id,
+            f.image_url AS flyer_image_url,
+            f.valid_from AS flyer_valid_from,
+            f.valid_to AS flyer_valid_to,
+            -- We use ROW_NUMBER to rank sales for the same item, prioritizing the lowest price.
+            ROW_NUMBER() OVER (PARTITION BY uwi.master_item_id ORDER BY fi.price_in_cents ASC, f.valid_to DESC, s.name ASC) as rn
+        FROM
+            public.user_watched_items uwi
+            JOIN public.master_grocery_items mgi ON uwi.master_item_id = mgi.id
+            JOIN public.flyer_items fi ON uwi.master_item_id = fi.master_item_id
+            JOIN public.flyers f ON fi.flyer_id = f.id
+            JOIN public.stores s ON f.store_id = s.id
+        WHERE uwi.user_id = p_user_id
+            AND f.valid_from <= CURRENT_DATE
+            AND f.valid_to >= CURRENT_DATE
+            AND fi.price_in_cents IS NOT NULL
+    )
+    -- The final select returns only the top-ranked sale (rn = 1) for each item.
+    -- Columns map positionally onto the RETURNS TABLE list, so price_in_cents
+    -- is exposed to callers as best_price_in_cents.
+    SELECT uws.master_item_id, uws.item_name, uws.price_in_cents, uws.store_name, uws.flyer_id, uws.flyer_image_url, uws.flyer_valid_from, uws.flyer_valid_to
+    FROM UserWatchedSales uws
+    WHERE uws.rn = 1;
+END;
+$$;
+
+-- Function to generate a smart shopping list from a menu plan, subtracting pantry items.
+-- This function calculates the total ingredients needed for a user's menu plan,
+-- scales them by desired servings, and then subtracts what the user already has
+-- in their pantry to determine what needs to be bought.
+CREATE OR REPLACE FUNCTION public.generate_shopping_list_for_menu_plan(p_menu_plan_id BIGINT, p_user_id UUID)
+RETURNS TABLE (
+    master_item_id BIGINT,
+    item_name TEXT,
+    required_quantity NUMERIC,
+    pantry_quantity NUMERIC,
+    shopping_list_quantity NUMERIC,
+    unit TEXT
+)
+LANGUAGE plpgsql
+SECURITY INVOKER -- Runs with the privileges of the calling user. RLS policies will apply.
+AS $$
+BEGIN
+    RETURN QUERY
+    WITH RequiredIngredients AS (
+        -- This CTE calculates the total quantity of each ingredient needed for the menu plan.
+        -- It accounts for scaling the recipe based on the number of servings the user plans to cook.
+        -- NOTE(review): if r.servings is NULL or 0, NULLIF makes the scale factor NULL and the
+        -- whole SUM for that ingredient becomes NULL — verify recipes always have servings > 0.
+        SELECT
+            ri.master_item_id,
+            ri.unit,
+            SUM(
+                ri.quantity * -- The base ingredient quantity from the recipe
+                -- Calculate the scaling factor. Default to 1 if servings_to_cook is not set.
+                (COALESCE(pm.servings_to_cook, r.servings)::NUMERIC / NULLIF(r.servings, 0)::NUMERIC)
+            ) AS total_required
+        FROM public.menu_plans mp
+        JOIN public.planned_meals pm ON mp.id = pm.menu_plan_id
+        JOIN public.recipe_ingredients ri ON pm.recipe_id = ri.recipe_id
+        JOIN public.recipes r ON pm.recipe_id = r.id -- Join to get the recipe's base servings
+        WHERE mp.id = p_menu_plan_id AND mp.user_id = p_user_id
+        GROUP BY ri.master_item_id, ri.unit
+    )
+    -- This final select compares the required ingredients with the user's pantry.
+    SELECT
+        req.master_item_id,
+        mgi.name AS item_name,
+        req.total_required AS required_quantity,
+        COALESCE(pi.quantity, 0) AS pantry_quantity,
+        -- Calculate the amount to buy. If pantry has enough, this will be 0 or less, so GREATEST(0, ...) ensures we don't get negative values.
+        GREATEST(0, req.total_required - COALESCE(pi.quantity, 0)) AS shopping_list_quantity,
+        req.unit
+    FROM RequiredIngredients req
+    JOIN public.master_grocery_items mgi ON req.master_item_id = mgi.id
+    LEFT JOIN public.pantry_items pi
+        ON req.master_item_id = pi.master_item_id
+        AND req.unit = pi.unit -- Critical: only subtract if units match to avoid errors (e.g., subtracting 2 "items" from 500 "grams").
+        AND pi.user_id = p_user_id
+    WHERE
+        -- Only include items that actually need to be purchased.
+        GREATEST(0, req.total_required - COALESCE(pi.quantity, 0)) > 0;
+END;
+$$;
+
+-- Function to find recipes based on the percentage of their ingredients that are currently on sale.
+-- For example, you can ask for recipes where at least 50% of the ingredients are on sale.
+CREATE OR REPLACE FUNCTION public.get_recipes_by_sale_percentage(p_min_sale_percentage NUMERIC DEFAULT 100.0)
+RETURNS TABLE (recipe_details JSONB)
+LANGUAGE sql
+STABLE
+SECURITY INVOKER
+AS $$
+  WITH BestCurrentPrices AS (
+    -- CTE 1: For every distinct item on sale, find its single best price and the store offering it.
+    -- (Ties on price are broken by the latest flyer end date via the ROW_NUMBER ordering below.)
+    SELECT
+      bcp.master_item_id,
+      bcp.price_in_cents,
+      bcp.store_name
+    FROM (
+      SELECT
+        fi.master_item_id,
+        fi.price_in_cents,
+        s.name as store_name,
+        ROW_NUMBER() OVER(PARTITION BY fi.master_item_id ORDER BY fi.price_in_cents ASC, f.valid_to DESC) as rn
+      FROM public.flyer_items fi
+      JOIN public.flyers f ON fi.flyer_id = f.id
+      JOIN public.stores s ON f.store_id = s.id
+      WHERE fi.master_item_id IS NOT NULL
+        AND fi.price_in_cents IS NOT NULL
+        AND CURRENT_DATE BETWEEN f.valid_from AND f.valid_to
+    ) bcp
+    WHERE bcp.rn = 1
+  ),
+  RecipeIngredientStats AS (
+    -- CTE 2: For each recipe, count its total ingredients and how many of them are on sale.
+    SELECT
+      ri.recipe_id,
+      COUNT(ri.master_item_id) AS total_ingredients,
+      COUNT(bcp.master_item_id) AS sale_ingredients -- COUNT(column) only counts non-NULL values.
+    FROM public.recipe_ingredients ri
+    LEFT JOIN BestCurrentPrices bcp ON ri.master_item_id = bcp.master_item_id
+    GROUP BY ri.recipe_id
+  ),
+  EligibleRecipes AS (
+    -- CTE 3: Filter recipes based on the minimum sale percentage provided as an argument.
+    SELECT
+      ris.recipe_id
+    FROM RecipeIngredientStats ris
+    WHERE ris.total_ingredients > 0 -- Avoid division by zero
+      AND (ris.sale_ingredients * 100.0 / ris.total_ingredients) >= p_min_sale_percentage
+  ),
+  RecipeSaleDetails AS (
+    -- CTE 4: Gather details for the eligible recipes and ALL their ingredients, noting which are on sale.
+    SELECT
+      r.id AS recipe_id,
+      r.name AS recipe_name,
+      mgi.name AS item_name,
+      bcp.price_in_cents AS best_price_in_cents, -- This will be NULL if the ingredient is not on sale.
+      bcp.store_name
+    FROM public.recipes r
+    JOIN EligibleRecipes er ON r.id = er.recipe_id
+    JOIN public.recipe_ingredients ri ON r.id = ri.recipe_id
+    JOIN public.master_grocery_items mgi ON ri.master_item_id = mgi.id
+    LEFT JOIN BestCurrentPrices bcp ON ri.master_item_id = bcp.master_item_id -- LEFT JOIN to include all ingredients, not just sale ones.
+  )
+  -- Final Step: Aggregate the details into a single JSON object for each recipe.
+  SELECT
+    jsonb_build_object(
+      'id', rsd.recipe_id,
+      'name', rsd.recipe_name,
+      'ingredients', jsonb_agg(
+        jsonb_build_object(
+          'item_name', rsd.item_name,
+          'on_sale', (rsd.best_price_in_cents IS NOT NULL),
+          'best_price_in_cents', rsd.best_price_in_cents,
+          'store_name', rsd.store_name
+        )
+        ORDER BY (rsd.best_price_in_cents IS NOT NULL) DESC, rsd.item_name ASC -- Show sale items first in the list.
+      )
+    )
+  FROM RecipeSaleDetails rsd
+  GROUP BY rsd.recipe_id, rsd.recipe_name;
+$$;
+
+-- Function to add items generated from a menu plan directly to a user's shopping list.
+-- This acts as a utility function to chain `generate_shopping_list_for_menu_plan` with an INSERT action.
+-- Returns one row per item added/updated: (master_item_id, item_name, quantity_added).
+CREATE OR REPLACE FUNCTION public.add_menu_plan_to_shopping_list(
+    p_menu_plan_id BIGINT,
+    p_shopping_list_id BIGINT,
+    p_user_id UUID
+)
+RETURNS TABLE (
+    master_item_id BIGINT,
+    item_name TEXT,
+    quantity_added NUMERIC
+)
+LANGUAGE plpgsql
+-- SECURITY DEFINER is used here to perform actions with elevated privileges,
+-- but it's safe because we first perform a strict ownership check inside the function.
+SECURITY DEFINER
+-- **FIX**: Pin the search path so objects cannot be hijacked through the caller's
+-- search_path while running with definer privileges. Every reference in the body
+-- is schema-qualified, so an empty search path is safe.
+SET search_path = ''
+AS $$
+DECLARE
+    list_owner_id UUID;
+    item_to_add RECORD;
+BEGIN
+    -- **FIX**: SECURITY DEFINER bypasses RLS, and the original only compared the
+    -- list owner against the caller-SUPPLIED p_user_id — an authenticated user could
+    -- pass another user's id and both read that user's menu plan items and write to
+    -- their shopping list. Bind p_user_id to the real authenticated identity.
+    -- (auth.uid() IS NULL is tolerated so trusted service-role contexts keep working.)
+    IF auth.uid() IS NOT NULL AND auth.uid() <> p_user_id THEN
+        RAISE EXCEPTION 'Permission denied: p_user_id does not match the authenticated user';
+    END IF;
+
+    -- Security Check: Ensure the user calling this function owns the target shopping list.
+    SELECT user_id INTO list_owner_id
+    FROM public.shopping_lists
+    WHERE id = p_shopping_list_id;
+
+    IF list_owner_id IS NULL OR list_owner_id <> p_user_id THEN
+        RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
+    END IF;
+
+    -- Loop through the items generated by the smart shopping list function.
+    FOR item_to_add IN
+        SELECT * FROM public.generate_shopping_list_for_menu_plan(p_menu_plan_id, p_user_id)
+    LOOP
+        -- Insert the item into the shopping list. If it already exists, add to the quantity.
+        INSERT INTO public.shopping_list_items (shopping_list_id, master_item_id, quantity)
+        VALUES (p_shopping_list_id, item_to_add.master_item_id, item_to_add.shopping_list_quantity)
+        ON CONFLICT (shopping_list_id, master_item_id)
+        DO UPDATE SET
+            quantity = shopping_list_items.quantity + EXCLUDED.quantity;
+
+        -- Return the details of the item that was added/updated.
+        RETURN QUERY SELECT item_to_add.master_item_id, item_to_add.item_name, item_to_add.shopping_list_quantity;
+    END LOOP;
+END;
+$$;
+
+-- Function to find recipes that have at least a specified number of ingredients currently on sale.
+CREATE OR REPLACE FUNCTION public.get_recipes_by_min_sale_ingredients(p_min_sale_ingredients INTEGER)
+RETURNS TABLE (
+    recipe_id BIGINT,
+    recipe_name TEXT,
+    description TEXT,
+    sale_ingredients_count BIGINT
+)
+LANGUAGE sql
+STABLE
+SECURITY INVOKER
+AS $$
+  WITH CurrentSaleItems AS (
+    -- CTE 1: Get a distinct list of all master item IDs that are currently on sale.
+    SELECT DISTINCT fi.master_item_id
+    FROM public.flyer_items fi
+    JOIN public.flyers f ON fi.flyer_id = f.id
+    WHERE fi.master_item_id IS NOT NULL
+      AND fi.price_in_cents IS NOT NULL
+      AND CURRENT_DATE BETWEEN f.valid_from AND f.valid_to
+  ),
+  RecipeIngredientStats AS (
+    -- CTE 2: For each recipe, count how many of its ingredients are on the sale list.
+    SELECT
+      ri.recipe_id,
+      COUNT(csi.master_item_id) AS sale_ingredients_count
+    FROM public.recipe_ingredients ri
+    LEFT JOIN CurrentSaleItems csi ON ri.master_item_id = csi.master_item_id
+    GROUP BY ri.recipe_id
+  )
+  -- Final Step: Select recipes that meet the minimum sale ingredient count and order them.
+  SELECT
+    r.id,
+    r.name,
+    r.description,
+    ris.sale_ingredients_count
+  FROM public.recipes r
+  JOIN RecipeIngredientStats ris ON r.id = ris.recipe_id
+  WHERE ris.sale_ingredients_count >= p_min_sale_ingredients
+  ORDER BY
+    ris.sale_ingredients_count DESC,
+    r.avg_rating DESC;
+$$;
+
+-- Function to find the most frequently advertised items in a given period.
+-- This helps identify which items go on sale most often.
+CREATE OR REPLACE FUNCTION public.get_most_frequent_sale_items(days_interval INTEGER, result_limit INTEGER)
+RETURNS TABLE (
+    item_name TEXT,
+    sale_occurrence_count BIGINT
+)
+LANGUAGE sql
+STABLE
+SECURITY INVOKER
+AS $$
+  SELECT
+    mgi.name AS item_name,
+    COUNT(DISTINCT fi.flyer_id) AS sale_occurrence_count -- Count distinct flyers the item appeared in
+  FROM
+    public.flyer_items fi
+  JOIN
+    public.flyers f ON fi.flyer_id = f.id
+  JOIN
+    public.master_grocery_items mgi ON fi.master_item_id = mgi.id
+  WHERE
+    fi.master_item_id IS NOT NULL
+    -- Filter for flyers that have been active in the last X days.
+    -- (The interval is built by concatenating the integer with ' days' and casting.)
+    AND f.valid_to >= (CURRENT_DATE - (days_interval || ' days')::INTERVAL)
+    AND f.valid_from <= CURRENT_DATE
+  GROUP BY
+    mgi.id, mgi.name
+  ORDER BY
+    sale_occurrence_count DESC
+  LIMIT result_limit;
+$$;
+
+-- Function to find recipes by a specific ingredient AND a specific tag.
+-- This allows for more refined recipe searching, e.g., "Find me a quick & easy recipe with chicken breast".
+-- NOTE(review): the name comparisons below are exact and case-sensitive; callers must
+-- pass names exactly as stored (seed data uses lowercase item names).
+CREATE OR REPLACE FUNCTION public.find_recipes_by_ingredient_and_tag(p_ingredient_name TEXT, p_tag_name TEXT)
+RETURNS TABLE (
+    id BIGINT,
+    name TEXT,
+    description TEXT,
+    prep_time_minutes INTEGER,
+    cook_time_minutes INTEGER,
+    avg_rating NUMERIC
+)
+LANGUAGE sql
+STABLE
+SECURITY INVOKER
+AS $$
+  SELECT
+    r.id, r.name, r.description, r.prep_time_minutes, r.cook_time_minutes, r.avg_rating
+  FROM
+    public.recipes r
+  WHERE
+    -- Check that the recipe has the required ingredient using an EXISTS subquery.
+    EXISTS (
+      SELECT 1 FROM public.recipe_ingredients ri
+      JOIN public.master_grocery_items mgi ON ri.master_item_id = mgi.id
+      WHERE ri.recipe_id = r.id AND mgi.name = p_ingredient_name
+    )
+    AND
+    -- Check that the recipe has the required tag using another EXISTS subquery.
+    EXISTS (
+      SELECT 1 FROM public.recipe_tags rt
+      JOIN public.tags t ON rt.tag_id = t.id
+      WHERE rt.recipe_id = r.id AND t.name = p_tag_name
+    )
+  ORDER BY
+    r.avg_rating DESC, r.name ASC;
+$$;
+
+
+-- ============================================================================
+-- PART 6: SYSTEM CHECK HELPER FUNCTIONS
+-- These functions are called by the 'system-check' Edge Function to inspect
+-- the database state without exposing schema details to the client. They are
+-- defined as `SECURITY DEFINER` to bypass RLS for inspection purposes.
+-- ============================================================================
+DROP FUNCTION IF EXISTS public.check_schema();
+CREATE OR REPLACE FUNCTION public.check_schema()
+RETURNS json
+LANGUAGE sql
+SECURITY DEFINER SET search_path = public
+AS $$
+  SELECT json_build_object(
+    'tables', (SELECT array_agg(table_name) FROM information_schema.tables WHERE table_schema = 'public')
+  );
+$$;
+
+DROP FUNCTION IF EXISTS public.check_rls();
+CREATE OR REPLACE FUNCTION public.check_rls()
+RETURNS TABLE(table_name text, policy_name text)
+LANGUAGE sql
+SECURITY DEFINER SET search_path = public
+AS $$
+  SELECT
+    tablename::text,
+    policyname::text
+  FROM pg_policies
+  WHERE schemaname = 'public';
+$$;
+
+DROP FUNCTION IF EXISTS public.check_trigger_security();
+CREATE OR REPLACE FUNCTION public.check_trigger_security()
+RETURNS TABLE(function_name text, is_security_definer boolean, owner_role text)
+LANGUAGE sql
+SECURITY DEFINER SET search_path = public
+AS $$
+  SELECT
+    p.proname::text,
+    p.prosecdef,
+    r.rolname::text
+  FROM pg_proc p
+  JOIN pg_roles r ON p.proowner = r.oid
+  WHERE p.proname = 'handle_new_user';
+$$;
+
+
+-- ============================================================================
+-- PART 7: TRIGGERS
+-- ============================================================================
+
+-- 1. Set up the trigger to automatically create a profile when a new user signs up.
+-- This function will be called by the trigger.
+-- It is set to SECURITY DEFINER and owned by service_role to ensure it can
+-- insert into public tables, bypassing the restrictive permissions of the
+-- internal supabase_auth_admin role that invokes the trigger.
+CREATE OR REPLACE FUNCTION public.handle_new_user()
+RETURNS TRIGGER AS $$
+DECLARE
+  new_profile_id UUID;
+BEGIN
+  INSERT INTO public.profiles (id, full_name, avatar_url)
+  VALUES (new.id, new.raw_user_meta_data->>'full_name', new.raw_user_meta_data->>'avatar_url')
+  RETURNING id INTO new_profile_id;
+  -- Also create a default shopping list for the new user.
+ INSERT INTO public.shopping_lists (user_id, name) + VALUES (new_profile_id, 'Main Shopping List'); + RETURN new; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- This trigger calls the function after a new user is created. +DROP TRIGGER IF EXISTS on_auth_user_created ON auth.users; +CREATE TRIGGER on_auth_user_created + AFTER INSERT ON auth.users + FOR EACH ROW EXECUTE FUNCTION public.handle_new_user(); + +-- Grant ownership to the service_role to align with RLS policies. +ALTER FUNCTION public.handle_new_user() OWNER TO service_role; + +-- 2. Create a reusable function to automatically update 'updated_at' columns. +CREATE OR REPLACE FUNCTION public.handle_updated_at() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = now(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Apply the trigger to the 'profiles' table. +DROP TRIGGER IF EXISTS on_profile_updated ON public.profiles; +CREATE TRIGGER on_profile_updated + BEFORE UPDATE ON public.profiles + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- Apply the trigger to the 'pantry_items' table. +DROP TRIGGER IF EXISTS on_pantry_item_updated ON public.pantry_items; +CREATE TRIGGER on_pantry_item_updated + BEFORE UPDATE ON public.pantry_items + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- 3. Create a trigger function to populate the item_price_history table on insert. +CREATE OR REPLACE FUNCTION public.update_price_history_on_flyer_item_insert() +RETURNS TRIGGER AS $$ +DECLARE + flyer_valid_from DATE; + flyer_valid_to DATE; + current_summary_date DATE; +BEGIN + -- Only run if the new flyer item is linked to a master item and has a price. + IF NEW.master_item_id IS NULL OR NEW.price_in_cents IS NULL THEN + RETURN NEW; + END IF; + + -- Get the validity dates of the flyer. + SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to + FROM public.flyers + WHERE id = NEW.flyer_id; + + -- If the flyer dates are not set, we cannot proceed. 
+ IF flyer_valid_from IS NULL OR flyer_valid_to IS NULL THEN + RETURN NEW; + END IF; + + -- Loop through each day the flyer is valid and update the price history. + FOR current_summary_date IN SELECT generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval)::date LOOP + INSERT INTO public.item_price_history (master_item_id, summary_date, min_price_in_cents, max_price_in_cents, avg_price_in_cents, data_points_count) + VALUES (NEW.master_item_id, current_summary_date, NEW.price_in_cents, NEW.price_in_cents, NEW.price_in_cents, 1) + ON CONFLICT (master_item_id, summary_date) + DO UPDATE SET + min_price_in_cents = LEAST(item_price_history.min_price_in_cents, EXCLUDED.min_price_in_cents), + max_price_in_cents = GREATEST(item_price_history.max_price_in_cents, EXCLUDED.max_price_in_cents), + -- Recalculate the average price: ((old_avg * old_count) + new_price) / (old_count + 1) + avg_price_in_cents = ROUND(((item_price_history.avg_price_in_cents * item_price_history.data_points_count) + EXCLUDED.avg_price_in_cents) / (item_price_history.data_points_count + 1)), + data_points_count = item_price_history.data_points_count + 1; + END LOOP; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Create the trigger on the flyer_items table for insert. +DROP TRIGGER IF EXISTS trigger_update_price_history ON public.flyer_items; +CREATE TRIGGER trigger_update_price_history + AFTER INSERT ON public.flyer_items + FOR EACH ROW EXECUTE FUNCTION public.update_price_history_on_flyer_item_insert(); + +-- 4. Create a trigger function to recalculate price history when a flyer item is deleted. +CREATE OR REPLACE FUNCTION public.recalculate_price_history_on_flyer_item_delete() +RETURNS TRIGGER AS $$ +DECLARE + flyer_valid_from DATE; + flyer_valid_to DATE; + current_summary_date DATE; + new_aggregates RECORD; +BEGIN + -- Only run if the deleted item was linked to a master item and had a price. 
+ IF OLD.master_item_id IS NULL OR OLD.price_in_cents IS NULL THEN + RETURN OLD; + END IF; + + -- Get the validity dates of the flyer. + SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to + FROM public.flyers + WHERE id = OLD.flyer_id; + + -- If the flyer dates are not set, we cannot proceed. + IF flyer_valid_from IS NULL OR flyer_valid_to IS NULL THEN + RETURN OLD; + END IF; + + -- Loop through each day the flyer was valid to recalculate the history. + FOR current_summary_date IN SELECT generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval)::date LOOP + -- Recalculate aggregates for the master item on this specific day from all other flyers. + SELECT + MIN(fi.price_in_cents) AS min_price, + MAX(fi.price_in_cents) AS max_price, + ROUND(AVG(fi.price_in_cents)) AS avg_price, + COUNT(fi.id) AS data_points + INTO new_aggregates + FROM public.flyer_items fi + JOIN public.flyers f ON fi.flyer_id = f.id + WHERE fi.master_item_id = OLD.master_item_id + AND fi.price_in_cents IS NOT NULL + AND current_summary_date BETWEEN f.valid_from AND f.valid_to; + + -- If there are still data points, update the summary. Otherwise, delete it. + IF new_aggregates.data_points > 0 THEN + UPDATE public.item_price_history + SET + min_price_in_cents = new_aggregates.min_price, + max_price_in_cents = new_aggregates.max_price, + avg_price_in_cents = new_aggregates.avg_price, + data_points_count = new_aggregates.data_points + WHERE master_item_id = OLD.master_item_id AND summary_date = current_summary_date; + ELSE + DELETE FROM public.item_price_history + WHERE master_item_id = OLD.master_item_id AND summary_date = current_summary_date; + END IF; + END LOOP; + + RETURN OLD; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Create the trigger on the flyer_items table for DELETE operations. 
+DROP TRIGGER IF EXISTS trigger_recalculate_price_history_on_delete ON public.flyer_items; +CREATE TRIGGER trigger_recalculate_price_history_on_delete + AFTER DELETE ON public.flyer_items + FOR EACH ROW EXECUTE FUNCTION public.recalculate_price_history_on_flyer_item_delete(); + +-- 5. Trigger function to update the average rating on the recipes table. +CREATE OR REPLACE FUNCTION public.update_recipe_rating_aggregates() +RETURNS TRIGGER AS $$ +BEGIN + UPDATE public.recipes + SET + avg_rating = ( + SELECT AVG(rating) + FROM public.recipe_ratings + WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) + ), + rating_count = ( + SELECT COUNT(*) + FROM public.recipe_ratings + WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) + ) + WHERE id = COALESCE(NEW.recipe_id, OLD.recipe_id); + + RETURN NULL; -- The result is ignored since this is an AFTER trigger. +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Trigger to call the function after any change to recipe_ratings. +DROP TRIGGER IF EXISTS on_recipe_rating_change ON public.recipe_ratings; +CREATE TRIGGER on_recipe_rating_change + AFTER INSERT OR UPDATE OR DELETE ON public.recipe_ratings + FOR EACH ROW EXECUTE FUNCTION public.update_recipe_rating_aggregates(); diff --git a/sql/delete_all_tables.sql.txt b/sql/delete_all_tables.sql.txt new file mode 100644 index 0000000..e1f4279 --- /dev/null +++ b/sql/delete_all_tables.sql.txt @@ -0,0 +1,38 @@ +/* +-- This script is used to completely reset the public schema by deleting all tables. +-- It should be run before re-running the schema.sql.txt script to ensure a clean state. +-- The CASCADE option is used to automatically handle dependent objects like foreign keys. +-- The order is roughly the reverse of creation to minimize dependency issues. 
+*/ + +DROP TABLE IF EXISTS public.pantry_items CASCADE; +DROP TABLE IF EXISTS public.planned_meals CASCADE; +DROP TABLE IF EXISTS public.menu_plans CASCADE; +DROP TABLE IF EXISTS public.recipe_ratings CASCADE; +DROP TABLE IF EXISTS public.recipe_tags CASCADE; +DROP TABLE IF EXISTS public.tags CASCADE; +DROP TABLE IF EXISTS public.recipe_ingredients CASCADE; +DROP TABLE IF EXISTS public.recipes CASCADE; +DROP TABLE IF EXISTS public.flyer_locations CASCADE; +DROP TABLE IF EXISTS public.store_locations CASCADE; +DROP TABLE IF EXISTS public.user_submitted_prices CASCADE; +DROP TABLE IF EXISTS public.suggested_corrections CASCADE; +DROP TABLE IF EXISTS public.shopping_list_items CASCADE; +DROP TABLE IF EXISTS public.shopping_lists CASCADE; +DROP TABLE IF EXISTS public.notifications CASCADE; +DROP TABLE IF EXISTS public.user_alerts CASCADE; +DROP TABLE IF EXISTS public.user_watched_items CASCADE; +DROP TABLE IF EXISTS public.master_item_aliases CASCADE; +DROP TABLE IF EXISTS public.item_price_history CASCADE; +DROP TABLE IF EXISTS public.flyer_items CASCADE; +DROP TABLE IF EXISTS public.products CASCADE; +DROP TABLE IF EXISTS public.brands CASCADE; +DROP TABLE IF EXISTS public.flyers CASCADE; +DROP TABLE IF EXISTS public.master_grocery_items CASCADE; +DROP TABLE IF EXISTS public.stores CASCADE; +DROP TABLE IF EXISTS public.categories CASCADE; +DROP TABLE IF EXISTS public.profiles CASCADE; + +/* +-- The delete_all_tables.sql.txt script does not and cannot remove the auth.users table - Go to your Supabase Project Dashboard -> Authentication -> Users. 
+*/ diff --git a/sql/drop_tables.sql b/sql/drop_tables.sql new file mode 100644 index 0000000..64e5893 --- /dev/null +++ b/sql/drop_tables.sql @@ -0,0 +1,15 @@ +DROP TABLE IF EXISTS public.user_submitted_prices CASCADE; +DROP TABLE IF EXISTS public.suggested_corrections CASCADE; +DROP TABLE IF EXISTS public.shopping_list_items CASCADE; +DROP TABLE IF EXISTS public.shopping_lists CASCADE; +DROP TABLE IF EXISTS public.master_item_aliases CASCADE; +DROP TABLE IF EXISTS public.item_price_history CASCADE; +DROP TABLE IF EXISTS public.notifications CASCADE; +DROP TABLE IF EXISTS public.user_alerts CASCADE; +DROP TABLE IF EXISTS public.flyer_items CASCADE; +DROP TABLE IF EXISTS public.user_watched_items CASCADE; +DROP TABLE IF EXISTS public.flyers CASCADE; +DROP TABLE IF EXISTS public.master_grocery_items CASCADE; +DROP TABLE IF EXISTS public.stores CASCADE; +DROP TABLE IF EXISTS public.categories CASCADE; +DROP TABLE IF EXISTS public.profiles CASCADE; \ No newline at end of file diff --git a/sql/fix_permissions.sql.txt b/sql/fix_permissions.sql.txt new file mode 100644 index 0000000..ccf04de --- /dev/null +++ b/sql/fix_permissions.sql.txt @@ -0,0 +1,79 @@ +-- ============================================================================ +-- PERMISSION FIX & VERIFICATION SCRIPT +-- ============================================================================ +-- Purpose: +-- This script first resets the default privileges on the public schema to fix +-- "permission denied" errors. It then runs a series of tests to VERIFY that +-- the permissions have been correctly applied, providing clear feedback. +-- +-- Usage: +-- RUN THIS SCRIPT FIRST. If it completes successfully, you can then run +-- the main schema.sql.txt script. + +-- ============================================================================ +-- STEP 1: APPLY PERMISSION FIXES +-- ============================================================================ + +-- Grant usage on the schema to the key roles. 
+GRANT USAGE ON SCHEMA public TO postgres, anon, authenticated, service_role; + +-- Grant ALL privileges on EXISTING tables, sequences, and functions to the key roles. +-- This is a "catch-all" for any existing objects that may have wrong permissions. +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO postgres, anon, authenticated, service_role; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO postgres, anon, authenticated, service_role; +GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA public TO postgres, anon, authenticated, service_role; + +-- Grant ALL privileges on FUTURE tables, sequences, and functions to the key roles. +-- This is the most important part: it ensures new objects get the right permissions. +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO postgres, anon, authenticated, service_role; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO postgres, anon, authenticated, service_role; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON FUNCTIONS TO postgres, anon, authenticated, service_role; + + +-- ============================================================================ +-- STEP 2: VERIFY PERMISSIONS HAVE BEEN APPLIED +-- ============================================================================ +-- This block performs live tests to confirm the fixes worked. If any test +-- fails, it will raise an error and stop the script. + +DO $$ +BEGIN + RAISE NOTICE '--- Starting Permission Verification Tests ---'; + + -- TEST 1: Check if the 'postgres' user (you) can CREATE in the public schema. + -- This directly tests the original "permission denied" error. + IF NOT has_schema_privilege('postgres', 'public', 'CREATE') THEN + RAISE EXCEPTION 'TEST FAILED: The "postgres" role still does not have CREATE permission on the public schema. 
The script cannot proceed.';
+  END IF;
+  RAISE NOTICE '✅ TEST PASSED: "postgres" role has CREATE permission on public schema.';
+
+  -- TEST 2: Create a temporary table to check default privileges.
+  -- This confirms that the ALTER DEFAULT PRIVILEGES command worked.
+  CREATE TABLE public.permission_test_table (id int);
+  RAISE NOTICE '  -> Created temporary table "permission_test_table".';
+
+  -- TEST 3: Check if the 'authenticated' role has full rights on the NEW table. Each privilege is
+  -- checked individually: a comma-separated list in has_table_privilege() is TRUE if ANY privilege is held.
+  IF NOT (has_table_privilege('authenticated', 'public.permission_test_table', 'SELECT') AND has_table_privilege('authenticated', 'public.permission_test_table', 'INSERT') AND has_table_privilege('authenticated', 'public.permission_test_table', 'UPDATE') AND has_table_privilege('authenticated', 'public.permission_test_table', 'DELETE')) THEN
+    DROP TABLE public.permission_test_table;
+    RAISE EXCEPTION 'TEST FAILED: The "authenticated" role did not automatically get full permissions on a newly created table. Default privileges are incorrect.';
+  END IF;
+  RAISE NOTICE '✅ TEST PASSED: "authenticated" role has full CRUD permissions on the new test table.';
+
+  -- TEST 4: Check if the 'anon' role also has full rights on the NEW table (again, one privilege at a
+  -- time). This is important for RLS policies that might allow anonymous access for specific queries.
+  IF NOT (has_table_privilege('anon', 'public.permission_test_table', 'SELECT') AND has_table_privilege('anon', 'public.permission_test_table', 'INSERT') AND has_table_privilege('anon', 'public.permission_test_table', 'UPDATE') AND has_table_privilege('anon', 'public.permission_test_table', 'DELETE')) THEN
+    DROP TABLE public.permission_test_table;
+    RAISE EXCEPTION 'TEST FAILED: The "anon" role did not automatically get full permissions on a newly created table. Default privileges are incorrect.';
+  END IF;
+  RAISE NOTICE '✅ TEST PASSED: "anon" role has full CRUD permissions on the new test table.';
+
+
+  -- Cleanup: Drop the temporary table.
+  DROP TABLE public.permission_test_table;
+  RAISE NOTICE '  -> Cleaned up temporary table.';
+
+  RAISE NOTICE '--- ALL PERMISSION TESTS PASSED SUCCESSFULLY ---';
+  RAISE NOTICE 'You should now be able to run the main schema.sql.txt script without permission errors.';
+END;
+$$;
diff --git a/sql/functions.sql b/sql/functions.sql
new file mode 100644
index 0000000..214ec18
--- /dev/null
+++ b/sql/functions.sql
@@ -0,0 +1,376 @@
+
+
+
+
+-- 19. Policies for the 'flyers' storage bucket
+DROP POLICY IF EXISTS "Allow public access to flyers storage" ON storage.objects;
+CREATE POLICY "Allow public access to flyers storage" ON storage.objects
+-- Allow public read access to flyers
+FOR SELECT TO public USING (bucket_id = 'flyers');
+-- Allow authenticated users to upload/manage their own flyers (if applicable, needs user_id association)
+-- For now, restricting write access to authenticated users. If specific roles are needed, adjust TO authenticated.
+-- NOTE: CREATE POLICY accepts exactly one command per policy ("FOR INSERT, UPDATE, DELETE" is a
+-- syntax error), and INSERT policies take WITH CHECK rather than USING — so one policy per command:
+DROP POLICY IF EXISTS "Allow authenticated users to insert flyers" ON storage.objects;
+CREATE POLICY "Allow authenticated users to insert flyers" ON storage.objects
+FOR INSERT TO authenticated WITH CHECK (bucket_id = 'flyers');
+DROP POLICY IF EXISTS "Allow authenticated users to update flyers" ON storage.objects;
+CREATE POLICY "Allow authenticated users to update flyers" ON storage.objects
+FOR UPDATE TO authenticated USING (bucket_id = 'flyers') WITH CHECK (bucket_id = 'flyers');
+DROP POLICY IF EXISTS "Allow authenticated users to delete flyers" ON storage.objects;
+CREATE POLICY "Allow authenticated users to delete flyers" ON storage.objects
+FOR DELETE TO authenticated USING (bucket_id = 'flyers');
+
+-- 20. Set up the trigger to automatically create a profile when a new user signs up.
+-- NOTE: Trigger definitions have been moved to `triggers.sql`.
+
+-- 21. Create a reusable function to automatically update 'updated_at' columns.
+-- NOTE: Trigger definitions have been moved to `triggers.sql`.
+
+-- 22. Function to find the best current sale price for a user's watched items.
+CREATE OR REPLACE FUNCTION public.get_best_sale_prices_for_user(p_user_id UUID)
+RETURNS TABLE (
+  master_item_id BIGINT,
+  item_name TEXT,
+  best_price_in_cents INTEGER,
+  store_name TEXT,
+  flyer_id BIGINT,
+  flyer_image_url TEXT,
+  flyer_valid_from DATE,
+  flyer_valid_to DATE
+)
+LANGUAGE plpgsql
+SECURITY INVOKER -- Runs with the privileges of the calling user. RLS policies will apply.
+AS $$ +BEGIN + RETURN QUERY + WITH UserWatchedSales AS ( + SELECT + uwi.master_item_id, + mgi.name AS item_name, + fi.price_in_cents, + s.name AS store_name, + f.id AS flyer_id, + f.image_url AS flyer_image_url, + f.valid_from AS flyer_valid_from, + f.valid_to AS flyer_valid_to, + ROW_NUMBER() OVER (PARTITION BY uwi.master_item_id ORDER BY fi.price_in_cents ASC, f.valid_to DESC, s.name ASC) as rn + FROM + public.user_watched_items uwi + JOIN public.master_grocery_items mgi ON uwi.master_item_id = mgi.id + JOIN public.flyer_items fi ON uwi.master_item_id = fi.master_item_id + JOIN public.flyers f ON fi.flyer_id = f.id + JOIN public.stores s ON f.store_id = s.id + WHERE uwi.user_id = p_user_id + AND f.valid_from <= CURRENT_DATE + AND f.valid_to >= CURRENT_DATE + AND fi.price_in_cents IS NOT NULL + ) + SELECT uws.master_item_id, uws.item_name, uws.price_in_cents, uws.store_name, uws.flyer_id, uws.flyer_image_url, uws.flyer_valid_from, uws.flyer_valid_to + FROM UserWatchedSales uws + WHERE uws.rn = 1; +END; +$$; + +-- 23. Create a trigger function to populate the item_price_history table. +-- NOTE: Trigger definitions have been moved to `triggers.sql`. + +-- 24. Create a trigger function to recalculate price history when a flyer item is deleted. +-- NOTE: Trigger definitions have been moved to `triggers.sql`. + +-- 25. Function to generate a smart shopping list from a menu plan, subtracting pantry items. +CREATE OR REPLACE FUNCTION public.generate_shopping_list_for_menu_plan(p_menu_plan_id BIGINT, p_user_id UUID) +RETURNS TABLE ( + master_item_id BIGINT, + item_name TEXT, + required_quantity NUMERIC, + pantry_quantity NUMERIC, + shopping_list_quantity NUMERIC, + unit TEXT +) +LANGUAGE plpgsql +SECURITY INVOKER -- Runs with the privileges of the calling user. RLS policies will apply. +AS $$ +BEGIN + RETURN QUERY + WITH RequiredIngredients AS ( + -- First, calculate the total quantity of each ingredient needed for the menu plan. 
+ -- This now accounts for scaling the recipe based on desired servings. + SELECT + ri.master_item_id, + ri.unit, + SUM( + ri.quantity * -- The base ingredient quantity + -- Calculate the scaling factor. Default to 1 if servings_to_cook is not set. + (COALESCE(pm.servings_to_cook, r.servings)::NUMERIC / NULLIF(r.servings, 0)::NUMERIC) + ) AS total_required + FROM public.menu_plans mp + JOIN public.planned_meals pm ON mp.id = pm.menu_plan_id + JOIN public.recipe_ingredients ri ON pm.recipe_id = ri.recipe_id + JOIN public.recipes r ON pm.recipe_id = r.id -- Join to get the recipe's base servings + WHERE mp.id = p_menu_plan_id AND mp.user_id = p_user_id + GROUP BY ri.master_item_id, ri.unit + ) + -- Now, compare the required ingredients with the user's pantry. + SELECT + req.master_item_id, + mgi.name AS item_name, + req.total_required AS required_quantity, + COALESCE(pi.quantity, 0) AS pantry_quantity, + -- Calculate the amount to buy. If pantry has enough, this will be 0. + GREATEST(0, req.total_required - COALESCE(pi.quantity, 0)) AS shopping_list_quantity, + req.unit + FROM RequiredIngredients req + JOIN public.master_grocery_items mgi ON req.master_item_id = mgi.id + LEFT JOIN public.pantry_items pi + ON req.master_item_id = pi.master_item_id + AND req.unit = pi.unit -- Critical: only subtract if units match + AND pi.user_id = p_user_id + WHERE + -- Only include items that need to be purchased. + GREATEST(0, req.total_required - COALESCE(pi.quantity, 0)) > 0; +END; +$$; + +-- 26. Function to find all recipes that can be made entirely from items currently on sale. +CREATE OR REPLACE FUNCTION public.get_recipes_by_sale_percentage(p_min_sale_percentage NUMERIC DEFAULT 100.0) +RETURNS TABLE (recipe_details JSONB) -- The return type remains the same (JSONB object per recipe) +LANGUAGE sql +STABLE -- Indicates the function cannot modify the database and is safe for read-only queries. +SECURITY INVOKER +AS $$ + WITH BestCurrentPrices AS ( + -- 1. 
For every item on sale, find its single best price and the store offering it. + SELECT + bcp.master_item_id, + bcp.price_in_cents, + bcp.store_name + FROM ( + SELECT + fi.master_item_id, + fi.price_in_cents, + s.name as store_name, + ROW_NUMBER() OVER(PARTITION BY fi.master_item_id ORDER BY fi.price_in_cents ASC, f.valid_to DESC) as rn + FROM public.flyer_items fi + JOIN public.flyers f ON fi.flyer_id = f.id + JOIN public.stores s ON f.store_id = s.id + WHERE fi.master_item_id IS NOT NULL + AND fi.price_in_cents IS NOT NULL + AND CURRENT_DATE BETWEEN f.valid_from AND f.valid_to + ) bcp + WHERE bcp.rn = 1 + ), + RecipeIngredientStats AS ( + -- 2. For each recipe, count its total ingredients and how many of them are on sale. + SELECT + ri.recipe_id, + COUNT(ri.master_item_id) AS total_ingredients, + COUNT(bcp.master_item_id) AS sale_ingredients + FROM public.recipe_ingredients ri + LEFT JOIN BestCurrentPrices bcp ON ri.master_item_id = bcp.master_item_id -- Join to count how many ingredients are on sale + GROUP BY ri.recipe_id + ), + EligibleRecipes AS ( + -- 3. Filter recipes based on the minimum sale percentage. + SELECT + ris.recipe_id, + ris.total_ingredients, + ris.sale_ingredients + FROM RecipeIngredientStats ris + WHERE ris.total_ingredients > 0 -- Avoid division by zero and recipes with no ingredients + AND (ris.sale_ingredients * 100.0 / ris.total_ingredients) >= p_min_sale_percentage + ), + RecipeSaleDetails AS ( + -- 4. Gather details for the eligible recipes and ALL their ingredients, noting which are on sale. 
+ SELECT + r.id AS recipe_id, + r.name AS recipe_name, + mgi.name AS item_name, + bcp.price_in_cents AS best_price_in_cents, -- This will be NULL if not on sale + bcp.store_name -- This will be NULL if not on sale + FROM public.recipes r + JOIN EligibleRecipes er ON r.id = er.recipe_id -- Join with the filtered eligible recipes + JOIN public.recipe_ingredients ri ON r.id = ri.recipe_id + JOIN public.master_grocery_items mgi ON ri.master_item_id = mgi.id + LEFT JOIN BestCurrentPrices bcp ON ri.master_item_id = bcp.master_item_id -- LEFT JOIN to include all ingredients + ) + -- 5. Aggregate the details into a JSON object for each recipe. + SELECT + jsonb_build_object( + 'id', rsd.recipe_id, + 'name', rsd.recipe_name, + -- Aggregate all ingredients for the recipe into a JSON array. + 'ingredients', jsonb_agg( + jsonb_build_object( + 'item_name', rsd.item_name, + 'on_sale', (rsd.best_price_in_cents IS NOT NULL), -- Mark if the item is on sale + 'best_price_in_cents', rsd.best_price_in_cents, + 'store_name', rsd.store_name + ) + ORDER BY (rsd.best_price_in_cents IS NOT NULL) DESC, rsd.item_name ASC -- Show sale items first + ) + ) + FROM RecipeSaleDetails rsd + GROUP BY rsd.recipe_id, rsd.recipe_name; +$$; + +-- 27. Function to add items from a menu plan to a user's shopping list. +CREATE OR REPLACE FUNCTION public.add_menu_plan_to_shopping_list( + p_menu_plan_id BIGINT, + p_shopping_list_id BIGINT, + p_user_id UUID +) +RETURNS TABLE ( + master_item_id BIGINT, + item_name TEXT, + quantity_added NUMERIC +) +LANGUAGE plpgsql +-- SECURITY DEFINER is used here to chain functions and perform checks before modification. +-- The function internally ensures the calling user has the correct permissions. +SECURITY DEFINER +AS $$ +DECLARE + list_owner_id UUID; + item_to_add RECORD; +BEGIN + -- Security Check: Ensure the user owns the target shopping list. 
+ SELECT user_id INTO list_owner_id + FROM public.shopping_lists + WHERE id = p_shopping_list_id; + + IF list_owner_id IS NULL OR list_owner_id <> p_user_id THEN + RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id; + END IF; + + -- Loop through the items generated by the smart shopping list function. + FOR item_to_add IN + SELECT * FROM public.generate_shopping_list_for_menu_plan(p_menu_plan_id, p_user_id) + LOOP + -- Insert the item into the shopping list. If it already exists, update the quantity. + INSERT INTO public.shopping_list_items (shopping_list_id, master_item_id, quantity) + VALUES (p_shopping_list_id, item_to_add.master_item_id, item_to_add.shopping_list_quantity) + ON CONFLICT (shopping_list_id, master_item_id) + DO UPDATE SET + quantity = shopping_list_items.quantity + EXCLUDED.quantity; + + -- Return the details of the item that was added/updated. + RETURN QUERY SELECT item_to_add.master_item_id, item_to_add.item_name, item_to_add.shopping_list_quantity; + END LOOP; +END; +$$; + +-- 28. Trigger function to update the average rating on the recipes table. +-- NOTE: Trigger definitions have been moved to `triggers.sql`. + +-- 29. Function to find recipes that have at least 'x' ingredients currently on sale. +CREATE OR REPLACE FUNCTION public.get_recipes_by_min_sale_ingredients(p_min_sale_ingredients INTEGER) +RETURNS TABLE ( + recipe_id BIGINT, + recipe_name TEXT, + description TEXT, + sale_ingredients_count BIGINT +) +LANGUAGE sql +STABLE +SECURITY INVOKER +AS $$ + WITH CurrentSaleItems AS ( + -- 1. Get a distinct list of all master item IDs that are currently on sale. + SELECT DISTINCT fi.master_item_id + FROM public.flyer_items fi + JOIN public.flyers f ON fi.flyer_id = f.id + WHERE fi.master_item_id IS NOT NULL + AND fi.price_in_cents IS NOT NULL + AND CURRENT_DATE BETWEEN f.valid_from AND f.valid_to + ), + RecipeIngredientStats AS ( + -- 2. For each recipe, count how many of its ingredients are on sale. 
+ SELECT + ri.recipe_id, + COUNT(csi.master_item_id) AS sale_ingredients_count + FROM public.recipe_ingredients ri + LEFT JOIN CurrentSaleItems csi ON ri.master_item_id = csi.master_item_id + GROUP BY ri.recipe_id + ) + -- 3. Select recipes that meet the minimum sale ingredient count. + SELECT + r.id, + r.name, + r.description, + ris.sale_ingredients_count + FROM public.recipes r + JOIN RecipeIngredientStats ris ON r.id = ris.recipe_id + WHERE ris.sale_ingredients_count >= p_min_sale_ingredients + ORDER BY + ris.sale_ingredients_count DESC, + r.avg_rating DESC; +$$; + +-- 30. Function to find the most frequently advertised items in a given period. +CREATE OR REPLACE FUNCTION public.get_most_frequent_sale_items(days_interval INTEGER, result_limit INTEGER) +RETURNS TABLE ( + item_name TEXT, + sale_occurrence_count BIGINT +) +LANGUAGE sql +STABLE +SECURITY INVOKER +AS $$ + SELECT + mgi.name AS item_name, + COUNT(DISTINCT fi.flyer_id) AS sale_occurrence_count + FROM + public.flyer_items fi + JOIN + public.flyers f ON fi.flyer_id = f.id + JOIN + public.master_grocery_items mgi ON fi.master_item_id = mgi.id + WHERE + -- Only consider items linked to our master list + fi.master_item_id IS NOT NULL + -- Filter for flyers that have been active in the last X days + AND f.valid_to >= (CURRENT_DATE - (days_interval || ' days')::INTERVAL) + AND f.valid_from <= CURRENT_DATE + GROUP BY + mgi.id, mgi.name + ORDER BY + sale_occurrence_count DESC + LIMIT result_limit; +$$; + +-- 31. Function to find recipes by a specific ingredient and tag. 
+CREATE OR REPLACE FUNCTION public.find_recipes_by_ingredient_and_tag(p_ingredient_name TEXT, p_tag_name TEXT) +RETURNS TABLE ( + id BIGINT, + name TEXT, + description TEXT, + prep_time_minutes INTEGER, + cook_time_minutes INTEGER, + avg_rating NUMERIC +) +LANGUAGE sql +STABLE +SECURITY INVOKER +AS $$ + SELECT + r.id, r.name, r.description, r.prep_time_minutes, r.cook_time_minutes, r.avg_rating + FROM + public.recipes r + WHERE + -- Check that the recipe has the required ingredient + EXISTS ( + SELECT 1 FROM public.recipe_ingredients ri + JOIN public.master_grocery_items mgi ON ri.master_item_id = mgi.id + WHERE ri.recipe_id = r.id AND mgi.name = p_ingredient_name + ) + AND + -- Check that the recipe has the required tag + EXISTS ( + SELECT 1 FROM public.recipe_tags rt + JOIN public.tags t ON rt.tag_id = t.id + WHERE rt.recipe_id = r.id AND t.name = p_tag_name + ) + ORDER BY + r.avg_rating DESC, r.name ASC; +$$; diff --git a/sql/initial.sql b/sql/initial.sql new file mode 100644 index 0000000..e2bfbfc --- /dev/null +++ b/sql/initial.sql @@ -0,0 +1,657 @@ + +-- DONE +-- 0. Create a simple users table for future expansion. +-- 0. Create a table for public user profiles. +-- This table is linked to the auth.users table and stores non-sensitive user data. +CREATE TABLE IF NOT EXISTS public.profiles ( + id UUID PRIMARY KEY REFERENCES auth.users ON DELETE CASCADE, + updated_at TIMESTAMPTZ, + username TEXT UNIQUE, + full_name TEXT, + avatar_url TEXT, + CONSTRAINT username_length CHECK (char_length(username) >= 3) +); + +-- DONE +-- 1. Create the 'stores' table for normalized store data. +CREATE TABLE IF NOT EXISTS public.stores ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + name TEXT NOT NULL UNIQUE, + logo_url TEXT +); + +-- DONE +-- 2. Create the 'categories' table for normalized category data. 
+CREATE TABLE IF NOT EXISTS public.categories ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + name TEXT NOT NULL UNIQUE +); + +-- DONE +-- 3. Create the 'flyers' table with its full, final schema. +CREATE TABLE IF NOT EXISTS public.flyers ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + file_name TEXT, + image_url TEXT, + checksum TEXT UNIQUE, + store_id BIGINT REFERENCES public.stores(id), + valid_from DATE, + valid_to DATE +); + +-- DONE +-- 4. Create the 'master_grocery_items' table. This is the master dictionary. +CREATE TABLE IF NOT EXISTS public.master_grocery_items ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + name TEXT NOT NULL UNIQUE, + category_id BIGINT REFERENCES public.categories(id) +); + +-- DONE +-- 5. Create the 'user_watched_items' table. This links to the master list. +CREATE TABLE IF NOT EXISTS public.user_watched_items ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE, + master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE, + UNIQUE(user_id, master_item_id) +); + +-- DONE +-- 6. Create the 'flyer_items' table with its full, final schema. +CREATE TABLE IF NOT EXISTS public.flyer_items ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + item TEXT, + price_display TEXT, + price_in_cents INTEGER, + quantity TEXT, + quantity_num NUMERIC, + unit_price JSONB, + flyer_id BIGINT REFERENCES public.flyers(id) ON DELETE CASCADE, + master_item_id BIGINT REFERENCES public.master_grocery_items(id) +); + +-- DONE +-- 7. Create a table for user-defined alerts on watched items. 
+CREATE TABLE IF NOT EXISTS public.user_alerts (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  user_watched_item_id BIGINT NOT NULL REFERENCES public.user_watched_items(id) ON DELETE CASCADE,
+  -- NOTE(review): alert_type is free TEXT; consider a CHECK constraint on the
+  -- known values -- confirm the full set before constraining.
+  alert_type TEXT NOT NULL, -- e.g., 'PRICE_BELOW', 'PERCENT_OFF_AVERAGE'
+  threshold_value NUMERIC NOT NULL, -- The value for the alert condition (e.g., 299 for a price of $2.99, or 20 for 20%)
+  is_active BOOLEAN DEFAULT true NOT NULL,
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+  -- A user should only have one type of alert per watched item
+  UNIQUE(user_watched_item_id, alert_type)
+);
+COMMENT ON COLUMN public.user_alerts.alert_type IS 'The condition that triggers the alert, e.g., ''PRICE_BELOW''.';
+COMMENT ON COLUMN public.user_alerts.threshold_value IS 'The numeric threshold for the alert condition (e.g., price in cents, or percentage).';
+
+-- DONE
+-- 8. Create a table to store notifications for users.
+CREATE TABLE IF NOT EXISTS public.notifications (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE,
+  content TEXT NOT NULL, -- The message for the user, e.g., "Chicken Thighs are on sale at Superstore!"
+  link_url TEXT, -- A deep link to the relevant flyer or item in the app
+  is_read BOOLEAN DEFAULT false NOT NULL,
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+COMMENT ON COLUMN public.notifications.content IS 'The notification message displayed to the user.';
+COMMENT ON COLUMN public.notifications.link_url IS 'A URL to navigate to when the notification is clicked.';
+
+-- DONE
+-- 9. Create a table for aggregated, historical price data for master items.
+CREATE TABLE IF NOT EXISTS public.item_price_history (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
+  summary_date DATE NOT NULL, -- The date this summary applies to
+  min_price_in_cents INTEGER, -- The lowest price found for this item on this day, in cents
+  max_price_in_cents INTEGER, -- The highest price found for this item on this day, in cents
+  avg_price_in_cents INTEGER, -- The average price found for this item on this day, in cents
+  data_points_count INTEGER NOT NULL, -- How many data points were used for this summary
+  -- One summary row per item per day; upserts should target this key.
+  UNIQUE(master_item_id, summary_date)
+);
+COMMENT ON TABLE public.item_price_history IS 'Serves as a summary table to speed up charting and analytics.';
+COMMENT ON COLUMN public.item_price_history.summary_date IS 'The date for which the price data is summarized.';
+
+-- DONE
+-- 10. Create a table to map various names to a single master grocery item.
+CREATE TABLE IF NOT EXISTS public.master_item_aliases (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
+  -- Globally unique: one alias can point to only one master item.
+  alias TEXT NOT NULL UNIQUE
+);
+COMMENT ON TABLE public.master_item_aliases IS 'Stores synonyms or alternative names for master items to improve matching.';
+COMMENT ON COLUMN public.master_item_aliases.alias IS 'An alternative name, e.g., "Ground Chuck" for the master item "Ground Beef".';
+
+-- DONE
+-- 11. Create tables for user shopping lists.
+CREATE TABLE IF NOT EXISTS public.shopping_lists (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE,
+  name TEXT NOT NULL,
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+COMMENT ON TABLE public.shopping_lists IS 'Stores user-created shopping lists, e.g., "Weekly Groceries".';
+
+-- DONE
+CREATE TABLE IF NOT EXISTS public.shopping_list_items (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(id) ON DELETE CASCADE,
+  master_item_id BIGINT REFERENCES public.master_grocery_items(id),
+  custom_item_name TEXT, -- For items not in the master list, e.g., "Grandma's special spice mix"
+  quantity INTEGER DEFAULT 1 NOT NULL,
+  is_purchased BOOLEAN DEFAULT false NOT NULL,
+  added_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+  -- Ensure a master item is not added twice to the same list.
+  -- NOTE(review): NULL master_item_id never conflicts in a UNIQUE constraint,
+  -- so any number of custom-name rows per list is allowed -- presumably intended.
+  UNIQUE(shopping_list_id, master_item_id),
+  -- Ensure one of the item identifiers is present
+  CONSTRAINT must_have_item_identifier CHECK (master_item_id IS NOT NULL OR custom_item_name IS NOT NULL)
+);
+COMMENT ON TABLE public.shopping_list_items IS 'Contains individual items for a specific shopping list.';
+COMMENT ON COLUMN public.shopping_list_items.custom_item_name IS 'For items not in the master list, e.g., "Grandma''s special spice mix".';
+COMMENT ON COLUMN public.shopping_list_items.is_purchased IS 'Lets users check items off their list as they shop.';
+
+
+-- DONE
+-- 12. Create a table to store user-submitted corrections for flyer items.
+CREATE TABLE IF NOT EXISTS public.suggested_corrections (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(id) ON DELETE CASCADE,
+  user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE,
+  correction_type TEXT NOT NULL, -- e.g., 'WRONG_PRICE', 'INCORRECT_ITEM_LINK', 'INVALID_QUANTITY'
+  suggested_value TEXT NOT NULL, -- The proposed new value, stored as text for flexibility.
+  status TEXT DEFAULT 'pending' NOT NULL, -- e.g., 'pending', 'approved', 'rejected'
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+  reviewed_notes TEXT, -- An admin can leave a note about why a suggestion was approved/rejected.
+  reviewed_at TIMESTAMPTZ
+);
+COMMENT ON TABLE public.suggested_corrections IS 'A queue for user-submitted data corrections, enabling crowdsourced data quality improvements.';
+COMMENT ON COLUMN public.suggested_corrections.correction_type IS 'The type of error the user is reporting.';
+COMMENT ON COLUMN public.suggested_corrections.suggested_value IS 'The corrected value proposed by the user (e.g., a new price or master_item_id).';
+COMMENT ON COLUMN public.suggested_corrections.status IS 'The moderation status of the correction: pending, approved, or rejected.';
+
+-- DONE
+-- 13. Create a table for prices submitted directly by users from in-store.
+CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE,
+  master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
+  store_id BIGINT NOT NULL REFERENCES public.stores(id) ON DELETE CASCADE,
+  price_in_cents INTEGER NOT NULL,
+  photo_url TEXT, -- Optional: URL to a photo of the price tag for verification.
+  upvotes INTEGER DEFAULT 1 NOT NULL, -- Community validation mechanism.
+  downvotes INTEGER DEFAULT 0 NOT NULL,
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+-- A user can only submit one price for an item at a store per day.
+-- FIX: the original declared UNIQUE(user_id, master_item_id, store_id, created_at::date),
+-- but PostgreSQL does not allow expressions inside a table-level UNIQUE constraint,
+-- and timestamptz::date is not IMMUTABLE (it depends on the session time zone).
+-- Enforce the rule with a unique expression index on the UTC calendar date instead.
+CREATE UNIQUE INDEX IF NOT EXISTS user_submitted_prices_one_per_day_idx
+  ON public.user_submitted_prices (user_id, master_item_id, store_id, ((created_at AT TIME ZONE 'UTC')::date));
+COMMENT ON TABLE public.user_submitted_prices IS 'Stores item prices submitted by users directly from physical stores.';
+COMMENT ON COLUMN public.user_submitted_prices.photo_url IS 'URL to user-submitted photo evidence of the price.';
+COMMENT ON COLUMN public.user_submitted_prices.upvotes IS 'Community validation score indicating accuracy.';
+
+-- 14. Pre-populate categories table from a predefined list.
+INSERT INTO public.categories (name) VALUES
+('Fruits & Vegetables'), ('Meat & Seafood'), ('Dairy & Eggs'), ('Bakery & Bread'),
+('Pantry & Dry Goods'), ('Beverages'), ('Frozen Foods'), ('Snacks'), ('Household & Cleaning'),
+('Personal Care & Health'), ('Baby & Child'), ('Pet Supplies'), ('Deli & Prepared Foods'),
+('Canned Goods'), ('Condiments & Spices'), ('Breakfast & Cereal'), ('Organic'),
+('International Foods'), ('Other/Miscellaneous')
+ON CONFLICT (name) DO NOTHING;
+
+-- DONE
+-- A table to store brand information.
+CREATE TABLE IF NOT EXISTS public.brands (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  name TEXT NOT NULL UNIQUE
+);
+COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
+
+-- DONE
+-- A table for specific products, linking a master item with a brand and size.
+CREATE TABLE IF NOT EXISTS public.products (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id),
+  brand_id BIGINT REFERENCES public.brands(id), -- Can be null for generic/store-brand items
+  name TEXT NOT NULL, -- e.g., "Prime Raised without Antibiotics Chicken Breast"
+  description TEXT,
+  size TEXT, -- e.g., "4L", "500g"
+  upc_code TEXT UNIQUE, -- Universal Product Code for precise identification
+  UNIQUE(master_item_id, brand_id, name, size)
+);
+COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
+COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
+
+-- Then, you would update 'flyer_items' to link to this new table.
+ALTER TABLE public.flyer_items
+ADD COLUMN IF NOT EXISTS product_id BIGINT REFERENCES public.products(id);
+
+-- Enable trigram support for fuzzy string matching
+CREATE EXTENSION IF NOT EXISTS pg_trgm;
+-- Add a GIN index to the 'item' column for fast fuzzy text searching.
+CREATE INDEX IF NOT EXISTS flyer_items_item_trgm_idx ON public.flyer_items USING GIN (item gin_trgm_ops);
+
+-- FIX: PostGIS must actually be enabled before the GEOGRAPHY column below is
+-- created; with this statement commented out, the store_locations DDL failed
+-- on a fresh database. CREATE EXTENSION IF NOT EXISTS is idempotent, so this
+-- is safe even when the extension was already enabled via the Supabase UI
+-- (Database -> Extensions).
+CREATE EXTENSION IF NOT EXISTS postgis;
+
+-- DONE
+-- A table to store individual store locations with geographic data.
+CREATE TABLE IF NOT EXISTS public.store_locations (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  store_id BIGINT NOT NULL REFERENCES public.stores(id) ON DELETE CASCADE,
+  address TEXT NOT NULL,
+  city TEXT,
+  province_state TEXT,
+  postal_code TEXT,
+  -- Use the 'geography' type for lat/lon data.
+  location GEOGRAPHY(Point, 4326)
+);
+CREATE INDEX IF NOT EXISTS store_locations_geo_idx ON public.store_locations USING GIST (location);
+COMMENT ON TABLE public.store_locations IS 'Stores physical locations of stores with geographic data for proximity searches.';
+COMMENT ON COLUMN public.store_locations.location IS 'Geographic coordinates (longitude, latitude) of the store.';
+
+-- DONE
+-- You might also need a linking table if one flyer is valid for multiple locations.
+CREATE TABLE IF NOT EXISTS public.flyer_locations (
+  flyer_id BIGINT NOT NULL REFERENCES public.flyers(id) ON DELETE CASCADE,
+  store_location_id BIGINT NOT NULL REFERENCES public.store_locations(id) ON DELETE CASCADE,
+  PRIMARY KEY (flyer_id, store_location_id)
+);
+
+-- done
+-- A table to store recipes, which can be user-created or pre-populated.
+CREATE TABLE IF NOT EXISTS public.recipes (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  user_id UUID REFERENCES public.profiles(id) ON DELETE SET NULL, -- Can be a system recipe (user_id is NULL) or user-submitted
+  name TEXT NOT NULL,
+  description TEXT,
+  instructions TEXT,
+  prep_time_minutes INTEGER,
+  cook_time_minutes INTEGER,
+  servings INTEGER, -- How many people the recipe is intended for.
+  photo_url TEXT,
+  -- Optional nutritional information
+  calories_per_serving INTEGER,
+  protein_grams INTEGER,
+  fat_grams INTEGER,
+  carb_grams INTEGER,
+  -- Aggregated rating data for fast sorting/display
+  avg_rating NUMERIC(3, 2) DEFAULT 0.00,
+  rating_count INTEGER DEFAULT 0,
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+COMMENT ON TABLE public.recipes IS 'Stores recipes that can be used to generate shopping lists.';
+COMMENT ON COLUMN public.recipes.servings IS 'The number of servings this recipe yields.';
+
+-- done
+-- A linking table for ingredients required for each recipe.
+CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE,
+  master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id),
+  quantity NUMERIC NOT NULL,
+  unit TEXT NOT NULL -- e.g., 'cups', 'tbsp', 'g', 'each'
+);
+COMMENT ON TABLE public.recipe_ingredients IS 'Defines the ingredients and quantities needed for a recipe.';
+
+-- done
+-- A table to store a predefined list of tags for recipes.
+CREATE TABLE IF NOT EXISTS public.tags (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  name TEXT NOT NULL UNIQUE
+);
+COMMENT ON TABLE public.tags IS 'Stores tags for categorizing recipes, e.g., "Vegetarian", "Quick & Easy".';
+
+-- done
+-- A linking table to associate multiple tags with a recipe.
+CREATE TABLE IF NOT EXISTS public.recipe_tags (
+  recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE,
+  tag_id BIGINT NOT NULL REFERENCES public.tags(id) ON DELETE CASCADE,
+  PRIMARY KEY (recipe_id, tag_id)
+);
+
+-- done
+-- A table to store individual user ratings for recipes.
+-- NOTE(review): recipes.avg_rating / rating_count are presumably maintained
+-- from this table by a trigger or application code -- no trigger is visible here.
+CREATE TABLE IF NOT EXISTS public.recipe_ratings (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE,
+  user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE,
+  rating INTEGER NOT NULL CHECK (rating >= 1 AND rating <= 5),
+  comment TEXT,
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+  UNIQUE(recipe_id, user_id) -- A user can only rate a recipe once.
+);
+
+
+-- DONE
+-- A table to store a user's collection of planned meals for a date range.
+CREATE TABLE IF NOT EXISTS public.menu_plans (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE,
+  name TEXT NOT NULL,
+  start_date DATE NOT NULL,
+  end_date DATE NOT NULL,
+  created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+  CONSTRAINT date_range_check CHECK (end_date >= start_date)
+);
+COMMENT ON TABLE public.menu_plans IS 'Represents a user''s meal plan for a specific period, e.g., "Week of Oct 23".';
+
+-- DONE
+-- A table to associate a recipe with a specific date and meal type within a menu plan.
+CREATE TABLE IF NOT EXISTS public.planned_meals (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  menu_plan_id BIGINT NOT NULL REFERENCES public.menu_plans(id) ON DELETE CASCADE,
+  recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE,
+  plan_date DATE NOT NULL,
+  -- e.g., 'Breakfast', 'Lunch', 'Dinner', 'Snack'
+  -- FIX: the original had a trailing comma after this column (before the closing
+  -- parenthesis), which is a PostgreSQL syntax error that aborted the script.
+  meal_type TEXT NOT NULL
+  -- A user can plan the same recipe for multiple meal types on the same day (e.g., leftovers for lunch)
+);
+COMMENT ON TABLE public.planned_meals IS 'Assigns a recipe to a specific day and meal type within a user''s menu plan.';
+COMMENT ON COLUMN public.planned_meals.meal_type IS 'The designated meal for the recipe, e.g., ''Breakfast'', ''Lunch'', ''Dinner''.';
+
+-- DONE
+-- A table to track the grocery items a user currently has in their pantry.
+CREATE TABLE IF NOT EXISTS public.pantry_items (
+  id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+  user_id UUID NOT NULL REFERENCES public.profiles(id) ON DELETE CASCADE,
+  master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
+  quantity NUMERIC NOT NULL,
+  unit TEXT, -- e.g., 'g', 'ml', 'items'. Should align with recipe_ingredients.unit
+  best_before_date DATE,
+  updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+  -- A user should only have one entry per master item in their pantry.
+  UNIQUE(user_id, master_item_id)
+);
+COMMENT ON TABLE public.pantry_items IS 'Tracks a user''s personal inventory of grocery items to enable smart shopping lists.';
+COMMENT ON COLUMN public.pantry_items.quantity IS 'The current amount of the item. Convention: use grams for weight, mL for volume where applicable.';
+COMMENT ON COLUMN public.pantry_items.unit IS 'e.g., ''g'', ''ml'', ''items''. Should align with recipe_ingredients.unit and quantity convention.';
+
+
+-- 15. Pre-populate the master grocery items dictionary.
+-- Looks up each category id seeded in step 14, then inserts the canonical
+-- item list. Idempotent via ON CONFLICT (name) DO NOTHING.
+DO $$
+DECLARE
+  fv_cat_id BIGINT;
+  ms_cat_id BIGINT;
+  de_cat_id BIGINT;
+  bb_cat_id BIGINT;
+  pdg_cat_id BIGINT;
+  bev_cat_id BIGINT;
+  ff_cat_id BIGINT;
+  snk_cat_id BIGINT;
+  hc_cat_id BIGINT;
+  pch_cat_id BIGINT;
+  bc_cat_id BIGINT;
+  ps_cat_id BIGINT;
+  dpf_cat_id BIGINT;
+  cg_cat_id BIGINT;
+  cs_cat_id BIGINT;
+  bkc_cat_id BIGINT;
+BEGIN
+  -- NOTE(review): if a category name were missing these would stay NULL and the
+  -- affected items would be inserted uncategorized -- acceptable only because
+  -- step 14 seeds all names used here.
+  SELECT id INTO fv_cat_id FROM public.categories WHERE name = 'Fruits & Vegetables';
+  SELECT id INTO ms_cat_id FROM public.categories WHERE name = 'Meat & Seafood';
+  SELECT id INTO de_cat_id FROM public.categories WHERE name = 'Dairy & Eggs';
+  SELECT id INTO bb_cat_id FROM public.categories WHERE name = 'Bakery & Bread';
+  SELECT id INTO pdg_cat_id FROM public.categories WHERE name = 'Pantry & Dry Goods';
+  SELECT id INTO bev_cat_id FROM public.categories WHERE name = 'Beverages';
+  SELECT id INTO ff_cat_id FROM public.categories WHERE name = 'Frozen Foods';
+  SELECT id INTO snk_cat_id FROM public.categories WHERE name = 'Snacks';
+  SELECT id INTO hc_cat_id FROM public.categories WHERE name = 'Household & Cleaning';
+  SELECT id INTO pch_cat_id FROM public.categories WHERE name = 'Personal Care & Health';
+  SELECT id INTO bc_cat_id FROM public.categories WHERE name = 'Baby & Child';
+  SELECT id INTO ps_cat_id FROM public.categories WHERE name = 'Pet Supplies';
+  SELECT id INTO dpf_cat_id FROM public.categories WHERE name = 'Deli & Prepared Foods';
+  SELECT id INTO cg_cat_id FROM public.categories WHERE name = 'Canned Goods';
+  SELECT id INTO cs_cat_id FROM public.categories WHERE name = 'Condiments & Spices';
+  SELECT id INTO bkc_cat_id FROM public.categories WHERE name = 'Breakfast & Cereal';
+
+  INSERT INTO public.master_grocery_items (name, category_id) VALUES
+  ('apples', fv_cat_id), ('bananas', fv_cat_id), ('oranges', fv_cat_id), ('grapes', fv_cat_id), ('strawberries', fv_cat_id), ('blueberries', fv_cat_id), ('raspberries', fv_cat_id), ('avocados', fv_cat_id), ('tomatoes', fv_cat_id), ('potatoes', fv_cat_id), ('onions', fv_cat_id), ('garlic', fv_cat_id), ('carrots', fv_cat_id), ('broccoli', fv_cat_id), ('spinach', fv_cat_id), ('lettuce', fv_cat_id), ('bell peppers', fv_cat_id), ('cucumbers', fv_cat_id), ('mushrooms', fv_cat_id), ('lemons', fv_cat_id), ('limes', fv_cat_id), ('celery', fv_cat_id), ('corn', fv_cat_id), ('sweet potatoes', fv_cat_id), ('zucchini', fv_cat_id), ('cauliflower', fv_cat_id), ('green beans', fv_cat_id), ('peas', fv_cat_id), ('asparagus', fv_cat_id),
+  ('chicken breast', ms_cat_id), ('chicken thighs', ms_cat_id), ('ground beef', ms_cat_id), ('steak', ms_cat_id), ('pork chops', ms_cat_id), ('bacon', ms_cat_id), ('sausage', ms_cat_id), ('salmon', ms_cat_id), ('shrimp', ms_cat_id), ('tilapia', ms_cat_id), ('cod', ms_cat_id), ('tuna', ms_cat_id), ('ham', ms_cat_id), ('turkey', ms_cat_id),
+  ('milk', de_cat_id), ('cheese', de_cat_id), ('yogurt', de_cat_id), ('butter', de_cat_id), ('eggs', de_cat_id), ('cream cheese', de_cat_id), ('sour cream', de_cat_id), ('cottage cheese', de_cat_id),
+  ('bread', bb_cat_id), ('bagels', bb_cat_id), ('tortillas', bb_cat_id), ('croissants', bb_cat_id), ('muffins', bb_cat_id), ('baguette', bb_cat_id), ('pita bread', bb_cat_id),
+  ('rice', pdg_cat_id), ('pasta', pdg_cat_id), ('flour', pdg_cat_id), ('sugar', pdg_cat_id), ('salt', pdg_cat_id), ('pepper', pdg_cat_id), ('olive oil', pdg_cat_id), ('vegetable oil', pdg_cat_id), ('canned tomatoes', pdg_cat_id), ('canned beans', pdg_cat_id), ('peanut butter', pdg_cat_id), ('jam', pdg_cat_id), ('honey', pdg_cat_id), ('syrup', pdg_cat_id), ('nuts', pdg_cat_id), ('dried fruit', pdg_cat_id), ('crackers', pdg_cat_id), ('quinoa', pdg_cat_id), ('lentils', pdg_cat_id),
+  ('water', bev_cat_id), ('juice', bev_cat_id), ('soda', bev_cat_id), ('coffee', bev_cat_id), ('tea', bev_cat_id), ('almond milk', bev_cat_id), ('soy milk', bev_cat_id), ('coconut water', bev_cat_id),
+  ('frozen pizza', ff_cat_id), ('frozen vegetables', ff_cat_id), ('frozen fruit', ff_cat_id), ('ice cream', ff_cat_id), ('frozen dinners', ff_cat_id), ('french fries', ff_cat_id), ('frozen fish', ff_cat_id),
+  ('chips', snk_cat_id), ('pretzels', snk_cat_id), ('popcorn', snk_cat_id), ('granola bars', snk_cat_id), ('cookies', snk_cat_id), ('chocolate', snk_cat_id), ('candy', snk_cat_id),
+  ('paper towels', hc_cat_id), ('toilet paper', hc_cat_id), ('trash bags', hc_cat_id), ('dish soap', hc_cat_id), ('laundry detergent', hc_cat_id), ('all-purpose cleaner', hc_cat_id), ('sponges', hc_cat_id),
+  ('soap', pch_cat_id), ('shampoo', pch_cat_id), ('conditioner', pch_cat_id), ('toothpaste', pch_cat_id), ('deodorant', pch_cat_id), ('vitamins', pch_cat_id), ('pain reliever', pch_cat_id),
+  ('diapers', bc_cat_id), ('baby wipes', bc_cat_id), ('baby food', bc_cat_id), ('formula', bc_cat_id),
+  ('dog food', ps_cat_id), ('cat food', ps_cat_id), ('cat litter', ps_cat_id),
+  ('deli meat', dpf_cat_id), ('deli cheese', dpf_cat_id), ('rotisserie chicken', dpf_cat_id), ('prepared salads', dpf_cat_id),
+  ('canned soup', cg_cat_id), ('canned corn', cg_cat_id), ('canned tuna', cg_cat_id), ('canned chicken', cg_cat_id),
+  ('ketchup', cs_cat_id), ('mustard', cs_cat_id), ('mayonnaise', cs_cat_id), ('soy sauce', cs_cat_id), ('hot sauce', cs_cat_id), ('bbq sauce', cs_cat_id), ('salad dressing', cs_cat_id), ('cinnamon', cs_cat_id), ('oregano', cs_cat_id), ('paprika', cs_cat_id), ('garlic powder', cs_cat_id),
+  ('cereal', bkc_cat_id), ('oatmeal', bkc_cat_id), ('granola', bkc_cat_id), ('pancake mix', bkc_cat_id)
+  ON CONFLICT (name) DO NOTHING;
+END $$;
+
+
+-- 16. Pre-populate initial watched items after master list exists.
+-- Intentionally a no-op: the demonstration code is kept commented out because
+-- user-specific seed data cannot be assumed in a generic setup script.
+DO $$
+DECLARE
+  tp_id BIGINT;
+  ct_id BIGINT;
+  -- Note: Seeding user-specific data in a generic script is tricky.
+  -- This block is for demonstration. In a real app, users would add their own watched items.
+  -- We'll comment it out to avoid errors if no users exist.
+  -- sample_user_id UUID;
+BEGIN
+  -- -- Find a user to assign watched items to.
+  -- SELECT id INTO sample_user_id FROM auth.users LIMIT 1;
+
+  -- -- If a user exists, add some default watched items for them.
+  -- IF sample_user_id IS NOT NULL THEN
+  --   SELECT id INTO tp_id FROM public.master_grocery_items WHERE name = 'toilet paper';
+  --   SELECT id INTO ct_id FROM public.master_grocery_items WHERE name = 'chicken thighs';
+
+  --   IF tp_id IS NOT NULL THEN
+  --     INSERT INTO public.user_watched_items (user_id, master_item_id)
+  --     VALUES (sample_user_id, tp_id)
+  --     ON CONFLICT (user_id, master_item_id) DO NOTHING;
+  --   END IF;
+
+  --   IF ct_id IS NOT NULL THEN
+  --     INSERT INTO public.user_watched_items (user_id, master_item_id)
+  --     VALUES (sample_user_id, ct_id)
+  --     ON CONFLICT (user_id, master_item_id) DO NOTHING;
+  --   END IF;
+  -- END IF;
+  NULL; -- PL/pgSQL block cannot be empty
+END $$;
+
+-- 17. Enable Row Level Security (RLS) on all tables. This is safe to run multiple times.
+ALTER TABLE public.profiles ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.stores ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.categories ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.flyers ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.flyer_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.master_grocery_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.user_watched_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.user_alerts ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.notifications ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.item_price_history ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.master_item_aliases ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.shopping_lists ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.shopping_list_items ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.suggested_corrections ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.user_submitted_prices ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.brands ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.products ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.store_locations ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.flyer_locations ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipes ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipe_ingredients ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.menu_plans ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.planned_meals ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.tags ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipe_tags ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.recipe_ratings ENABLE ROW LEVEL SECURITY;
+ALTER TABLE public.pantry_items ENABLE ROW LEVEL SECURITY;
+
+
+-- 18. Create policies. Using DROP/CREATE makes this script idempotent and safe to re-run.
+-- Policies for 'profiles' table
+-- Profiles are publicly readable
+DROP POLICY IF EXISTS "Public profiles are viewable by everyone." ON public.profiles;
+CREATE POLICY "Public profiles are viewable by everyone." ON public.profiles FOR SELECT USING (true);
+-- Users can insert and update their own profile
+-- FIX: the comment above promised insert capability but only the UPDATE policy
+-- existed; with RLS enabled and no INSERT policy, users could never create
+-- their own profile row. Additive, backward-compatible grant.
+DROP POLICY IF EXISTS "Users can insert their own profile." ON public.profiles;
+CREATE POLICY "Users can insert their own profile." ON public.profiles FOR INSERT WITH CHECK (auth.uid() = id);
+DROP POLICY IF EXISTS "Users can update their own profile." ON public.profiles;
+CREATE POLICY "Users can update their own profile." ON public.profiles FOR UPDATE USING (auth.uid() = id) WITH CHECK (auth.uid() = id);
+
+
+-- Policies for 'stores' table
+DROP POLICY IF EXISTS "Public read access" ON public.stores;
+CREATE POLICY "Public read access" ON public.stores FOR SELECT USING (true);
+
+-- Policies for 'categories' table
+DROP POLICY IF EXISTS "Public read access" ON public.categories;
+CREATE POLICY "Public read access" ON public.categories FOR SELECT USING (true);
+
+-- Policies for 'flyers' table
+DROP POLICY IF EXISTS "Public read access" ON public.flyers;
+CREATE POLICY "Public read access" ON public.flyers FOR SELECT USING (true);
+
+-- Policies for 'flyer_items' table
+DROP POLICY IF EXISTS "Public read access" ON public.flyer_items;
+CREATE POLICY "Public read access" ON public.flyer_items FOR SELECT USING (true);
+
+-- Policies for 'master_grocery_items' table
+DROP POLICY IF EXISTS "Public read access" ON public.master_grocery_items;
+CREATE POLICY "Public read access" ON public.master_grocery_items FOR SELECT USING (true);
+
+-- Policies for 'user_watched_items' table
+-- Users can view their own watched items.
+DROP POLICY IF EXISTS "Users can view their own watched items." ON public.user_watched_items;
+CREATE POLICY "Users can view their own watched items." ON public.user_watched_items FOR SELECT USING (auth.uid() = user_id);
+-- Users can insert, update, and delete their own watched items.
+DROP POLICY IF EXISTS "Users can manage their own watched items." ON public.user_watched_items;
+CREATE POLICY "Users can manage their own watched items."
ON public.user_watched_items FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+-- Policies for 'user_alerts' table
+-- Ownership is indirect: a user owns an alert if they own its watched item.
+DROP POLICY IF EXISTS "Users can manage their own alerts" ON public.user_alerts;
+CREATE POLICY "Users can manage their own alerts" ON public.user_alerts
+FOR ALL USING (
+  -- Check that the user owns the parent 'user_watched_item'
+  auth.uid() = (SELECT user_id FROM public.user_watched_items WHERE id = user_watched_item_id)
+);
+
+-- Policies for 'notifications' table
+DROP POLICY IF EXISTS "Users can manage their own notifications" ON public.notifications;
+CREATE POLICY "Users can manage their own notifications" ON public.notifications
+FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+-- Policies for 'item_price_history' table
+DROP POLICY IF EXISTS "Public read access for price history" ON public.item_price_history;
+CREATE POLICY "Public read access for price history" ON public.item_price_history
+FOR SELECT USING (true);
+
+-- Policies for 'master_item_aliases' table
+DROP POLICY IF EXISTS "Aliases are publicly viewable." ON public.master_item_aliases;
+CREATE POLICY "Aliases are publicly viewable." ON public.master_item_aliases FOR SELECT USING (true);
+-- Note: Write access to aliases should be restricted to an admin or trusted role.
+
+-- Policies for 'shopping_lists' table
+DROP POLICY IF EXISTS "Users can manage their own shopping lists." ON public.shopping_lists;
+CREATE POLICY "Users can manage their own shopping lists." ON public.shopping_lists
+FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+
+-- Policies for 'shopping_list_items' table
+DROP POLICY IF EXISTS "Users can manage items in their own shopping lists." ON public.shopping_list_items;
+CREATE POLICY "Users can manage items in their own shopping lists."
ON public.shopping_list_items
+FOR ALL USING (
+  -- Check that the user owns the parent 'shopping_list'
+  auth.uid() = (SELECT user_id FROM public.shopping_lists WHERE id = shopping_list_id)
+);
+
+
+-- Policies for 'suggested_corrections' table
+DROP POLICY IF EXISTS "Users can manage their own suggestions." ON public.suggested_corrections;
+CREATE POLICY "Users can manage their own suggestions." ON public.suggested_corrections
+FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+-- Note: An admin role would need a separate policy to view/update all suggestions.
+
+-- Policies for 'user_submitted_prices' table
+DROP POLICY IF EXISTS "Submitted prices are publicly viewable." ON public.user_submitted_prices;
+CREATE POLICY "Submitted prices are publicly viewable." ON public.user_submitted_prices
+FOR SELECT USING (true);
+
+-- FIX: CREATE POLICY accepts exactly one command per policy ("FOR INSERT,
+-- UPDATE, DELETE" is a syntax error, and INSERT policies take only WITH CHECK),
+-- so the single invalid write policy is split into three. The old policy name
+-- is still dropped for idempotency against earlier deployments.
+DROP POLICY IF EXISTS "Users can manage their own submitted prices." ON public.user_submitted_prices;
+DROP POLICY IF EXISTS "Users can insert their own submitted prices." ON public.user_submitted_prices;
+CREATE POLICY "Users can insert their own submitted prices." ON public.user_submitted_prices
+FOR INSERT WITH CHECK (auth.uid() = user_id);
+DROP POLICY IF EXISTS "Users can update their own submitted prices." ON public.user_submitted_prices;
+CREATE POLICY "Users can update their own submitted prices." ON public.user_submitted_prices
+FOR UPDATE USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+DROP POLICY IF EXISTS "Users can delete their own submitted prices." ON public.user_submitted_prices;
+CREATE POLICY "Users can delete their own submitted prices." ON public.user_submitted_prices
+FOR DELETE USING (auth.uid() = user_id);
+
+-- Policies for new product/location tables
+DROP POLICY IF EXISTS "Public read access" ON public.brands;
+CREATE POLICY "Public read access" ON public.brands FOR SELECT USING (true);
+
+DROP POLICY IF EXISTS "Public read access" ON public.products;
+CREATE POLICY "Public read access" ON public.products FOR SELECT USING (true);
+
+DROP POLICY IF EXISTS "Public read access" ON public.store_locations;
+CREATE POLICY "Public read access" ON public.store_locations FOR SELECT USING (true);
+
+DROP POLICY IF EXISTS "Public read access" ON public.flyer_locations;
+CREATE POLICY "Public read access" ON public.flyer_locations FOR SELECT USING (true);
+
+-- Policies for recipe/menu plan tables
+DROP POLICY IF EXISTS "Recipes are publicly viewable." ON public.recipes;
+CREATE POLICY "Recipes are publicly viewable."
ON public.recipes FOR SELECT USING (true);
+
+-- FIX: "FOR INSERT, UPDATE, DELETE" is invalid -- CREATE POLICY takes exactly
+-- one command, and INSERT policies use WITH CHECK rather than USING. Split the
+-- write grant into three policies; the old policy name is still dropped for
+-- idempotency against earlier deployments.
+DROP POLICY IF EXISTS "Users can manage their own recipes." ON public.recipes;
+DROP POLICY IF EXISTS "Users can insert their own recipes." ON public.recipes;
+CREATE POLICY "Users can insert their own recipes." ON public.recipes
+FOR INSERT WITH CHECK (auth.uid() = user_id);
+DROP POLICY IF EXISTS "Users can update their own recipes." ON public.recipes;
+CREATE POLICY "Users can update their own recipes." ON public.recipes
+FOR UPDATE USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id);
+DROP POLICY IF EXISTS "Users can delete their own recipes." ON public.recipes;
+CREATE POLICY "Users can delete their own recipes." ON public.recipes
+FOR DELETE USING (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Recipe ingredients are publicly viewable." ON public.recipe_ingredients;
+CREATE POLICY "Recipe ingredients are publicly viewable." ON public.recipe_ingredients FOR SELECT USING (true);
+-- Note: Write access to recipe_ingredients should be controlled by who owns the parent recipe.
+-- A more complex policy or SECURITY DEFINER function would be needed for users to edit ingredients.
+-- For now, we assume inserts/updates happen via a trusted process or admin role.
+
+DROP POLICY IF EXISTS "Users can manage their own menu plans." ON public.menu_plans;
+CREATE POLICY "Users can manage their own menu plans." ON public.menu_plans FOR ALL USING (auth.uid() = user_id);
+
+DROP POLICY IF EXISTS "Users can manage meals in their own menu plans." ON public.planned_meals;
+CREATE POLICY "Users can manage meals in their own menu plans." ON public.planned_meals
+FOR ALL USING (
+  auth.uid() = (SELECT user_id FROM public.menu_plans WHERE id = menu_plan_id)
+);
+
+-- Policies for pantry_items table
+DROP POLICY IF EXISTS "Users can manage their own pantry items." ON public.pantry_items;
+CREATE POLICY "Users can manage their own pantry items." ON public.pantry_items FOR ALL USING (auth.uid() = user_id);
+
+-- Policies for new recipe-related tables
+DROP POLICY IF EXISTS "Tags are publicly viewable." ON public.tags;
+CREATE POLICY "Tags are publicly viewable." ON public.tags FOR SELECT USING (true);
+
+DROP POLICY IF EXISTS "Recipe-tag links are publicly viewable." ON public.recipe_tags;
+CREATE POLICY "Recipe-tag links are publicly viewable." ON public.recipe_tags FOR SELECT USING (true);
+
+DROP POLICY IF EXISTS "Recipe ratings are publicly viewable."
ON public.recipe_ratings; +CREATE POLICY "Recipe ratings are publicly viewable." ON public.recipe_ratings FOR SELECT USING (true); + +DROP POLICY IF EXISTS "Users can manage their own recipe ratings." ON public.recipe_ratings; +CREATE POLICY "Users can manage their own recipe ratings." ON public.recipe_ratings FOR ALL USING (auth.uid() = user_id); + + + + + + + + + + + + + diff --git a/sql/schema.sql.txt b/sql/schema.sql.txt new file mode 100644 index 0000000..67cbc86 --- /dev/null +++ b/sql/schema.sql.txt @@ -0,0 +1,1186 @@ +-- ============================================================================ +-- MASTER SCHEMA SCRIPT +-- ============================================================================ +-- Purpose: +-- This file contains the master SQL schema for the entire Supabase database. +-- It is designed to be a "one-click" script that can be run in the Supabase +-- SQL Editor to set up the entire backend from scratch, including: +-- 1. Enabling required Postgres extensions. +-- 2. Creating all tables with relationships and constraints. +-- 3. Seeding essential initial data (categories, master items). +-- 4. Creating the necessary storage buckets. +-- 5. Applying comprehensive Row Level Security (RLS) policies. +-- 6. Defining database functions for business logic. +-- 7. Setting up triggers for automation (e.g., creating user profiles). +-- +-- Usage: +-- For a fresh setup, run this entire script. To reset the environment, +-- first run 'delete_all_tables.sql.txt' and then run this script. + +-- ============================================================================ +-- PART 0: EXTENSIONS +-- ============================================================================ +-- Enable necessary PostgreSQL extensions. +-- postgis: For storing and querying geographic data (store locations). +-- pg_trgm: For trigram-based fuzzy string matching (improving item searches). 
+CREATE EXTENSION IF NOT EXISTS postgis; +CREATE EXTENSION IF NOT EXISTS pg_trgm; + +-- ============================================================================ +-- PART 1: TABLE CREATION +-- ============================================================================ + +-- 1. Create the 'stores' table for normalized store data. +CREATE TABLE IF NOT EXISTS public.stores ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + name TEXT NOT NULL UNIQUE, + logo_url TEXT +); +COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).'; + +-- 2. Create the 'categories' table for normalized category data. +CREATE TABLE IF NOT EXISTS public.categories ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + name TEXT NOT NULL UNIQUE +); +COMMENT ON TABLE public.categories IS 'Stores a predefined list of grocery item categories (e.g., ''Fruits & Vegetables'', ''Dairy & Eggs'').'; + +-- 4. Create the 'master_grocery_items' table. This is the master dictionary. +CREATE TABLE IF NOT EXISTS public.master_grocery_items ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + name TEXT NOT NULL UNIQUE, + category_id BIGINT REFERENCES public.categories(id) +); +COMMENT ON TABLE public.master_grocery_items IS 'The master dictionary of canonical grocery items. Each item has a unique name and is linked to a category.'; + +-- 3. Create the 'flyers' table with its full, final schema. 
+CREATE TABLE IF NOT EXISTS public.flyers ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + file_name TEXT NOT NULL, + image_url TEXT NOT NULL, + checksum TEXT UNIQUE, + store_id BIGINT REFERENCES public.stores(id), + valid_from DATE, + valid_to DATE, + store_address TEXT +); +COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.'; +COMMENT ON COLUMN public.flyers.file_name IS 'The original name of the uploaded flyer file (e.g., "flyer_week_1.pdf").'; +COMMENT ON COLUMN public.flyers.image_url IS 'The public URL of the primary flyer image stored in Supabase Storage.'; +COMMENT ON COLUMN public.flyers.checksum IS 'A SHA-256 hash of the original file content to prevent duplicate processing.'; +COMMENT ON COLUMN public.flyers.store_id IS 'Foreign key linking this flyer to a specific store in the `stores` table.'; +COMMENT ON COLUMN public.flyers.valid_from IS 'The start date of the sale period for this flyer, extracted by the AI.'; +COMMENT ON COLUMN public.flyers.valid_to IS 'The end date of the sale period for this flyer, extracted by the AI.'; +COMMENT ON COLUMN public.flyers.store_address IS 'The physical store address if it was successfully extracted from the flyer image.'; + + +-- 6. Create the 'flyer_items' table with its full, final schema. 
CREATE TABLE IF NOT EXISTS public.flyer_items (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    -- ON DELETE CASCADE: removing a flyer removes all of its extracted items.
    flyer_id BIGINT REFERENCES public.flyers(id) ON DELETE CASCADE,
    item TEXT NOT NULL,
    price_display TEXT NOT NULL,
    price_in_cents INTEGER,
    quantity TEXT NOT NULL,
    quantity_num NUMERIC,
    master_item_id BIGINT REFERENCES public.master_grocery_items(id),
    category_id BIGINT REFERENCES public.categories(id),
    category_name TEXT, -- Denormalized for easier display
    unit_price JSONB, -- {"value": 1.99, "unit": "lb"}
    product_id BIGINT -- Future use for specific product linking
);
COMMENT ON TABLE public.flyer_items IS 'Stores individual items extracted from a specific flyer.';
COMMENT ON COLUMN public.flyer_items.flyer_id IS 'Foreign key linking this item to its parent flyer in the `flyers` table.';
COMMENT ON COLUMN public.flyer_items.item IS 'The raw item name as it appears in the flyer (e.g., "Granny Smith Apples").';
COMMENT ON COLUMN public.flyer_items.price_display IS 'The raw price string from the flyer (e.g., "$3.99", "2 for $5.00").';
COMMENT ON COLUMN public.flyer_items.price_in_cents IS 'The normalized price for a single item, in cents, for easier sorting and comparison.';
COMMENT ON COLUMN public.flyer_items.quantity IS 'The raw quantity or deal description string (e.g., "per lb", "500g bag").';
COMMENT ON COLUMN public.flyer_items.quantity_num IS 'The parsed primary numeric value from the quantity string (e.g., 500 from "500g").';
COMMENT ON COLUMN public.flyer_items.master_item_id IS 'Foreign key linking this flyer item to its canonical entry in `master_grocery_items`. Null if no match was found.';
COMMENT ON COLUMN public.flyer_items.category_id IS 'Foreign key to the item''s category. Can be redundant if master_item_id is set, but useful.';
COMMENT ON COLUMN public.flyer_items.category_name IS 'The denormalized name of the category for faster UI display without extra joins.';
COMMENT ON COLUMN public.flyer_items.unit_price IS 'A JSONB object storing the calculated unit price (e.g., {"value": 1.99, "unit": "lb"}) for standardized price comparisons.';
COMMENT ON COLUMN public.flyer_items.product_id IS 'A foreign key for future use, to link to a specific product with a UPC code in the `products` table.';

-- Add a GIN index to the 'item' column for fast fuzzy text searching.
-- This requires the pg_trgm extension.
CREATE INDEX IF NOT EXISTS flyer_items_item_trgm_idx ON public.flyer_items USING GIN (item gin_trgm_ops);

-- 0. Create a table for public user profiles.
-- This table is linked to the auth.users table and stores non-sensitive user data.
CREATE TABLE IF NOT EXISTS public.profiles (
    -- Same UUID as auth.users; deleting the auth user removes the profile.
    id UUID PRIMARY KEY REFERENCES auth.users(id) ON DELETE CASCADE,
    updated_at TIMESTAMPTZ,
    full_name TEXT,
    avatar_url TEXT,
    preferences JSONB
);
COMMENT ON TABLE public.profiles IS 'Stores public-facing user data, linked to the private auth.users table.';

-- 5. Create the 'user_watched_items' table. This links to the master list.
CREATE TABLE IF NOT EXISTS public.user_watched_items (
    id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
    user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE,
    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE,
    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
    -- A user can watch a given master item at most once.
    UNIQUE(user_id, master_item_id)
);
COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';

-- 7. Create a table for user-defined alerts on watched items.
+CREATE TABLE IF NOT EXISTS public.user_alerts ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_watched_item_id BIGINT NOT NULL REFERENCES public.user_watched_items(id) ON DELETE CASCADE, + alert_type TEXT NOT NULL CHECK (alert_type IN ('PRICE_BELOW', 'PERCENT_OFF_AVERAGE')), + threshold_value NUMERIC NOT NULL, + is_active BOOLEAN DEFAULT true NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.user_alerts IS 'Stores user-configured alert rules for their watched items.'; +COMMENT ON COLUMN public.user_alerts.alert_type IS 'The condition that triggers the alert, e.g., ''PRICE_BELOW''.'; +COMMENT ON COLUMN public.user_alerts.threshold_value IS 'The numeric threshold for the alert condition (e.g., price in cents, or percentage).'; + + +-- 8. Create a table to store notifications for users. +CREATE TABLE IF NOT EXISTS public.notifications ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, + content TEXT NOT NULL, + link_url TEXT, + is_read BOOLEAN DEFAULT false NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.notifications IS 'A central log of notifications generated for users, such as price alerts.'; +COMMENT ON COLUMN public.notifications.content IS 'The notification message displayed to the user.'; +COMMENT ON COLUMN public.notifications.link_url IS 'A URL to navigate to when the notification is clicked.'; + + +-- 9. Create a table for aggregated, historical price data for master items. 
+CREATE TABLE IF NOT EXISTS public.item_price_history ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE, + summary_date DATE NOT NULL, + min_price_in_cents INTEGER, + max_price_in_cents INTEGER, + avg_price_in_cents INTEGER, + data_points_count INTEGER DEFAULT 0 NOT NULL, + UNIQUE(master_item_id, summary_date) +); +COMMENT ON TABLE public.item_price_history IS 'Serves as a summary table to speed up charting and analytics.'; +COMMENT ON COLUMN public.item_price_history.summary_date IS 'The date for which the price data is summarized.'; + + +-- 10. Create a table to map various names to a single master grocery item. +CREATE TABLE IF NOT EXISTS public.master_item_aliases ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE, + alias TEXT NOT NULL UNIQUE +); +COMMENT ON TABLE public.master_item_aliases IS 'Stores synonyms or alternative names for master items to improve matching.'; +COMMENT ON COLUMN public.master_item_aliases.alias IS 'An alternative name, e.g., "Ground Chuck" for the master item "Ground Beef".'; + + +-- 11. Create tables for user shopping lists. 
+CREATE TABLE IF NOT EXISTS public.shopping_lists ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, + name TEXT NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.shopping_lists IS 'Stores user-created shopping lists, e.g., "Weekly Groceries".'; + + +CREATE TABLE IF NOT EXISTS public.shopping_list_items ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(id) ON DELETE CASCADE, + master_item_id BIGINT REFERENCES public.master_grocery_items(id), + custom_item_name TEXT, + quantity NUMERIC DEFAULT 1 NOT NULL, + is_purchased BOOLEAN DEFAULT false NOT NULL, + added_at TIMESTAMPTZ DEFAULT now() NOT NULL, + UNIQUE(shopping_list_id, master_item_id), + CONSTRAINT must_have_item_identifier CHECK (master_item_id IS NOT NULL OR custom_item_name IS NOT NULL) +); +COMMENT ON TABLE public.shopping_list_items IS 'Contains individual items for a specific shopping list.'; +COMMENT ON COLUMN public.shopping_list_items.custom_item_name IS 'For items not in the master list, e.g., "Grandma''s special spice mix".'; +COMMENT ON COLUMN public.shopping_list_items.is_purchased IS 'Lets users check items off their list as they shop.'; + + +-- 12. Create a table to store user-submitted corrections for flyer items. 
+CREATE TABLE IF NOT EXISTS public.suggested_corrections ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(id) ON DELETE CASCADE, + user_id UUID NOT NULL REFERENCES auth.users(id), + correction_type TEXT NOT NULL, + suggested_value TEXT NOT NULL, + status TEXT DEFAULT 'pending' NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + reviewed_notes TEXT, + reviewed_at TIMESTAMPTZ +); +COMMENT ON TABLE public.suggested_corrections IS 'A queue for user-submitted data corrections, enabling crowdsourced data quality improvements.'; +COMMENT ON COLUMN public.suggested_corrections.correction_type IS 'The type of error the user is reporting.'; +COMMENT ON COLUMN public.suggested_corrections.suggested_value IS 'The corrected value proposed by the user (e.g., a new price or master_item_id).'; +COMMENT ON COLUMN public.suggested_corrections.status IS 'The moderation status of the correction: pending, approved, or rejected.'; + + +-- 13. Create a table for prices submitted directly by users from in-store. +CREATE TABLE IF NOT EXISTS public.user_submitted_prices ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id UUID NOT NULL REFERENCES auth.users(id), + master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id), + store_id BIGINT NOT NULL REFERENCES public.stores(id), + price_in_cents INTEGER NOT NULL, + photo_url TEXT, + upvotes INTEGER DEFAULT 0 NOT NULL, + downvotes INTEGER DEFAULT 0 NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.user_submitted_prices IS 'Stores item prices submitted by users directly from physical stores.'; +COMMENT ON COLUMN public.user_submitted_prices.photo_url IS 'URL to user-submitted photo evidence of the price.'; +COMMENT ON COLUMN public.user_submitted_prices.upvotes IS 'Community validation score indicating accuracy.'; + + +-- A table to store brand information. 
+CREATE TABLE IF NOT EXISTS public.brands ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + name TEXT NOT NULL UNIQUE +); +COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".'; + + +-- A table for specific products, linking a master item with a brand and size. +CREATE TABLE IF NOT EXISTS public.products ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id), + brand_id BIGINT REFERENCES public.brands(id), + name TEXT NOT NULL, + description TEXT, + size TEXT, + upc_code TEXT UNIQUE +); +COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.'; +COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.'; + +-- Link flyer_items to the new products table. +-- This is done via ALTER TABLE because 'products' is created after 'flyer_items'. +ALTER TABLE public.flyer_items +ADD CONSTRAINT flyer_items_product_id_fkey +FOREIGN KEY (product_id) REFERENCES public.products(id); + + +-- A table to store individual store locations with geographic data. +CREATE TABLE IF NOT EXISTS public.store_locations ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + store_id BIGINT NOT NULL REFERENCES public.stores(id), + address TEXT NOT NULL, + city TEXT, + province_state TEXT, + postal_code TEXT, + location GEOGRAPHY(Point, 4326) +); +COMMENT ON TABLE public.store_locations IS 'Stores physical locations of stores with geographic data for proximity searches.'; +COMMENT ON COLUMN public.store_locations.location IS 'Geographic coordinates (longitude, latitude) of the store.'; + +-- Add a GIST index for efficient geographic queries. +-- This requires the postgis extension. 
+CREATE INDEX IF NOT EXISTS store_locations_geo_idx ON public.store_locations USING GIST (location); + + +-- A linking table if one flyer is valid for multiple locations. +CREATE TABLE IF NOT EXISTS public.flyer_locations ( + flyer_id BIGINT NOT NULL REFERENCES public.flyers(id) ON DELETE CASCADE, + store_location_id BIGINT NOT NULL REFERENCES public.store_locations(id) ON DELETE CASCADE, + PRIMARY KEY (flyer_id, store_location_id) +); +COMMENT ON TABLE public.flyer_locations IS 'A linking table associating a single flyer with multiple store locations where its deals are valid.'; + +-- A table to store recipes, which can be user-created or pre-populated. +CREATE TABLE IF NOT EXISTS public.recipes ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id UUID REFERENCES auth.users(id), + name TEXT NOT NULL, + description TEXT, + instructions TEXT, + prep_time_minutes INTEGER, + cook_time_minutes INTEGER, + servings INTEGER, + photo_url TEXT, + calories_per_serving INTEGER, + protein_grams NUMERIC, + fat_grams NUMERIC, + carb_grams NUMERIC, + avg_rating NUMERIC(2,1) DEFAULT 0.0 NOT NULL, + rating_count INTEGER DEFAULT 0 NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.recipes IS 'Stores recipes that can be used to generate shopping lists.'; +COMMENT ON COLUMN public.recipes.servings IS 'The number of servings this recipe yields.'; + + +-- A linking table for ingredients required for each recipe. +CREATE TABLE IF NOT EXISTS public.recipe_ingredients ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE, + master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id), + quantity NUMERIC NOT NULL, + unit TEXT NOT NULL +); +COMMENT ON TABLE public.recipe_ingredients IS 'Defines the ingredients and quantities needed for a recipe.'; + + +-- A table to store a predefined list of tags for recipes. 
+CREATE TABLE IF NOT EXISTS public.tags ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + name TEXT NOT NULL UNIQUE +); +COMMENT ON TABLE public.tags IS 'Stores tags for categorizing recipes, e.g., "Vegetarian", "Quick & Easy".'; + + +-- A linking table to associate multiple tags with a recipe. +CREATE TABLE IF NOT EXISTS public.recipe_tags ( + recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE, + tag_id BIGINT NOT NULL REFERENCES public.tags(id) ON DELETE CASCADE, + PRIMARY KEY (recipe_id, tag_id) +); +COMMENT ON TABLE public.recipe_tags IS 'A linking table to associate multiple tags with a single recipe.'; + +-- A table to store individual user ratings for recipes. +CREATE TABLE IF NOT EXISTS public.recipe_ratings ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE, + user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, + rating INTEGER NOT NULL CHECK (rating >= 1 AND rating <= 5), + comment TEXT, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL, + UNIQUE(recipe_id, user_id) +); +COMMENT ON TABLE public.recipe_ratings IS 'Stores individual user ratings for recipes, ensuring a user can only rate a recipe once.'; + +-- A table to store a user's collection of planned meals for a date range. +CREATE TABLE IF NOT EXISTS public.menu_plans ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, + name TEXT NOT NULL, + start_date DATE NOT NULL, + end_date DATE NOT NULL, + created_at TIMESTAMPTZ DEFAULT now() NOT NULL +); +COMMENT ON TABLE public.menu_plans IS 'Represents a user''s meal plan for a specific period, e.g., "Week of Oct 23".'; + + +-- A table to associate a recipe with a specific date and meal type within a menu plan. 
+CREATE TABLE IF NOT EXISTS public.planned_meals ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + menu_plan_id BIGINT NOT NULL REFERENCES public.menu_plans(id) ON DELETE CASCADE, + recipe_id BIGINT NOT NULL REFERENCES public.recipes(id) ON DELETE CASCADE, + plan_date DATE NOT NULL, + meal_type TEXT NOT NULL, -- e.g., 'Breakfast', 'Lunch', 'Dinner' + servings_to_cook INTEGER +); +COMMENT ON TABLE public.planned_meals IS 'Assigns a recipe to a specific day and meal type within a user''s menu plan.'; +COMMENT ON COLUMN public.planned_meals.meal_type IS 'The designated meal for the recipe, e.g., ''Breakfast'', ''Lunch'', ''Dinner''.'; + + +-- A table to track the grocery items a user currently has in their pantry. +CREATE TABLE IF NOT EXISTS public.pantry_items ( + id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + user_id UUID NOT NULL REFERENCES auth.users(id) ON DELETE CASCADE, + master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(id) ON DELETE CASCADE, + quantity NUMERIC NOT NULL, + unit TEXT, + best_before_date DATE, + updated_at TIMESTAMPTZ DEFAULT now() NOT NULL, + UNIQUE(user_id, master_item_id, unit) +); +COMMENT ON TABLE public.pantry_items IS 'Tracks a user''s personal inventory of grocery items to enable smart shopping lists.'; +COMMENT ON COLUMN public.pantry_items.quantity IS 'The current amount of the item. Convention: use grams for weight, mL for volume where applicable.'; +COMMENT ON COLUMN public.pantry_items.unit IS 'e.g., ''g'', ''ml'', ''items''. Should align with recipe_ingredients.unit and quantity convention.'; + + + +-- ============================================================================ +-- PART 2: DATA SEEDING +-- ============================================================================ +-- Pre-populate the categories table. 
INSERT INTO public.categories (name) VALUES
    ('Fruits & Vegetables'), ('Meat & Seafood'), ('Dairy & Eggs'), ('Bakery & Bread'),
    ('Pantry & Dry Goods'), ('Beverages'), ('Frozen Foods'), ('Snacks'), ('Household & Cleaning'),
    ('Personal Care & Health'), ('Baby & Child'), ('Pet Supplies'), ('Deli & Prepared Foods'),
    ('Canned Goods'), ('Condiments & Spices'), ('Breakfast & Cereal'), ('Organic'),
    ('International Foods'), ('Other/Miscellaneous')
ON CONFLICT (name) DO NOTHING;

-- Pre-populate the master grocery items dictionary.
-- The DO block first resolves each category's id, then bulk-inserts items;
-- ON CONFLICT makes the seed idempotent across re-runs.
DO $$
DECLARE
    fv_cat_id BIGINT; ms_cat_id BIGINT; de_cat_id BIGINT; bb_cat_id BIGINT; pdg_cat_id BIGINT;
    bev_cat_id BIGINT; ff_cat_id BIGINT; snk_cat_id BIGINT; hc_cat_id BIGINT; pch_cat_id BIGINT;
    bc_cat_id BIGINT; ps_cat_id BIGINT; dpf_cat_id BIGINT; cg_cat_id BIGINT; cs_cat_id BIGINT;
    bkc_cat_id BIGINT;
BEGIN
    SELECT id INTO fv_cat_id FROM public.categories WHERE name = 'Fruits & Vegetables';
    SELECT id INTO ms_cat_id FROM public.categories WHERE name = 'Meat & Seafood';
    SELECT id INTO de_cat_id FROM public.categories WHERE name = 'Dairy & Eggs';
    SELECT id INTO bb_cat_id FROM public.categories WHERE name = 'Bakery & Bread';
    SELECT id INTO pdg_cat_id FROM public.categories WHERE name = 'Pantry & Dry Goods';
    SELECT id INTO bev_cat_id FROM public.categories WHERE name = 'Beverages';
    SELECT id INTO ff_cat_id FROM public.categories WHERE name = 'Frozen Foods';
    SELECT id INTO snk_cat_id FROM public.categories WHERE name = 'Snacks';
    SELECT id INTO hc_cat_id FROM public.categories WHERE name = 'Household & Cleaning';
    SELECT id INTO pch_cat_id FROM public.categories WHERE name = 'Personal Care & Health';
    SELECT id INTO bc_cat_id FROM public.categories WHERE name = 'Baby & Child';
    SELECT id INTO ps_cat_id FROM public.categories WHERE name = 'Pet Supplies';
    SELECT id INTO dpf_cat_id FROM public.categories WHERE name = 'Deli & Prepared Foods';
    SELECT id INTO cg_cat_id FROM public.categories WHERE name = 'Canned Goods';
    SELECT id INTO cs_cat_id FROM public.categories WHERE name = 'Condiments & Spices';
    SELECT id INTO bkc_cat_id FROM public.categories WHERE name = 'Breakfast & Cereal';

    INSERT INTO public.master_grocery_items (name, category_id) VALUES
    ('apples', fv_cat_id), ('bananas', fv_cat_id), ('oranges', fv_cat_id), ('grapes', fv_cat_id), ('strawberries', fv_cat_id), ('blueberries', fv_cat_id), ('raspberries', fv_cat_id), ('avocados', fv_cat_id), ('tomatoes', fv_cat_id), ('potatoes', fv_cat_id), ('onions', fv_cat_id), ('garlic', fv_cat_id), ('carrots', fv_cat_id), ('broccoli', fv_cat_id), ('spinach', fv_cat_id), ('lettuce', fv_cat_id), ('bell peppers', fv_cat_id), ('cucumbers', fv_cat_id), ('mushrooms', fv_cat_id), ('lemons', fv_cat_id), ('limes', fv_cat_id), ('celery', fv_cat_id), ('corn', fv_cat_id), ('sweet potatoes', fv_cat_id), ('zucchini', fv_cat_id), ('cauliflower', fv_cat_id), ('green beans', fv_cat_id), ('peas', fv_cat_id), ('asparagus', fv_cat_id),
    ('chicken breast', ms_cat_id), ('chicken thighs', ms_cat_id), ('ground beef', ms_cat_id), ('steak', ms_cat_id), ('pork chops', ms_cat_id), ('bacon', ms_cat_id), ('sausage', ms_cat_id), ('salmon', ms_cat_id), ('shrimp', ms_cat_id), ('tilapia', ms_cat_id), ('cod', ms_cat_id), ('tuna', ms_cat_id), ('ham', ms_cat_id), ('turkey', ms_cat_id),
    ('milk', de_cat_id), ('cheese', de_cat_id), ('yogurt', de_cat_id), ('butter', de_cat_id), ('eggs', de_cat_id), ('cream cheese', de_cat_id), ('sour cream', de_cat_id), ('cottage cheese', de_cat_id),
    ('bread', bb_cat_id), ('bagels', bb_cat_id), ('tortillas', bb_cat_id), ('croissants', bb_cat_id), ('muffins', bb_cat_id), ('baguette', bb_cat_id), ('pita bread', bb_cat_id),
    ('rice', pdg_cat_id), ('pasta', pdg_cat_id), ('flour', pdg_cat_id), ('sugar', pdg_cat_id), ('salt', pdg_cat_id), ('pepper', pdg_cat_id), ('olive oil', pdg_cat_id), ('vegetable oil', pdg_cat_id), ('canned tomatoes', pdg_cat_id), ('canned beans', pdg_cat_id), ('peanut butter', pdg_cat_id), ('jam', pdg_cat_id), ('honey', pdg_cat_id), ('syrup', pdg_cat_id), ('nuts', pdg_cat_id), ('dried fruit', pdg_cat_id), ('crackers', pdg_cat_id), ('quinoa', pdg_cat_id), ('lentils', pdg_cat_id),
    ('water', bev_cat_id), ('juice', bev_cat_id), ('soda', bev_cat_id), ('coffee', bev_cat_id), ('tea', bev_cat_id), ('almond milk', bev_cat_id), ('soy milk', bev_cat_id), ('coconut water', bev_cat_id),
    ('frozen pizza', ff_cat_id), ('frozen vegetables', ff_cat_id), ('frozen fruit', ff_cat_id), ('ice cream', ff_cat_id), ('frozen dinners', ff_cat_id), ('french fries', ff_cat_id), ('frozen fish', ff_cat_id),
    ('chips', snk_cat_id), ('pretzels', snk_cat_id), ('popcorn', snk_cat_id), ('granola bars', snk_cat_id), ('cookies', snk_cat_id), ('chocolate', snk_cat_id), ('candy', snk_cat_id),
    ('paper towels', hc_cat_id), ('toilet paper', hc_cat_id), ('trash bags', hc_cat_id), ('dish soap', hc_cat_id), ('laundry detergent', hc_cat_id), ('all-purpose cleaner', hc_cat_id), ('sponges', hc_cat_id),
    ('soap', pch_cat_id), ('shampoo', pch_cat_id), ('conditioner', pch_cat_id), ('toothpaste', pch_cat_id), ('deodorant', pch_cat_id), ('vitamins', pch_cat_id), ('pain reliever', pch_cat_id),
    ('diapers', bc_cat_id), ('baby wipes', bc_cat_id), ('baby food', bc_cat_id), ('formula', bc_cat_id),
    ('dog food', ps_cat_id), ('cat food', ps_cat_id), ('cat litter', ps_cat_id),
    ('deli meat', dpf_cat_id), ('deli cheese', dpf_cat_id), ('rotisserie chicken', dpf_cat_id), ('prepared salads', dpf_cat_id),
    ('canned soup', cg_cat_id), ('canned corn', cg_cat_id), ('canned tuna', cg_cat_id), ('canned chicken', cg_cat_id),
    ('ketchup', cs_cat_id), ('mustard', cs_cat_id), ('mayonnaise', cs_cat_id), ('soy sauce', cs_cat_id), ('hot sauce', cs_cat_id), ('bbq sauce', cs_cat_id), ('salad dressing', cs_cat_id), ('cinnamon', cs_cat_id), ('oregano', cs_cat_id), ('paprika', cs_cat_id), ('garlic powder', cs_cat_id),
    ('cereal', bkc_cat_id), ('oatmeal', bkc_cat_id), ('granola', bkc_cat_id), ('pancake mix', bkc_cat_id)
    ON CONFLICT (name) DO NOTHING;
END $$;

-- ============================================================================
-- PART 3: STORAGE
-- ============================================================================
-- Create the storage bucket for flyers if it doesn't exist.
INSERT INTO storage.buckets (id, name, public)
VALUES ('flyers', 'flyers', true)
ON CONFLICT (id) DO NOTHING;

-- ============================================================================
-- PART 4: ROW LEVEL SECURITY (RLS)
-- ============================================================================
-- Enable RLS on all tables.
ALTER TABLE public.profiles ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.stores ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.categories ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.flyers ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.flyer_items ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.master_grocery_items ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.user_watched_items ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.user_alerts ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.notifications ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.item_price_history ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.master_item_aliases ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.shopping_lists ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.shopping_list_items ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.suggested_corrections ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.user_submitted_prices ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.brands ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.products ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.store_locations ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.flyer_locations ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.recipes ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.recipe_ingredients ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.menu_plans ENABLE ROW LEVEL SECURITY;
ALTER TABLE
public.planned_meals ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.tags ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.recipe_tags ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.recipe_ratings ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.pantry_items ENABLE ROW LEVEL SECURITY;

-- Create policies.
DROP POLICY IF EXISTS "Public profiles are viewable by everyone." ON public.profiles;
CREATE POLICY "Public profiles are viewable by everyone." ON public.profiles FOR SELECT USING (true);
DROP POLICY IF EXISTS "Users can update their own profile." ON public.profiles;
CREATE POLICY "Users can update their own profile." ON public.profiles FOR UPDATE USING (auth.uid() = id) WITH CHECK (auth.uid() = id);
-- **FIX**: Add policy to allow the user creation trigger (running as 'postgres' owner) to insert new profiles.
-- NOTE(review): the 'postgres' role is typically the table owner, and owners
-- bypass RLS unless FORCE ROW LEVEL SECURITY is set on the table, so this
-- policy may be redundant — confirm the trigger's execution role.
DROP POLICY IF EXISTS "Allow service_role to insert new profiles" ON public.profiles; -- Cleanup old policy
DROP POLICY IF EXISTS "Allow postgres to insert new profiles" ON public.profiles; -- Cleanup in case of re-runs
CREATE POLICY "Allow postgres to insert new profiles" ON public.profiles FOR INSERT TO postgres WITH CHECK (true);


-- Public read access for most public data
DROP POLICY IF EXISTS "Public read access for price history" ON public.item_price_history;
CREATE POLICY "Public read access for price history" ON public.item_price_history FOR SELECT USING (true);
DROP POLICY IF EXISTS "Aliases are publicly viewable." ON public.master_item_aliases;
CREATE POLICY "Aliases are publicly viewable." ON public.master_item_aliases FOR SELECT USING (true);
DROP POLICY IF EXISTS "Public read access" ON public.store_locations;
CREATE POLICY "Public read access" ON public.store_locations FOR SELECT USING (true);
DROP POLICY IF EXISTS "Public read access" ON public.flyer_locations;
CREATE POLICY "Public read access" ON public.flyer_locations FOR SELECT USING (true);
DROP POLICY IF EXISTS "Submitted prices are publicly viewable." ON public.user_submitted_prices;
CREATE POLICY "Submitted prices are publicly viewable." ON public.user_submitted_prices FOR SELECT USING (true);
DROP POLICY IF EXISTS "Recipes are publicly viewable." ON public.recipes;
CREATE POLICY "Recipes are publicly viewable." ON public.recipes FOR SELECT USING (true);
DROP POLICY IF EXISTS "Recipe ingredients are publicly viewable." ON public.recipe_ingredients;
CREATE POLICY "Recipe ingredients are publicly viewable." ON public.recipe_ingredients FOR SELECT USING (true);
DROP POLICY IF EXISTS "Tags are publicly viewable." ON public.tags;
CREATE POLICY "Tags are publicly viewable." ON public.tags FOR SELECT USING (true);
DROP POLICY IF EXISTS "Recipe-tag links are publicly viewable." ON public.recipe_tags;
CREATE POLICY "Recipe-tag links are publicly viewable." ON public.recipe_tags FOR SELECT USING (true);
DROP POLICY IF EXISTS "Recipe ratings are publicly viewable." ON public.recipe_ratings;
CREATE POLICY "Recipe ratings are publicly viewable." ON public.recipe_ratings FOR SELECT USING (true);

-- Allow FULL public access (read & write) for core data tables for demo purposes.
-- This allows the "fake user" (anon role) to write flyer data.
-- NOTE(review): these FOR ALL USING (true) policies intentionally leave the
-- core tables world-writable for the demo; lock them down before production.
DROP POLICY IF EXISTS "Allow full public access" ON public.stores;
CREATE POLICY "Allow full public access" ON public.stores FOR ALL USING (true) WITH CHECK (true);

DROP POLICY IF EXISTS "Allow full public access" ON public.categories;
CREATE POLICY "Allow full public access" ON public.categories FOR ALL USING (true) WITH CHECK (true);

DROP POLICY IF EXISTS "Allow full public access" ON public.flyers;
CREATE POLICY "Allow full public access" ON public.flyers FOR ALL USING (true) WITH CHECK (true);

DROP POLICY IF EXISTS "Allow full public access" ON public.flyer_items;
CREATE POLICY "Allow full public access" ON public.flyer_items FOR ALL USING (true) WITH CHECK (true);

DROP POLICY IF EXISTS "Allow full public access" ON public.master_grocery_items;
CREATE POLICY "Allow full public access" ON public.master_grocery_items FOR ALL USING (true) WITH CHECK (true);

DROP POLICY IF EXISTS "Allow full public access" ON public.brands;
CREATE POLICY "Allow full public access" ON public.brands FOR ALL USING (true) WITH CHECK (true);

DROP POLICY IF EXISTS "Allow full public access" ON public.products;
CREATE POLICY "Allow full public access" ON public.products FOR ALL USING (true) WITH CHECK (true);

-- User-specific policies (these remain locked down to authenticated users)
DROP POLICY IF EXISTS "Users can manage their own watched items." ON public.user_watched_items;
CREATE POLICY "Users can manage their own watched items."
ON public.user_watched_items FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id); + +DROP POLICY IF EXISTS "Users can manage their own alerts" ON public.user_alerts; +CREATE POLICY "Users can manage their own alerts" ON public.user_alerts FOR ALL USING (auth.uid() = (SELECT user_id FROM public.user_watched_items WHERE id = user_watched_item_id)); + +DROP POLICY IF EXISTS "Users can manage their own notifications" ON public.notifications; +CREATE POLICY "Users can manage their own notifications" ON public.notifications FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id); + +DROP POLICY IF EXISTS "Users can manage their own shopping lists." ON public.shopping_lists; +CREATE POLICY "Users can manage their own shopping lists." ON public.shopping_lists FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id); +-- **FIX**: Add policy to allow the user creation trigger (running as 'postgres' owner) to insert new shopping lists. +DROP POLICY IF EXISTS "Allow service_role to insert new shopping lists" ON public.shopping_lists; -- Cleanup old policy +DROP POLICY IF EXISTS "Allow postgres to insert new shopping lists" ON public.shopping_lists; -- Cleanup in case of re-runs +CREATE POLICY "Allow postgres to insert new shopping lists" ON public.shopping_lists FOR INSERT TO postgres WITH CHECK (true); + +DROP POLICY IF EXISTS "Users can manage items in their own shopping lists." ON public.shopping_list_items; +CREATE POLICY "Users can manage items in their own shopping lists." ON public.shopping_list_items FOR ALL USING (auth.uid() = (SELECT user_id FROM public.shopping_lists WHERE id = shopping_list_id)); + +DROP POLICY IF EXISTS "Users can manage their own suggestions." ON public.suggested_corrections; +CREATE POLICY "Users can manage their own suggestions." 
ON public.suggested_corrections FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id); + +DROP POLICY IF EXISTS "Users can manage their own submitted prices." ON public.user_submitted_prices; +CREATE POLICY "Users can manage their own submitted prices." ON public.user_submitted_prices FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id); + +DROP POLICY IF EXISTS "Users can manage their own recipes." ON public.recipes; +CREATE POLICY "Users can manage their own recipes." ON public.recipes FOR ALL USING (auth.uid() = user_id) WITH CHECK (auth.uid() = user_id); + +DROP POLICY IF EXISTS "Users can manage their own menu plans." ON public.menu_plans; +CREATE POLICY "Users can manage their own menu plans." ON public.menu_plans FOR ALL USING (auth.uid() = user_id); + +DROP POLICY IF EXISTS "Users can manage meals in their own menu plans." ON public.planned_meals; +CREATE POLICY "Users can manage meals in their own menu plans." ON public.planned_meals FOR ALL USING (auth.uid() = (SELECT user_id FROM public.menu_plans WHERE id = menu_plan_id)); + +DROP POLICY IF EXISTS "Users can manage their own recipe ratings." ON public.recipe_ratings; +CREATE POLICY "Users can manage their own recipe ratings." ON public.recipe_ratings FOR ALL USING (auth.uid() = user_id); + +DROP POLICY IF EXISTS "Users can manage their own pantry items." ON public.pantry_items; +CREATE POLICY "Users can manage their own pantry items." 
ON public.pantry_items FOR ALL USING (auth.uid() = user_id); + +-- Policies for the 'flyers' storage bucket +DROP POLICY IF EXISTS "Allow public read access to flyers storage" ON storage.objects; +CREATE POLICY "Allow public read access to flyers storage" ON storage.objects FOR SELECT TO public USING (bucket_id = 'flyers'); + +DROP POLICY IF EXISTS "Allow anyone to upload to flyers storage" ON storage.objects; +CREATE POLICY "Allow anyone to upload to flyers storage" ON storage.objects FOR INSERT TO public WITH CHECK (bucket_id = 'flyers'); + +DROP POLICY IF EXISTS "Allow authenticated users to delete their own flyers" ON storage.objects; +-- Note: This is a simplified policy. A real app would link storage objects to user IDs. +CREATE POLICY "Allow authenticated users to delete their own flyers" ON storage.objects FOR DELETE TO authenticated USING (bucket_id = 'flyers'); + + +-- ============================================================================ +-- PART 5: DATABASE FUNCTIONS +-- ============================================================================ +-- Function to find the best current sale price for a user's watched items. +-- This function queries all currently active flyers to find the lowest price +-- for each item on a specific user's watchlist. +CREATE OR REPLACE FUNCTION public.get_best_sale_prices_for_user(p_user_id UUID) +RETURNS TABLE ( + master_item_id BIGINT, + item_name TEXT, + best_price_in_cents INTEGER, + store_name TEXT, + flyer_id BIGINT, + flyer_image_url TEXT, + flyer_valid_from DATE, + flyer_valid_to DATE +) +LANGUAGE plpgsql +SECURITY INVOKER -- Runs with the privileges of the calling user. RLS policies will apply. +AS $$ +BEGIN + RETURN QUERY + WITH UserWatchedSales AS ( + -- This CTE gathers all sales from active flyers that match the user's watched items. 
+ SELECT + uwi.master_item_id, + mgi.name AS item_name, + fi.price_in_cents, + s.name AS store_name, + f.id AS flyer_id, + f.image_url AS flyer_image_url, + f.valid_from AS flyer_valid_from, + f.valid_to AS flyer_valid_to, + -- We use ROW_NUMBER to rank sales for the same item, prioritizing the lowest price. + ROW_NUMBER() OVER (PARTITION BY uwi.master_item_id ORDER BY fi.price_in_cents ASC, f.valid_to DESC, s.name ASC) as rn + FROM + public.user_watched_items uwi + JOIN public.master_grocery_items mgi ON uwi.master_item_id = mgi.id + JOIN public.flyer_items fi ON uwi.master_item_id = fi.master_item_id + JOIN public.flyers f ON fi.flyer_id = f.id + JOIN public.stores s ON f.store_id = s.id + WHERE uwi.user_id = p_user_id + AND f.valid_from <= CURRENT_DATE + AND f.valid_to >= CURRENT_DATE + AND fi.price_in_cents IS NOT NULL + ) + -- The final select returns only the top-ranked sale (rn = 1) for each item. + SELECT uws.master_item_id, uws.item_name, uws.price_in_cents, uws.store_name, uws.flyer_id, uws.flyer_image_url, uws.flyer_valid_from, uws.flyer_valid_to + FROM UserWatchedSales uws + WHERE uws.rn = 1; +END; +$$; + +-- Function to generate a smart shopping list from a menu plan, subtracting pantry items. +-- This function calculates the total ingredients needed for a user's menu plan, +-- scales them by desired servings, and then subtracts what the user already has +-- in their pantry to determine what needs to be bought. +CREATE OR REPLACE FUNCTION public.generate_shopping_list_for_menu_plan(p_menu_plan_id BIGINT, p_user_id UUID) +RETURNS TABLE ( + master_item_id BIGINT, + item_name TEXT, + required_quantity NUMERIC, + pantry_quantity NUMERIC, + shopping_list_quantity NUMERIC, + unit TEXT +) +LANGUAGE plpgsql +SECURITY INVOKER -- Runs with the privileges of the calling user. RLS policies will apply. +AS $$ +BEGIN + RETURN QUERY + WITH RequiredIngredients AS ( + -- This CTE calculates the total quantity of each ingredient needed for the menu plan. 
+ -- It accounts for scaling the recipe based on the number of servings the user plans to cook. + SELECT + ri.master_item_id, + ri.unit, + SUM( + ri.quantity * -- The base ingredient quantity from the recipe + -- Calculate the scaling factor. Default to 1 if servings_to_cook is not set. + (COALESCE(pm.servings_to_cook, r.servings)::NUMERIC / NULLIF(r.servings, 0)::NUMERIC) + ) AS total_required + FROM public.menu_plans mp + JOIN public.planned_meals pm ON mp.id = pm.menu_plan_id + JOIN public.recipe_ingredients ri ON pm.recipe_id = ri.recipe_id + JOIN public.recipes r ON pm.recipe_id = r.id -- Join to get the recipe's base servings + WHERE mp.id = p_menu_plan_id AND mp.user_id = p_user_id + GROUP BY ri.master_item_id, ri.unit + ) + -- This final select compares the required ingredients with the user's pantry. + SELECT + req.master_item_id, + mgi.name AS item_name, + req.total_required AS required_quantity, + COALESCE(pi.quantity, 0) AS pantry_quantity, + -- Calculate the amount to buy. If pantry has enough, this will be 0 or less, so GREATEST(0, ...) ensures we don't get negative values. + GREATEST(0, req.total_required - COALESCE(pi.quantity, 0)) AS shopping_list_quantity, + req.unit + FROM RequiredIngredients req + JOIN public.master_grocery_items mgi ON req.master_item_id = mgi.id + LEFT JOIN public.pantry_items pi + ON req.master_item_id = pi.master_item_id + AND req.unit = pi.unit -- Critical: only subtract if units match to avoid errors (e.g., subtracting 2 "items" from 500 "grams"). + AND pi.user_id = p_user_id + WHERE + -- Only include items that actually need to be purchased. + GREATEST(0, req.total_required - COALESCE(pi.quantity, 0)) > 0; +END; +$$; + +-- Function to find recipes based on the percentage of their ingredients that are currently on sale. +-- For example, you can ask for recipes where at least 50% of the ingredients are on sale. 
+CREATE OR REPLACE FUNCTION public.get_recipes_by_sale_percentage(p_min_sale_percentage NUMERIC DEFAULT 100.0) +RETURNS TABLE (recipe_details JSONB) +LANGUAGE sql +STABLE +SECURITY INVOKER +AS $$ + WITH BestCurrentPrices AS ( + -- CTE 1: For every distinct item on sale, find its single best price and the store offering it. + SELECT + bcp.master_item_id, + bcp.price_in_cents, + bcp.store_name + FROM ( + SELECT + fi.master_item_id, + fi.price_in_cents, + s.name as store_name, + ROW_NUMBER() OVER(PARTITION BY fi.master_item_id ORDER BY fi.price_in_cents ASC, f.valid_to DESC) as rn + FROM public.flyer_items fi + JOIN public.flyers f ON fi.flyer_id = f.id + JOIN public.stores s ON f.store_id = s.id + WHERE fi.master_item_id IS NOT NULL + AND fi.price_in_cents IS NOT NULL + AND CURRENT_DATE BETWEEN f.valid_from AND f.valid_to + ) bcp + WHERE bcp.rn = 1 + ), + RecipeIngredientStats AS ( + -- CTE 2: For each recipe, count its total ingredients and how many of them are on sale. + SELECT + ri.recipe_id, + COUNT(ri.master_item_id) AS total_ingredients, + COUNT(bcp.master_item_id) AS sale_ingredients -- COUNT(column) only counts non-NULL values. + FROM public.recipe_ingredients ri + LEFT JOIN BestCurrentPrices bcp ON ri.master_item_id = bcp.master_item_id + GROUP BY ri.recipe_id + ), + EligibleRecipes AS ( + -- CTE 3: Filter recipes based on the minimum sale percentage provided as an argument. + SELECT + ris.recipe_id + FROM RecipeIngredientStats ris + WHERE ris.total_ingredients > 0 -- Avoid division by zero + AND (ris.sale_ingredients * 100.0 / ris.total_ingredients) >= p_min_sale_percentage + ), + RecipeSaleDetails AS ( + -- CTE 4: Gather details for the eligible recipes and ALL their ingredients, noting which are on sale. + SELECT + r.id AS recipe_id, + r.name AS recipe_name, + mgi.name AS item_name, + bcp.price_in_cents AS best_price_in_cents, -- This will be NULL if the ingredient is not on sale. 
+ bcp.store_name + FROM public.recipes r + JOIN EligibleRecipes er ON r.id = er.recipe_id + JOIN public.recipe_ingredients ri ON r.id = ri.recipe_id + JOIN public.master_grocery_items mgi ON ri.master_item_id = mgi.id + LEFT JOIN BestCurrentPrices bcp ON ri.master_item_id = bcp.master_item_id -- LEFT JOIN to include all ingredients, not just sale ones. + ) + -- Final Step: Aggregate the details into a single JSON object for each recipe. + SELECT + jsonb_build_object( + 'id', rsd.recipe_id, + 'name', rsd.recipe_name, + 'ingredients', jsonb_agg( + jsonb_build_object( + 'item_name', rsd.item_name, + 'on_sale', (rsd.best_price_in_cents IS NOT NULL), + 'best_price_in_cents', rsd.best_price_in_cents, + 'store_name', rsd.store_name + ) + ORDER BY (rsd.best_price_in_cents IS NOT NULL) DESC, rsd.item_name ASC -- Show sale items first in the list. + ) + ) + FROM RecipeSaleDetails rsd + GROUP BY rsd.recipe_id, rsd.recipe_name; +$$; + +-- Function to add items generated from a menu plan directly to a user's shopping list. +-- This acts as a utility function to chain `generate_shopping_list_for_menu_plan` with an INSERT action. +CREATE OR REPLACE FUNCTION public.add_menu_plan_to_shopping_list( + p_menu_plan_id BIGINT, + p_shopping_list_id BIGINT, + p_user_id UUID +) +RETURNS TABLE ( + master_item_id BIGINT, + item_name TEXT, + quantity_added NUMERIC +) +LANGUAGE plpgsql +-- SECURITY DEFINER is used here to perform actions with elevated privileges, +-- but it's safe because we first perform a strict ownership check inside the function. +SECURITY DEFINER +AS $$ +DECLARE + list_owner_id UUID; + item_to_add RECORD; +BEGIN + -- Security Check: Ensure the user calling this function owns the target shopping list. 
+ SELECT user_id INTO list_owner_id + FROM public.shopping_lists + WHERE id = p_shopping_list_id; + + IF list_owner_id IS NULL OR list_owner_id <> p_user_id THEN + RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id; + END IF; + + -- Loop through the items generated by the smart shopping list function. + FOR item_to_add IN + SELECT * FROM public.generate_shopping_list_for_menu_plan(p_menu_plan_id, p_user_id) + LOOP + -- Insert the item into the shopping list. If it already exists, add to the quantity. + INSERT INTO public.shopping_list_items (shopping_list_id, master_item_id, quantity) + VALUES (p_shopping_list_id, item_to_add.master_item_id, item_to_add.shopping_list_quantity) + ON CONFLICT (shopping_list_id, master_item_id) + DO UPDATE SET + quantity = shopping_list_items.quantity + EXCLUDED.quantity; + + -- Return the details of the item that was added/updated. + RETURN QUERY SELECT item_to_add.master_item_id, item_to_add.item_name, item_to_add.shopping_list_quantity; + END LOOP; +END; +$$; + +-- Function to find recipes that have at least a specified number of ingredients currently on sale. +CREATE OR REPLACE FUNCTION public.get_recipes_by_min_sale_ingredients(p_min_sale_ingredients INTEGER) +RETURNS TABLE ( + recipe_id BIGINT, + recipe_name TEXT, + description TEXT, + sale_ingredients_count BIGINT +) +LANGUAGE sql +STABLE +SECURITY INVOKER +AS $$ + WITH CurrentSaleItems AS ( + -- CTE 1: Get a distinct list of all master item IDs that are currently on sale. + SELECT DISTINCT fi.master_item_id + FROM public.flyer_items fi + JOIN public.flyers f ON fi.flyer_id = f.id + WHERE fi.master_item_id IS NOT NULL + AND fi.price_in_cents IS NOT NULL + AND CURRENT_DATE BETWEEN f.valid_from AND f.valid_to + ), + RecipeIngredientStats AS ( + -- CTE 2: For each recipe, count how many of its ingredients are on the sale list. 
+ SELECT + ri.recipe_id, + COUNT(csi.master_item_id) AS sale_ingredients_count + FROM public.recipe_ingredients ri + LEFT JOIN CurrentSaleItems csi ON ri.master_item_id = csi.master_item_id + GROUP BY ri.recipe_id + ) + -- Final Step: Select recipes that meet the minimum sale ingredient count and order them. + SELECT + r.id, + r.name, + r.description, + ris.sale_ingredients_count + FROM public.recipes r + JOIN RecipeIngredientStats ris ON r.id = ris.recipe_id + WHERE ris.sale_ingredients_count >= p_min_sale_ingredients + ORDER BY + ris.sale_ingredients_count DESC, + r.avg_rating DESC; +$$; + +-- Function to find the most frequently advertised items in a given period. +-- This helps identify which items go on sale most often. +CREATE OR REPLACE FUNCTION public.get_most_frequent_sale_items(days_interval INTEGER, result_limit INTEGER) +RETURNS TABLE ( + item_name TEXT, + sale_occurrence_count BIGINT +) +LANGUAGE sql +STABLE +SECURITY INVOKER +AS $$ + SELECT + mgi.name AS item_name, + COUNT(DISTINCT fi.flyer_id) AS sale_occurrence_count -- Count distinct flyers the item appeared in + FROM + public.flyer_items fi + JOIN + public.flyers f ON fi.flyer_id = f.id + JOIN + public.master_grocery_items mgi ON fi.master_item_id = mgi.id + WHERE + fi.master_item_id IS NOT NULL + -- Filter for flyers that have been active in the last X days. + AND f.valid_to >= (CURRENT_DATE - (days_interval || ' days')::INTERVAL) + AND f.valid_from <= CURRENT_DATE + GROUP BY + mgi.id, mgi.name + ORDER BY + sale_occurrence_count DESC + LIMIT result_limit; +$$; + +-- Function to find recipes by a specific ingredient AND a specific tag. +-- This allows for more refined recipe searching, e.g., "Find me a quick & easy recipe with chicken breast". 
+CREATE OR REPLACE FUNCTION public.find_recipes_by_ingredient_and_tag(p_ingredient_name TEXT, p_tag_name TEXT) +RETURNS TABLE ( + id BIGINT, + name TEXT, + description TEXT, + prep_time_minutes INTEGER, + cook_time_minutes INTEGER, + avg_rating NUMERIC +) +LANGUAGE sql +STABLE +SECURITY INVOKER +AS $$ + SELECT + r.id, r.name, r.description, r.prep_time_minutes, r.cook_time_minutes, r.avg_rating + FROM + public.recipes r + WHERE + -- Check that the recipe has the required ingredient using an EXISTS subquery. + EXISTS ( + SELECT 1 FROM public.recipe_ingredients ri + JOIN public.master_grocery_items mgi ON ri.master_item_id = mgi.id + WHERE ri.recipe_id = r.id AND mgi.name = p_ingredient_name + ) + AND + -- Check that the recipe has the required tag using another EXISTS subquery. + EXISTS ( + SELECT 1 FROM public.recipe_tags rt + JOIN public.tags t ON rt.tag_id = t.id + WHERE rt.recipe_id = r.id AND t.name = p_tag_name + ) + ORDER BY + r.avg_rating DESC, r.name ASC; +$$; + + +-- ============================================================================ +-- PART 6: SYSTEM CHECK HELPER FUNCTIONS +-- These functions are called by the 'system-check' Edge Function to inspect +-- the database state without exposing schema details to the client. They are +-- defined as `SECURITY DEFINER` to bypass RLS for inspection purposes. 
+-- ============================================================================ +DROP FUNCTION IF EXISTS public.check_schema(); +CREATE OR REPLACE FUNCTION public.check_schema() +RETURNS json +LANGUAGE sql +SECURITY DEFINER +AS $$ + SELECT json_build_object( + 'tables', (SELECT array_agg(table_name) FROM information_schema.tables WHERE table_schema = 'public') + ); +$$; + +DROP FUNCTION IF EXISTS public.check_rls(); +CREATE OR REPLACE FUNCTION public.check_rls() +RETURNS TABLE(table_name text, policy_name text) +LANGUAGE sql +SECURITY DEFINER +AS $$ + SELECT + tablename::text, + policyname::text + FROM pg_policies + WHERE schemaname = 'public'; +$$; + +DROP FUNCTION IF EXISTS public.check_trigger_security(); +CREATE OR REPLACE FUNCTION public.check_trigger_security() +RETURNS TABLE(function_name text, is_security_definer boolean, owner_role text) +LANGUAGE sql +SECURITY DEFINER +AS $$ + SELECT + p.proname::text, + p.prosecdef, + r.rolname::text + FROM pg_proc p + JOIN pg_roles r ON p.proowner = r.oid + WHERE p.proname = 'handle_new_user'; +$$; + + +-- ============================================================================ +-- PART 7: TRIGGERS +-- ============================================================================ + +-- 1. Set up the trigger to automatically create a profile when a new user signs up. +-- This function will be called by the trigger. +-- It is set to SECURITY DEFINER to ensure it can insert into public tables. +-- The owner will be 'postgres' (the user running this script), which has an +-- RLS policy allowing it to insert. +CREATE OR REPLACE FUNCTION public.handle_new_user() +RETURNS TRIGGER AS $$ +DECLARE + new_profile_id UUID; +BEGIN + INSERT INTO public.profiles (id, full_name, avatar_url) + VALUES (new.id, new.raw_user_meta_data->>'full_name', new.raw_user_meta_data->>'avatar_url') + RETURNING id INTO new_profile_id; + -- Also create a default shopping list for the new user. 
+ INSERT INTO public.shopping_lists (user_id, name) + VALUES (new_profile_id, 'Main Shopping List'); + RETURN new; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- This trigger calls the function after a new user is created. +DROP TRIGGER IF EXISTS on_auth_user_created ON auth.users; +CREATE TRIGGER on_auth_user_created + AFTER INSERT ON auth.users + FOR EACH ROW EXECUTE FUNCTION public.handle_new_user(); + +-- 2. Create a reusable function to automatically update 'updated_at' columns. +CREATE OR REPLACE FUNCTION public.handle_updated_at() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = now(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Apply the trigger to the 'profiles' table. +DROP TRIGGER IF EXISTS on_profile_updated ON public.profiles; +CREATE TRIGGER on_profile_updated + BEFORE UPDATE ON public.profiles + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- Apply the trigger to the 'pantry_items' table. +DROP TRIGGER IF EXISTS on_pantry_item_updated ON public.pantry_items; +CREATE TRIGGER on_pantry_item_updated + BEFORE UPDATE ON public.pantry_items + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- 3. Create a trigger function to populate the item_price_history table on insert. +CREATE OR REPLACE FUNCTION public.update_price_history_on_flyer_item_insert() +RETURNS TRIGGER AS $$ +DECLARE + flyer_valid_from DATE; + flyer_valid_to DATE; + current_summary_date DATE; +BEGIN + -- Only run if the new flyer item is linked to a master item and has a price. + IF NEW.master_item_id IS NULL OR NEW.price_in_cents IS NULL THEN + RETURN NEW; + END IF; + + -- Get the validity dates of the flyer. + SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to + FROM public.flyers + WHERE id = NEW.flyer_id; + + -- If the flyer dates are not set, we cannot proceed. 
+ IF flyer_valid_from IS NULL OR flyer_valid_to IS NULL THEN + RETURN NEW; + END IF; + + -- Loop through each day the flyer is valid and update the price history. + FOR current_summary_date IN SELECT generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval)::date LOOP + INSERT INTO public.item_price_history (master_item_id, summary_date, min_price_in_cents, max_price_in_cents, avg_price_in_cents, data_points_count) + VALUES (NEW.master_item_id, current_summary_date, NEW.price_in_cents, NEW.price_in_cents, NEW.price_in_cents, 1) + ON CONFLICT (master_item_id, summary_date) + DO UPDATE SET + min_price_in_cents = LEAST(item_price_history.min_price_in_cents, EXCLUDED.min_price_in_cents), + max_price_in_cents = GREATEST(item_price_history.max_price_in_cents, EXCLUDED.max_price_in_cents), + -- Recalculate the running average in numeric so integer division cannot truncate before ROUND: ((old_avg * old_count) + new_price) / (old_count + 1) + avg_price_in_cents = ROUND(((item_price_history.avg_price_in_cents::numeric * item_price_history.data_points_count) + EXCLUDED.avg_price_in_cents) / (item_price_history.data_points_count + 1)), + data_points_count = item_price_history.data_points_count + 1; + END LOOP; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Create the trigger on the flyer_items table for insert. +DROP TRIGGER IF EXISTS trigger_update_price_history ON public.flyer_items; +CREATE TRIGGER trigger_update_price_history + AFTER INSERT ON public.flyer_items + FOR EACH ROW EXECUTE FUNCTION public.update_price_history_on_flyer_item_insert(); + +-- 4. Create a trigger function to recalculate price history when a flyer item is deleted. +CREATE OR REPLACE FUNCTION public.recalculate_price_history_on_flyer_item_delete() +RETURNS TRIGGER AS $$ +DECLARE + flyer_valid_from DATE; + flyer_valid_to DATE; + current_summary_date DATE; + new_aggregates RECORD; +BEGIN + -- Only run if the deleted item was linked to a master item and had a price.
+ IF OLD.master_item_id IS NULL OR OLD.price_in_cents IS NULL THEN + RETURN OLD; + END IF; + + -- Get the validity dates of the flyer. + SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to + FROM public.flyers + WHERE id = OLD.flyer_id; + + -- If the flyer dates are not set, we cannot proceed. + IF flyer_valid_from IS NULL OR flyer_valid_to IS NULL THEN + RETURN OLD; + END IF; + + -- Loop through each day the flyer was valid to recalculate the history. + FOR current_summary_date IN SELECT generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval)::date LOOP + -- Recalculate aggregates for the master item on this specific day from all other flyers. + SELECT + MIN(fi.price_in_cents) AS min_price, + MAX(fi.price_in_cents) AS max_price, + ROUND(AVG(fi.price_in_cents)) AS avg_price, + COUNT(fi.id) AS data_points + INTO new_aggregates + FROM public.flyer_items fi + JOIN public.flyers f ON fi.flyer_id = f.id + WHERE fi.master_item_id = OLD.master_item_id + AND fi.price_in_cents IS NOT NULL + AND current_summary_date BETWEEN f.valid_from AND f.valid_to; + + -- If there are still data points, update the summary. Otherwise, delete it. + IF new_aggregates.data_points > 0 THEN + UPDATE public.item_price_history + SET + min_price_in_cents = new_aggregates.min_price, + max_price_in_cents = new_aggregates.max_price, + avg_price_in_cents = new_aggregates.avg_price, + data_points_count = new_aggregates.data_points + WHERE master_item_id = OLD.master_item_id AND summary_date = current_summary_date; + ELSE + DELETE FROM public.item_price_history + WHERE master_item_id = OLD.master_item_id AND summary_date = current_summary_date; + END IF; + END LOOP; + + RETURN OLD; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Create the trigger on the flyer_items table for DELETE operations. 
+DROP TRIGGER IF EXISTS trigger_recalculate_price_history_on_delete ON public.flyer_items; +CREATE TRIGGER trigger_recalculate_price_history_on_delete + AFTER DELETE ON public.flyer_items + FOR EACH ROW EXECUTE FUNCTION public.recalculate_price_history_on_flyer_item_delete(); + +-- 5. Trigger function to update the average rating on the recipes table. +CREATE OR REPLACE FUNCTION public.update_recipe_rating_aggregates() +RETURNS TRIGGER AS $$ +BEGIN + UPDATE public.recipes + SET + avg_rating = ( + SELECT AVG(rating) + FROM public.recipe_ratings + WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) + ), + rating_count = ( + SELECT COUNT(*) + FROM public.recipe_ratings + WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) + ) + WHERE id = COALESCE(NEW.recipe_id, OLD.recipe_id); + + RETURN NULL; -- The result is ignored since this is an AFTER trigger. +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Trigger to call the function after any change to recipe_ratings. +DROP TRIGGER IF EXISTS on_recipe_rating_change ON public.recipe_ratings; +CREATE TRIGGER on_recipe_rating_change + AFTER INSERT OR UPDATE OR DELETE ON public.recipe_ratings + FOR EACH ROW EXECUTE FUNCTION public.update_recipe_rating_aggregates(); diff --git a/sql/triggers.sql b/sql/triggers.sql new file mode 100644 index 0000000..9138ff5 --- /dev/null +++ b/sql/triggers.sql @@ -0,0 +1,183 @@ +-- This file contains all trigger functions and trigger definitions for the database. + +-- 1. Set up the trigger to automatically create a profile when a new user signs up. +-- This function will be called by the trigger. +CREATE OR REPLACE FUNCTION public.handle_new_user() +RETURNS TRIGGER AS $$ +DECLARE + new_profile_id UUID; +BEGIN + INSERT INTO public.profiles (id, full_name, avatar_url) + VALUES (new.id, new.raw_user_meta_data->>'full_name', new.raw_user_meta_data->>'avatar_url') + RETURNING id INTO new_profile_id; + -- Also create a default shopping list for the new user. 
+ INSERT INTO public.shopping_lists (user_id, name) + VALUES (new_profile_id, 'Main Shopping List'); + RETURN new; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- This trigger calls the function after a new user is created. +DROP TRIGGER IF EXISTS on_auth_user_created ON auth.users; +CREATE TRIGGER on_auth_user_created + AFTER INSERT ON auth.users + FOR EACH ROW EXECUTE FUNCTION public.handle_new_user(); + +-- 2. Create a reusable function to automatically update 'updated_at' columns. +CREATE OR REPLACE FUNCTION public.handle_updated_at() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = now(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Apply the trigger to the 'profiles' table. +DROP TRIGGER IF EXISTS on_profile_updated ON public.profiles; +CREATE TRIGGER on_profile_updated + BEFORE UPDATE ON public.profiles + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- Apply the trigger to the 'pantry_items' table. +DROP TRIGGER IF EXISTS on_pantry_item_updated ON public.pantry_items; +CREATE TRIGGER on_pantry_item_updated + BEFORE UPDATE ON public.pantry_items + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- 3. Create a trigger function to populate the item_price_history table on insert. +CREATE OR REPLACE FUNCTION public.update_price_history_on_flyer_item_insert() +RETURNS TRIGGER AS $$ +DECLARE + flyer_valid_from DATE; + flyer_valid_to DATE; + current_summary_date DATE; +BEGIN + -- Only run if the new flyer item is linked to a master item and has a price. + IF NEW.master_item_id IS NULL OR NEW.price_in_cents IS NULL THEN + RETURN NEW; + END IF; + + -- Get the validity dates of the flyer. + SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to + FROM public.flyers + WHERE id = NEW.flyer_id; + + -- If the flyer dates are not set, we cannot proceed. 
+ IF flyer_valid_from IS NULL OR flyer_valid_to IS NULL THEN + RETURN NEW; + END IF; + + -- Loop through each day the flyer is valid and update the price history. + FOR current_summary_date IN SELECT generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval)::date LOOP + INSERT INTO public.item_price_history (master_item_id, summary_date, min_price_in_cents, max_price_in_cents, avg_price_in_cents, data_points_count) + VALUES (NEW.master_item_id, current_summary_date, NEW.price_in_cents, NEW.price_in_cents, NEW.price_in_cents, 1) + ON CONFLICT (master_item_id, summary_date) + DO UPDATE SET + min_price_in_cents = LEAST(item_price_history.min_price_in_cents, EXCLUDED.min_price_in_cents), + max_price_in_cents = GREATEST(item_price_history.max_price_in_cents, EXCLUDED.max_price_in_cents), + -- Recalculate the running average in numeric so integer division cannot truncate before ROUND: ((old_avg * old_count) + new_price) / (old_count + 1) + avg_price_in_cents = ROUND(((item_price_history.avg_price_in_cents::numeric * item_price_history.data_points_count) + EXCLUDED.avg_price_in_cents) / (item_price_history.data_points_count + 1)), + data_points_count = item_price_history.data_points_count + 1; + END LOOP; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Create the trigger on the flyer_items table for insert. +DROP TRIGGER IF EXISTS trigger_update_price_history ON public.flyer_items; +CREATE TRIGGER trigger_update_price_history + AFTER INSERT ON public.flyer_items + FOR EACH ROW EXECUTE FUNCTION public.update_price_history_on_flyer_item_insert(); + +-- 4. Create a trigger function to recalculate price history when a flyer item is deleted. +CREATE OR REPLACE FUNCTION public.recalculate_price_history_on_flyer_item_delete() +RETURNS TRIGGER AS $$ +DECLARE + flyer_valid_from DATE; + flyer_valid_to DATE; + current_summary_date DATE; + new_aggregates RECORD; +BEGIN + -- Only run if the deleted item was linked to a master item and had a price.
+ IF OLD.master_item_id IS NULL OR OLD.price_in_cents IS NULL THEN + RETURN OLD; + END IF; + + -- Get the validity dates of the flyer. + SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to + FROM public.flyers + WHERE id = OLD.flyer_id; + + -- If the flyer dates are not set, we cannot proceed. + IF flyer_valid_from IS NULL OR flyer_valid_to IS NULL THEN + RETURN OLD; + END IF; + + -- Loop through each day the flyer was valid to recalculate the history. + FOR current_summary_date IN SELECT generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval)::date LOOP + -- Recalculate aggregates for the master item on this specific day from all other flyers. + SELECT + MIN(fi.price_in_cents) AS min_price, + MAX(fi.price_in_cents) AS max_price, + ROUND(AVG(fi.price_in_cents)) AS avg_price, + COUNT(fi.id) AS data_points + INTO new_aggregates + FROM public.flyer_items fi + JOIN public.flyers f ON fi.flyer_id = f.id + WHERE fi.master_item_id = OLD.master_item_id + AND fi.price_in_cents IS NOT NULL + AND current_summary_date BETWEEN f.valid_from AND f.valid_to; + + -- If there are still data points, update the summary. Otherwise, delete it. + IF new_aggregates.data_points > 0 THEN + UPDATE public.item_price_history + SET + min_price_in_cents = new_aggregates.min_price, + max_price_in_cents = new_aggregates.max_price, + avg_price_in_cents = new_aggregates.avg_price, + data_points_count = new_aggregates.data_points + WHERE master_item_id = OLD.master_item_id AND summary_date = current_summary_date; + ELSE + DELETE FROM public.item_price_history + WHERE master_item_id = OLD.master_item_id AND summary_date = current_summary_date; + END IF; + END LOOP; + + RETURN OLD; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Create the trigger on the flyer_items table for DELETE operations. 
+DROP TRIGGER IF EXISTS trigger_recalculate_price_history_on_delete ON public.flyer_items; +CREATE TRIGGER trigger_recalculate_price_history_on_delete + AFTER DELETE ON public.flyer_items + FOR EACH ROW EXECUTE FUNCTION public.recalculate_price_history_on_flyer_item_delete(); + +-- 5. Trigger function to update the average rating on the recipes table. +CREATE OR REPLACE FUNCTION public.update_recipe_rating_aggregates() +RETURNS TRIGGER AS $$ +BEGIN + UPDATE public.recipes + SET + avg_rating = ( + SELECT AVG(rating) + FROM public.recipe_ratings + WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) + ), + rating_count = ( + SELECT COUNT(*) + FROM public.recipe_ratings + WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) + ) + WHERE id = COALESCE(NEW.recipe_id, OLD.recipe_id); + + RETURN NULL; -- The result is ignored since this is an AFTER trigger. +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- Trigger to call the function after any change to recipe_ratings. +DROP TRIGGER IF EXISTS on_recipe_rating_change ON public.recipe_ratings; +CREATE TRIGGER on_recipe_rating_change + AFTER INSERT OR UPDATE OR DELETE ON public.recipe_ratings + FOR EACH ROW EXECUTE FUNCTION public.update_recipe_rating_aggregates(); \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/cli-latest b/supabase/functions/_shared/.temp/cli-latest new file mode 100644 index 0000000..11335d2 --- /dev/null +++ b/supabase/functions/_shared/.temp/cli-latest @@ -0,0 +1 @@ +v2.54.11 \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/functions/_shared/cors.ts b/supabase/functions/_shared/.temp/functions/_shared/cors.ts new file mode 100644 index 0000000..2a19062 --- /dev/null +++ b/supabase/functions/_shared/.temp/functions/_shared/cors.ts @@ -0,0 +1,6 @@ +// This file provides shared CORS headers for Supabase Edge Functions. +// It allows the web application to securely call these backend functions. 
+export const corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type', +}; diff --git a/supabase/functions/_shared/.temp/gotrue-version b/supabase/functions/_shared/.temp/gotrue-version new file mode 100644 index 0000000..b5583d3 --- /dev/null +++ b/supabase/functions/_shared/.temp/gotrue-version @@ -0,0 +1 @@ +v2.181.0 \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/pooler-url b/supabase/functions/_shared/.temp/pooler-url new file mode 100644 index 0000000..39ccebd --- /dev/null +++ b/supabase/functions/_shared/.temp/pooler-url @@ -0,0 +1 @@ +postgresql://postgres.azmmnxkvjryracrnmhvj@aws-1-us-east-2.pooler.supabase.com:5432/postgres \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/postgres-version b/supabase/functions/_shared/.temp/postgres-version new file mode 100644 index 0000000..c24663d --- /dev/null +++ b/supabase/functions/_shared/.temp/postgres-version @@ -0,0 +1 @@ +17.6.1.036 \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/project-ref b/supabase/functions/_shared/.temp/project-ref new file mode 100644 index 0000000..a77efa8 --- /dev/null +++ b/supabase/functions/_shared/.temp/project-ref @@ -0,0 +1 @@ +azmmnxkvjryracrnmhvj \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/rest-version b/supabase/functions/_shared/.temp/rest-version new file mode 100644 index 0000000..93c142b --- /dev/null +++ b/supabase/functions/_shared/.temp/rest-version @@ -0,0 +1 @@ +v13.0.5 \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/storage-migration b/supabase/functions/_shared/.temp/storage-migration new file mode 100644 index 0000000..581d478 --- /dev/null +++ b/supabase/functions/_shared/.temp/storage-migration @@ -0,0 +1 @@ +fix-object-level \ No newline at end of file diff --git a/supabase/functions/_shared/.temp/storage-version 
b/supabase/functions/_shared/.temp/storage-version new file mode 100644 index 0000000..344cc86 --- /dev/null +++ b/supabase/functions/_shared/.temp/storage-version @@ -0,0 +1 @@ +v1.28.4 \ No newline at end of file diff --git a/supabase/functions/_shared/config.toml b/supabase/functions/_shared/config.toml new file mode 100644 index 0000000..027ee24 --- /dev/null +++ b/supabase/functions/_shared/config.toml @@ -0,0 +1,33 @@ + +[functions.delete-user] +enabled = true +verify_jwt = true +import_map = "./functions/delete-user/deno.json" +# Uncomment to specify a custom file path to the entrypoint. +# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx +entrypoint = "./functions/delete-user/index.ts" +# Specifies static files to be bundled with the function. Supports glob patterns. +# For example, if you want to serve static HTML pages in your function: +# static_files = [ "./functions/delete-user/*.html" ] + +[functions.seed-database] +enabled = true +verify_jwt = true +import_map = "./functions/seed-database/deno.json" +# Uncomment to specify a custom file path to the entrypoint. +# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx +entrypoint = "./functions/seed-database/index.ts" +# Specifies static files to be bundled with the function. Supports glob patterns. +# For example, if you want to serve static HTML pages in your function: +# static_files = [ "./functions/seed-database/*.html" ] + +[functions.system-check] +enabled = true +verify_jwt = true +import_map = "./functions/system-check/deno.json" +# Uncomment to specify a custom file path to the entrypoint. +# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx +entrypoint = "./functions/system-check/index.ts" +# Specifies static files to be bundled with the function. Supports glob patterns. 
+# For example, if you want to serve static HTML pages in your function: +# static_files = [ "./functions/system-check/*.html" ] diff --git a/supabase/functions/_shared/cors.ts b/supabase/functions/_shared/cors.ts new file mode 100644 index 0000000..2a19062 --- /dev/null +++ b/supabase/functions/_shared/cors.ts @@ -0,0 +1,6 @@ +// This file provides shared CORS headers for Supabase Edge Functions. +// It allows the web application to securely call these backend functions. +export const corsHeaders = { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type', +}; diff --git a/supabase/functions/delete-user/.npmrc b/supabase/functions/delete-user/.npmrc new file mode 100644 index 0000000..48c6388 --- /dev/null +++ b/supabase/functions/delete-user/.npmrc @@ -0,0 +1,3 @@ +# Configuration for private npm package dependencies +# For more information on using private registries with Edge Functions, see: +# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries diff --git a/supabase/functions/delete-user/deno.json b/supabase/functions/delete-user/deno.json new file mode 100644 index 0000000..f6ca845 --- /dev/null +++ b/supabase/functions/delete-user/deno.json @@ -0,0 +1,3 @@ +{ + "imports": {} +} diff --git a/supabase/functions/delete-user/index.ts b/supabase/functions/delete-user/index.ts new file mode 100644 index 0000000..da7bfd5 --- /dev/null +++ b/supabase/functions/delete-user/index.ts @@ -0,0 +1,107 @@ +// Follow this setup guide to integrate the Deno language server with your editor: +// https://deno.land/manual/getting_started/setup_your_environment +// This enables autocomplete, go to definition, etc. 
+ +// Setup type definitions for built-in Supabase Runtime APIs +//import "jsr:@supabase/functions-js/edge-runtime.d.ts" + +//console.log("Hello from Functions!") + +//Deno.serve(async (req) => { +// const { name } = await req.json() +// const data = { +// message: `Hello ${name}!`, +// } + +// return new Response( +// JSON.stringify(data), +// { headers: { "Content-Type": "application/json" } }, +// ) +//}) + +/* To invoke locally: + + 1. Run `supabase start` (see: https://supabase.com/docs/reference/cli/supabase-start) + 2. Make an HTTP request: + + curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/delete-user' \ + --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0' \ + --header 'Content-Type: application/json' \ + --data '{"name":"Functions"}' + +*/ +import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'; +import { corsHeaders } from '../_shared/.temp/functions/_shared/cors.ts'; + +Deno.serve(async (req) => { + // Handle preflight OPTIONS request for CORS + if (req.method === 'OPTIONS') { + return new Response('ok', { headers: corsHeaders }); + } + + try { + const { password } = await req.json(); + if (!password) { + throw new Error('Password is required.'); + } + + // Create a Supabase client with the user's authentication token + const authHeader = req.headers.get('Authorization'); + if (!authHeader) { + throw new Error('Missing authorization header.'); + } + + const userSupabaseClient = createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_ANON_KEY')!, + { global: { headers: { Authorization: authHeader } } } + ); + + // Get the user from the token + const { data: { user }, error: userError } = await userSupabaseClient.auth.getUser(); + if (userError || !user) { + return new Response(JSON.stringify({ error: userError?.message || 'User not authenticated.' 
}), { + status: 401, + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + }); + } + + // Verify the user's password by attempting to sign in + const { error: signInError } = await userSupabaseClient.auth.signInWithPassword({ + email: user.email!, + password: password, + }); + + if (signInError) { + return new Response(JSON.stringify({ error: 'Invalid password.' }), { + status: 403, + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + }); + } + + // If password is correct, create an admin client with the service_role key + const adminSupabaseClient = createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!, + { auth: { autoRefreshToken: false, persistSession: false } } + ); + + // Delete the user + const { error: deleteError } = await adminSupabaseClient.auth.admin.deleteUser(user.id); + if (deleteError) { + throw deleteError; + } + + return new Response(JSON.stringify({ message: 'User deleted successfully.' }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }); + + } catch (error) { + // Return a detailed error with a stack trace for better debugging. 
+ return new Response(JSON.stringify({ error: error.message, stack: error.stack }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }); + } +}); \ No newline at end of file diff --git a/supabase/functions/seed-database/.npmrc b/supabase/functions/seed-database/.npmrc new file mode 100644 index 0000000..48c6388 --- /dev/null +++ b/supabase/functions/seed-database/.npmrc @@ -0,0 +1,3 @@ +# Configuration for private npm package dependencies +# For more information on using private registries with Edge Functions, see: +# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries diff --git a/supabase/functions/seed-database/deno.json b/supabase/functions/seed-database/deno.json new file mode 100644 index 0000000..f6ca845 --- /dev/null +++ b/supabase/functions/seed-database/deno.json @@ -0,0 +1,3 @@ +{ + "imports": {} +} diff --git a/supabase/functions/seed-database/index.ts b/supabase/functions/seed-database/index.ts new file mode 100644 index 0000000..233c25a --- /dev/null +++ b/supabase/functions/seed-database/index.ts @@ -0,0 +1,113 @@ +// Follow this setup guide to integrate the Deno language server with your editor: +// https://deno.land/manual/getting_started/setup_your_environment +// This enables autocomplete, go to definition, etc. + +// Setup type definitions for built-in Supabase Runtime APIs +//import "jsr:@supabase/functions-js/edge-runtime.d.ts" + +//console.log("Hello from Functions!") + +//Deno.serve(async (req) => { +// const { name } = await req.json() +// const data = { +// message: `Hello ${name}!`, +// } +// +// return new Response( +// JSON.stringify(data), +// { headers: { "Content-Type": "application/json" } }, +// ) +//}) + +/* To invoke locally: + + 1. Run `supabase start` (see: https://supabase.com/docs/reference/cli/supabase-start) + 2. 
Make an HTTP request: + + curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/seed-database' \ + --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0' \ + --header 'Content-Type: application/json' \ + --data '{"name":"Functions"}' + +*/ + +import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'; +import { corsHeaders } from '../_shared/.temp/functions/_shared/cors.ts'; + +Deno.serve(async (req) => { + if (req.method === 'OPTIONS') { + return new Response('ok', { headers: corsHeaders }); + } + + try { + // We create an admin client using the service_role key to perform elevated actions. + // This key is automatically provided by Supabase in the production environment. + const adminSupabaseClient = createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!, + { auth: { autoRefreshToken: false, persistSession: false } } + ); + + const usersToSeed = [ + { + email: 'admin@example.com', + password: 'password123', + user_metadata: { full_name: 'Admin User' } + }, + { + email: 'user@example.com', + password: 'password123', + user_metadata: { full_name: 'Normal User' } + } + ]; + + const createdUsers = []; + const existingUsers = []; + + const { data: { users: existingUserList }, error: listError } = await adminSupabaseClient.auth.admin.listUsers(); + if (listError) throw listError; + + const existingEmails = new Set(existingUserList.map(u => u.email)); + + for (const user of usersToSeed) { + if (!existingEmails.has(user.email)) { + const { error } = await adminSupabaseClient.auth.admin.createUser({ + email: user.email, + password: user.password, + user_metadata: user.user_metadata, + email_confirm: true, // Auto-confirm for dev environment + }); + + if (error) { + throw new Error(`Failed to create user ${user.email}: ${error.message}`); + } + createdUsers.push(user.email); + } 
else { + existingUsers.push(user.email); + } + } + + let message = ''; + if (createdUsers.length > 0) { + message += `Successfully created users: ${createdUsers.join(', ')}. `; + } + if (existingUsers.length > 0) { + message += `Users already existed: ${existingUsers.join(', ')}.`; + } + if (message === '') { + message = 'All development users already exist.' + } + + return new Response(JSON.stringify({ message: message.trim() }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }); + + } catch (error) { + // Return a detailed error with a stack trace for better debugging. + return new Response(JSON.stringify({ error: error.message, stack: error.stack }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }); + } +}); \ No newline at end of file diff --git a/supabase/functions/system-check/.npmrc b/supabase/functions/system-check/.npmrc new file mode 100644 index 0000000..48c6388 --- /dev/null +++ b/supabase/functions/system-check/.npmrc @@ -0,0 +1,3 @@ +# Configuration for private npm package dependencies +# For more information on using private registries with Edge Functions, see: +# https://supabase.com/docs/guides/functions/import-maps#importing-from-private-registries diff --git a/supabase/functions/system-check/deno.json b/supabase/functions/system-check/deno.json new file mode 100644 index 0000000..f6ca845 --- /dev/null +++ b/supabase/functions/system-check/deno.json @@ -0,0 +1,3 @@ +{ + "imports": {} +} diff --git a/supabase/functions/system-check/index.ts b/supabase/functions/system-check/index.ts new file mode 100644 index 0000000..bcd679c --- /dev/null +++ b/supabase/functions/system-check/index.ts @@ -0,0 +1,129 @@ +// Follow this setup guide to integrate the Deno language server with your editor: +// https://deno.land/manual/getting_started/setup_your_environment +// This enables autocomplete, go to definition, etc. 
+ +// Setup type definitions for built-in Supabase Runtime APIs +// import "jsr:@supabase/functions-js/edge-runtime.d.ts" + +// console.log("Hello from Functions!") + +// Deno.serve(async (req) => { +// const { name } = await req.json() +// const data = { +// message: `Hello ${name}!`, +// } + +// return new Response( +// JSON.stringify(data), +// { headers: { "Content-Type": "application/json" } }, +// ) +// }) + +/* To invoke locally: + + 1. Run `supabase start` (see: https://supabase.com/docs/reference/cli/supabase-start) + 2. Make an HTTP request: + + curl -i --location --request POST 'http://127.0.0.1:54321/functions/v1/system-check' \ + --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0' \ + --header 'Content-Type: application/json' \ + --data '{"name":"Functions"}' + +*/ + +import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'; +import { corsHeaders } from '../_shared/.temp/functions/_shared/cors.ts'; + +// Helper function to create a Supabase admin client +const createAdminClient = () => createClient( + Deno.env.get('SUPABASE_URL')!, + Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!, + { auth: { autoRefreshToken: false, persistSession: false } } +); + +const checkDatabaseSchema = async (client) => { + const { data, error } = await client.rpc('check_schema'); + if (error) throw new Error(`Schema check failed: ${error.message}`); + if (!data) throw new Error('Schema check returned no data.'); + + const requiredTables = ['flyers', 'flyer_items', 'profiles', 'shopping_lists']; + const missingTables = requiredTables.filter(t => !data.tables.includes(t)); + if (missingTables.length > 0) { + return { pass: false, message: `Missing tables: ${missingTables.join(', ')}. Please run the full schema.sql.txt script.` }; + } + return { pass: true, message: 'All required tables exist.' 
}; +}; + +const checkRlsPolicies = async (client) => { + const { data, error } = await client.rpc('check_rls'); + if (error) throw new Error(`RLS check failed: ${error.message}`); + + const requiredPolicies = { + 'profiles': 'Users can update their own profile.', + 'shopping_lists': 'Users can manage their own shopping lists.', + 'flyer_items': `Allow anon users to manage test items` + }; + + for (const table in requiredPolicies) { + const policyName = requiredPolicies[table]; + if (!data.some(p => p.table_name === table && p.policy_name === policyName)) { + return { pass: false, message: `Missing RLS policy "${policyName}" on table "${table}". Please run the schema.sql.txt script.` }; + } + } + return { pass: true, message: 'Key RLS policies are in place.' }; +}; + +const checkUserCreationTrigger = async (client) => { + const { data, error } = await client.rpc('check_trigger_security'); + if (error) throw new Error(`Trigger check failed: ${error.message}`); + + if (!data || data.length === 0) { + return { pass: false, message: `Function 'handle_new_user' not found. Please run schema.sql.txt.` }; + } + + const handleNewUser = data[0]; + if (!handleNewUser.is_security_definer) { + return { pass: false, message: `Trigger function 'handle_new_user' must be 'SECURITY DEFINER'. Please run the schema.sql.txt script to fix it.` }; + } + if (handleNewUser.owner_role !== 'postgres') { + return { pass: false, message: `Trigger function 'handle_new_user' must be owned by 'postgres'. Yours is owned by '${handleNewUser.owner_role}'. Please run schema.sql.txt to fix it.` }; + } + return { pass: true, message: 'User creation trigger is correctly configured.' }; +}; + +const checkStorageBucket = async (client) => { + const { data, error } = await client.storage.getBucket('flyers'); + if (error) { + return { pass: false, message: `Failed to access 'flyers' bucket: ${error.message}. 
Ensure it exists and permissions are set.` }; + } + if (!data.public) { + return { pass: false, message: `Storage bucket 'flyers' is not public. Please enable public access in your Supabase dashboard.` }; + } + return { pass: true, message: "'flyers' bucket exists and is public." }; +}; + +Deno.serve(async (req) => { + if (req.method === 'OPTIONS') { + return new Response('ok', { headers: corsHeaders }); + } + + try { + const adminClient = createAdminClient(); + const results = {}; + + results['schema'] = await checkDatabaseSchema(adminClient); + results['rls'] = await checkRlsPolicies(adminClient); + results['trigger'] = await checkUserCreationTrigger(adminClient); + results['storage'] = await checkStorageBucket(adminClient); + + return new Response(JSON.stringify({ results }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 200, + }); + } catch (error) { + return new Response(JSON.stringify({ error: error.message, stack: error.stack }), { + headers: { ...corsHeaders, 'Content-Type': 'application/json' }, + status: 500, + }); + } +}); \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..2c6eed5 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,29 @@ +{ + "compilerOptions": { + "target": "ES2022", + "experimentalDecorators": true, + "useDefineForClassFields": false, + "module": "ESNext", + "lib": [ + "ES2022", + "DOM", + "DOM.Iterable" + ], + "skipLibCheck": true, + "types": [ + "node" + ], + "moduleResolution": "bundler", + "isolatedModules": true, + "moduleDetection": "force", + "allowJs": true, + "jsx": "react-jsx", + "paths": { + "@/*": [ + "./*" + ] + }, + "allowImportingTsExtensions": true, + "noEmit": true + } +} \ No newline at end of file diff --git a/types.ts b/types.ts new file mode 100644 index 0000000..87faf1a --- /dev/null +++ b/types.ts @@ -0,0 +1,283 @@ +export interface Store { + id: number; + created_at: string; + name: string; + logo_url?: string | null; +} + +export 
interface Flyer { + id: number; + created_at: string; + file_name: string; + image_url: string; + checksum?: string; + store_id?: number; + valid_from?: string | null; + valid_to?: string | null; + store_address?: string | null; + store?: Store; +} + +export interface UnitPrice { + value: number; + unit: string; // e.g., 'g', 'kg', 'ml', 'l', 'oz', 'lb', 'each' +} + +export interface FlyerItem { + id?: number; + flyer_id?: number; + created_at?: string; + item: string; + price_display: string; + price_in_cents: number | null; + quantity: string; + quantity_num?: number | null; + master_item_id?: number; + master_item_name?: string | null; + category_id?: number | null; + category_name?: string | null; + unit_price?: UnitPrice | null; + product_id?: number | null; +} + +export interface MasterGroceryItem { + id: number; + created_at: string; + name: string; + category_id?: number | null; + category_name?: string | null; +} + +export interface Brand { + id: number; + name: string; +} + +export interface Product { + id: number; + master_item_id: number; + brand_id?: number | null; + name: string; + description?: string | null; + size?: string | null; + upc_code?: string | null; +} + +export interface DealItem { + item: string; + price_display: string; + price_in_cents: number | null; + quantity: string; + storeName: string; + master_item_name?: string | null; + unit_price?: UnitPrice | null; +} + +// User-specific types +export interface Profile { + id: string; // UUID + updated_at?: string; + full_name?: string | null; + avatar_url?: string | null; + preferences?: { + darkMode?: boolean; + unitSystem?: 'metric' | 'imperial'; + } | null; +} + +export interface UserAlert { + id: number; + user_watched_item_id: number; + alert_type: 'PRICE_BELOW' | 'PERCENT_OFF_AVERAGE'; + threshold_value: number; + is_active: boolean; + created_at: string; +} + +export interface Notification { + id: number; + user_id: string; // UUID + content: string; + link_url?: string | null; + 
is_read: boolean; + created_at: string; +} + +export interface ShoppingList { + id: number; + user_id: string; // UUID + name: string; + created_at: string; + items: ShoppingListItem[]; // Nested items +} + +export interface ShoppingListItem { + id: number; + shopping_list_id: number; + master_item_id?: number | null; + custom_item_name?: string | null; + quantity: number; + is_purchased: boolean; + added_at: string; + // Joined data for display + master_item?: { + name: string; + } | null; +} + + +export interface SuggestedCorrection { + id: number; + flyer_item_id: number; + user_id: string; // UUID + correction_type: string; // e.g., 'WRONG_PRICE', 'INCORRECT_ITEM_LINK' + suggested_value: string; + status: 'pending' | 'approved' | 'rejected'; + created_at: string; + reviewed_notes?: string | null; + reviewed_at?: string | null; +} + +export interface UserSubmittedPrice { + id: number; + user_id: string; // UUID + master_item_id: number; + store_id: number; + price_in_cents: number; + photo_url?: string | null; + upvotes: number; + downvotes: number; + created_at: string; +} + +export interface ItemPriceHistory { + id: number; + master_item_id: number; + summary_date: string; // DATE + min_price_in_cents?: number | null; + max_price_in_cents?: number | null; + avg_price_in_cents?: number | null; + data_points_count: number; +} + +export interface MasterItemAlias { + id: number; + master_item_id: number; + alias: string; +} + +export interface Recipe { + id: number; + user_id?: string | null; // UUID + name: string; + description?: string | null; + instructions?: string | null; + prep_time_minutes?: number | null; + cook_time_minutes?: number | null; + servings?: number | null; + photo_url?: string | null; + calories_per_serving?: number | null; + protein_grams?: number | null; + fat_grams?: number | null; + carb_grams?: number | null; + avg_rating: number; + rating_count: number; + created_at: string; +} + +export interface RecipeIngredient { + id: number; + 
recipe_id: number; + master_item_id: number; + quantity: number; + unit: string; +} + +export interface Tag { + id: number; + name: string; +} + +export interface RecipeTag { + recipe_id: number; + tag_id: number; +} + +export interface RecipeRating { + id: number; + recipe_id: number; + user_id: string; // UUID + rating: number; + comment?: string | null; + created_at: string; +} + +export interface MenuPlan { + id: number; + user_id: string; // UUID + name: string; + start_date: string; // DATE + end_date: string; // DATE + created_at: string; +} + +export interface PlannedMeal { + id: number; + menu_plan_id: number; + recipe_id: number; + plan_date: string; // DATE + meal_type: string; + servings_to_cook?: number | null; +} + +export interface PantryItem { + id: number; + user_id: string; // UUID + master_item_id: number; + quantity: number; + unit?: string | null; + best_before_date?: string | null; // DATE + updated_at: string; +} + +export interface StoreLocation { + id: number; + store_id: number; + address: string; + city?: string | null; + province_state?: string | null; + postal_code?: string | null; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + location?: any; // Represents PostGIS GEOGRAPHY(Point, 4326) +} + +export interface FlyerLocation { + flyer_id: number; + store_location_id: number; +} + +export enum AnalysisType { + QUICK_INSIGHTS = 'QUICK_INSIGHTS', + DEEP_DIVE = 'DEEP_DIVE', + WEB_SEARCH = 'WEB_SEARCH', + PLAN_TRIP = 'PLAN_TRIP', + GENERATE_IMAGE = 'GENERATE_IMAGE', +} + +export type StageStatus = 'pending' | 'in-progress' | 'completed' | 'error'; + +export interface ProcessingStage { + name: string; + status: StageStatus; + detail?: string; + critical?: boolean; + progress?: { current: number; total: number } | null; +} + +export const CATEGORIES = [ + 'Fruits & Vegetables', 'Meat & Seafood', 'Dairy & Eggs', 'Bakery & Bread', + 'Pantry & Dry Goods', 'Beverages', 'Frozen Foods', 'Snacks', 'Household & Cleaning', + 
'Personal Care & Health', 'Baby & Child', 'Pet Supplies', 'Deli & Prepared Foods', + 'Canned Goods', 'Condiments & Spices', 'Breakfast & Cereal', 'Organic', + 'International Foods', 'Other/Miscellaneous' +]; \ No newline at end of file diff --git a/utils/audioUtils.ts b/utils/audioUtils.ts new file mode 100644 index 0000000..d238711 --- /dev/null +++ b/utils/audioUtils.ts @@ -0,0 +1,58 @@ +/** + * Encodes a Uint8Array into a base64 string. + * This is a required utility for handling audio data for the Gemini API. + * @param bytes The Uint8Array to encode. + * @returns A base64 encoded string. + */ +export function encode(bytes: Uint8Array): string { + let binary = ''; + const len = bytes.byteLength; + for (let i = 0; i < len; i++) { + binary += String.fromCharCode(bytes[i]); + } + return btoa(binary); +} + +/** + * Decodes a base64 string into a Uint8Array. + * This is a required utility for handling audio data from the Gemini API. + * @param base64 The base64 encoded string. + * @returns A Uint8Array of the decoded data. + */ +export function decode(base64: string): Uint8Array { + const binaryString = atob(base64); + const len = binaryString.length; + const bytes = new Uint8Array(len); + for (let i = 0; i < len; i++) { + bytes[i] = binaryString.charCodeAt(i); + } + return bytes; +} + +/** + * Decodes raw PCM audio data into an AudioBuffer that can be played by the browser. + * This is a required utility for handling audio data from the Gemini API. + * @param data The raw audio data as a Uint8Array. + * @param ctx The AudioContext to use for creating the buffer. + * @param sampleRate The sample rate of the audio (e.g., 24000 for TTS). + * @param numChannels The number of audio channels (typically 1). + * @returns A promise that resolves to an AudioBuffer. 
+ */ +export async function decodeAudioData( + data: Uint8Array, + ctx: AudioContext, + sampleRate: number, + numChannels: number, +): Promise { + const dataInt16 = new Int16Array(data.buffer); + const frameCount = dataInt16.length / numChannels; + const buffer = ctx.createBuffer(numChannels, frameCount, sampleRate); + + for (let channel = 0; channel < numChannels; channel++) { + const channelData = buffer.getChannelData(channel); + for (let i = 0; i < frameCount; i++) { + channelData[i] = dataInt16[i * numChannels + channel] / 32768.0; + } + } + return buffer; +} diff --git a/utils/checksum.ts b/utils/checksum.ts new file mode 100644 index 0000000..428e654 --- /dev/null +++ b/utils/checksum.ts @@ -0,0 +1,12 @@ +/** + * Generates a SHA-256 checksum for a file. + * @param file The file to hash. + * @returns A promise that resolves to the hex string of the checksum. + */ +export const generateFileChecksum = async (file: File): Promise => { + const buffer = await file.arrayBuffer(); + const hashBuffer = await crypto.subtle.digest('SHA-256', buffer); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); + return hashHex; +}; diff --git a/utils/pdfConverter.ts b/utils/pdfConverter.ts new file mode 100644 index 0000000..0b3b3c1 --- /dev/null +++ b/utils/pdfConverter.ts @@ -0,0 +1,66 @@ +// This assumes pdfjsLib is available on the window, loaded from index.html +declare const pdfjsLib: any; + +// FIX: Add a global declaration for `window.pdfjsWorker` to inform TypeScript +// that this property is available, as it's loaded via a script in index.html. +declare global { + interface Window { + pdfjsWorker: string; + } +} + +/** + * Converts all pages of a PDF file into an array of image File objects. + * @param pdfFile The PDF file to convert. + * @param onProgress Optional callback to report conversion progress. 
+ * @returns A promise that resolves to an object containing the array of image files and the total page count. + */ +export const convertPdfToImageFiles = async ( + pdfFile: File, + onProgress?: (currentPage: number, totalPages: number) => void +): Promise<{ imageFiles: File[], pageCount: number }> => { + if (typeof pdfjsLib === 'undefined' || !pdfjsLib.GlobalWorkerOptions.workerSrc) { + // Set workerSrc if not already set by the script in index.html + pdfjsLib.GlobalWorkerOptions.workerSrc = window.pdfjsWorker; + } + + const arrayBuffer = await pdfFile.arrayBuffer(); + const pdf = await pdfjsLib.getDocument(arrayBuffer).promise; + const pageCount = pdf.numPages; + const imageFiles: File[] = []; + + for (let i = 1; i <= pageCount; i++) { + onProgress?.(i, pageCount); + const page = await pdf.getPage(i); + const viewport = page.getViewport({ scale: 1.5 }); + + const canvas = document.createElement('canvas'); + const context = canvas.getContext('2d'); + canvas.height = viewport.height; + canvas.width = viewport.width; + + if (!context) { + throw new Error('Could not get canvas context'); + } + + await page.render({ canvasContext: context, viewport: viewport }).promise; + + const blob = await new Promise((resolve) => { + canvas.toBlob(resolve, 'image/jpeg', 0.9); + }); + + if (blob) { + const newFileName = pdfFile.name.replace(/\.pdf$/i, `_page_${i}.jpeg`); + const imageFile = new File([blob], newFileName, { type: 'image/jpeg' }); + imageFiles.push(imageFile); + } else { + console.warn(`Failed to convert page ${i} of PDF to blob.`); + } + } + + if (imageFiles.length === 0 && pageCount > 0) { + throw new Error('PDF conversion resulted in zero images, though the PDF has pages. 
It might be corrupted or contain non-standard content.'); + } + + return { imageFiles, pageCount }; +}; diff --git a/utils/priceParser.ts b/utils/priceParser.ts new file mode 100644 index 0000000..b0fb7db --- /dev/null +++ b/utils/priceParser.ts @@ -0,0 +1,55 @@ +export const parsePrice = (price: string): number => { + // Handles formats like "$3.99", "2 for $5", "$0.69" + const match = price.match(/\$?(\d+\.?\d*)/); + if (!match) return 0; + + let numericPrice = parseFloat(match[1]); + + const forMatch = price.match(/(\d+)\s+for/i); + if(forMatch){ + const quantity = parseInt(forMatch[1], 10); + if(quantity > 0){ + return numericPrice / quantity; + } + } + + return numericPrice; +}; + + +/** + * Parses a price string into an integer number of cents. + * Handles formats like "$10.99", "99¢", "250". + * @param price The price string to parse. + * @returns The price in cents, or null if unparsable. + */ +export const parsePriceToCents = (price: string): number | null => { + if (!price || typeof price !== 'string') return null; + + const cleanedPrice = price.trim(); + + // Handle "X for Y" cases - these are not a single item price. + if (cleanedPrice.match(/\d+\s+for/i)) { + return null; + } + + // Handle "99¢" format + const centsMatch = cleanedPrice.match(/(\d+\.?\d*)\s?¢/); + if (centsMatch && centsMatch[1]) { + return Math.round(parseFloat(centsMatch[1])); + } + + // Handle "$10.99" or "10.99" format + const dollarsMatch = cleanedPrice.match(/\$?(\d+\.?\d*)/); + if (dollarsMatch && dollarsMatch[1]) { + const numericValue = parseFloat(dollarsMatch[1]); + // If the original string did not contain a decimal, and it's a whole number > 50, + // it's likely already in cents (e.g., "399" for "$3.99"). 
+ if (!cleanedPrice.includes('.') && numericValue > 50 && numericValue % 1 === 0) { + return numericValue; + } + return Math.round(numericValue * 100); + } + + return null; +}; diff --git a/utils/processingTimer.ts b/utils/processingTimer.ts new file mode 100644 index 0000000..f200231 --- /dev/null +++ b/utils/processingTimer.ts @@ -0,0 +1,44 @@ +const PROCESSING_TIMES_KEY = 'flyerProcessingTimes'; +const MAX_SAMPLES = 5; + +/** + * Records the duration of a flyer processing job in localStorage. + * @param durationInSeconds - The processing time in seconds. + */ +export const recordProcessingTime = (durationInSeconds: number): void => { + try { + const storedTimes = localStorage.getItem(PROCESSING_TIMES_KEY); + const times: number[] = storedTimes ? JSON.parse(storedTimes) : []; + + // Add the new time and keep only the last MAX_SAMPLES + times.push(durationInSeconds); + const recentTimes = times.slice(-MAX_SAMPLES); + + localStorage.setItem(PROCESSING_TIMES_KEY, JSON.stringify(recentTimes)); + } catch (error) { + console.error("Could not record processing time:", error); + } +}; + +/** + * Calculates the average processing time from stored durations. + * @returns The average time in seconds, or a default of 45 seconds if no data exists. 
+ */ +export const getAverageProcessingTime = (): number => { + try { + const storedTimes = localStorage.getItem(PROCESSING_TIMES_KEY); + if (!storedTimes) return 45; // Default estimate if no history + + const times: number[] = JSON.parse(storedTimes); + if (times.length === 0) return 45; + + const sum = times.reduce((acc, time) => acc + time, 0); + const average = sum / times.length; + + // Return a rounded, reasonable number + return Math.round(average); + } catch (error) { + console.error("Could not get average processing time:", error); + return 45; // Default on error + } +}; diff --git a/utils/timeout.ts b/utils/timeout.ts new file mode 100644 index 0000000..bc9799b --- /dev/null +++ b/utils/timeout.ts @@ -0,0 +1,23 @@ +/** + * Wraps a promise with a timeout. + * @param promise The promise to wrap. + * @param ms The timeout duration in milliseconds. + * @returns A promise that resolves or rejects with the original promise, or rejects with a timeout error. + */ +export function withTimeout(promise: Promise, ms: number): Promise { + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + reject(new Error(`Operation timed out after ${ms / 1000} seconds`)); + }, ms); + + promise + .then(value => { + clearTimeout(timer); + resolve(value); + }) + .catch(reason => { + clearTimeout(timer); + reject(reason); + }); + }); +} diff --git a/utils/unitConverter.ts b/utils/unitConverter.ts new file mode 100644 index 0000000..8e27dce --- /dev/null +++ b/utils/unitConverter.ts @@ -0,0 +1,95 @@ +import type { UnitPrice } from '../types'; + +const METRIC_UNITS = ['g', 'kg', 'ml', 'l']; +const IMPERIAL_UNITS = ['oz', 'lb', 'fl oz']; + +const CONVERSIONS: Record = { + // metric to imperial + g: { to: 'oz', factor: 0.035274 }, + kg: { to: 'lb', factor: 2.20462 }, + ml: { to: 'fl oz', factor: 0.033814 }, + l: { to: 'fl oz', factor: 33.814 }, + + // imperial to metric + oz: { to: 'g', factor: 28.3495 }, + lb: { to: 'kg', factor: 0.453592 }, + 'fl oz': { to: 
'ml', factor: 29.5735 },
};

// Display-ready pieces of a unit price: formatted currency and a "/unit" suffix.
interface FormattedPrice {
  price: string;
  unit: string | null;
}

/**
 * Converts a unit price to the target system and formats it for display.
 * @param unitPrice The structured unit price object from the database.
 * @param system The target system ('metric' or 'imperial').
 * @returns An object with formatted price and unit strings.
 */
export const formatUnitPrice = (unitPrice: UnitPrice | null | undefined, system: 'metric' | 'imperial'): FormattedPrice => {
  if (!unitPrice || typeof unitPrice.value !== 'number' || !unitPrice.unit) {
    return { price: '—', unit: null };
  }

  const { value, unit } = unitPrice;

  let displayValue = value;
  let displayUnit = unit;

  // Convert only when the stored unit belongs to the opposite system.
  const crossesSystems =
    (system === 'imperial' && METRIC_UNITS.includes(unit)) ||
    (system === 'metric' && IMPERIAL_UNITS.includes(unit));

  if (crossesSystems) {
    const conversion = CONVERSIONS[unit];
    if (conversion) {
      displayValue = value * conversion.factor;
      displayUnit = conversion.to;
    }
  }

  // Smart formatting: sub-10¢ unit prices keep a third decimal so tiny
  // per-gram/per-ml prices remain meaningful.
  const fractionDigits = displayValue < 0.10 ? 3 : 2;
  const formattedPrice = displayValue.toLocaleString('en-US', {
    style: 'currency',
    currency: 'USD',
    minimumFractionDigits: fractionDigits,
    maximumFractionDigits: fractionDigits,
  });

  // Always show a unit if one exists for clarity.
  if (displayUnit === 'each') {
    return { price: formattedPrice, unit: '/each' };
  }
  return { price: formattedPrice, unit: `/${displayUnit}` };
};

/**
 * Converts an imperial unit price to its metric equivalent for database storage.
 * @param unitPrice The structured unit price object, potentially in imperial units.
 * @returns A unit price object with metric units, or the original if already metric or not applicable.
 */
export const convertToMetric = (unitPrice: UnitPrice | null | undefined): UnitPrice | null | undefined => {
  if (!unitPrice || typeof unitPrice.value !== 'number' || !unitPrice.unit) {
    return unitPrice;
  }

  const { value, unit } = unitPrice;

  if (IMPERIAL_UNITS.includes(unit)) {
    const conversion = CONVERSIONS[unit];
    if (conversion) {
      return {
        value: value * conversion.factor,
        unit: conversion.to,
      };
    }
  }

  // Already metric, or a count-style unit like 'each': pass through unchanged.
  return unitPrice;
};

// --- patch boundary: b/vite.config.ts (new file, index 0000000..ee5fb8d) ---

import path from 'path';
import { defineConfig, loadEnv } from 'vite';
import react from '@vitejs/plugin-react';

export default defineConfig(({ mode }) => {
  const env = loadEnv(mode, '.', '');
  return {
    server: {
      port: 3000,
      host: '0.0.0.0',
    },
    plugins: [react()],
    define: {
      // Both aliases point at the same env value so either lookup works at runtime.
      'process.env.API_KEY': JSON.stringify(env.GEMINI_API_KEY),
      'process.env.GEMINI_API_KEY': JSON.stringify(env.GEMINI_API_KEY)
    },
    resolve: {
      alias: {
        '@': path.resolve(__dirname, '.'),
      }
    }
  };
});