Compare commits
21 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 6e70f08e3c | |
| | 459f5f7976 | |
| | a2e6331ddd | |
| | 13cd30bec9 | |
| | baeb9488c6 | |
| | 0cba0f987e | |
| | 958a79997d | |
| | 8fb1c96f93 | |
| | 6e6fe80c7f | |
| | d1554050bd | |
| | b1fae270bb | |
| | c852483e18 | |
| | 2e01ad5bc9 | |
| | 26763c7183 | |
| | f0c5c2c45b | |
| | 034bb60fd5 | |
| | d4b389cb79 | |
| | a71fb81468 | |
| | 9bee0a013b | |
| | 8bcb4311b3 | |
| | 9fd15f3a50 | |
@@ -20,6 +20,9 @@ Create a new test file for `StatCard.tsx` to verify its props and rendering.
+ while assuming that master_schema_rollup.sql is the "ultimate source of truth", issues can happen and it may not have been properly
+ updated - look for differences between these files
+ UPC SCANNING !
package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.7.24",
+  "version": "0.9.4",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.7.24",
+      "version": "0.9.4",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
package.json

@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.7.24",
+  "version": "0.9.4",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -1,477 +1,8 @@
|
||||
-- sql/Initial_triggers_and_functions.sql
|
||||
-- This file contains all trigger functions and trigger definitions for the database.
|
||||
|
||||
-- 1. Set up the trigger to automatically create a profile when a new user signs up.
|
||||
-- This function is called by a trigger on the `public.users` table.
|
||||
DROP FUNCTION IF EXISTS public.handle_new_user();
|
||||
|
||||
-- It creates a corresponding profile and a default shopping list for the new user.
|
||||
-- It now accepts full_name and avatar_url from the user's metadata.
|
||||
CREATE OR REPLACE FUNCTION public.handle_new_user()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
new_profile_id UUID;
|
||||
user_meta_data JSONB;
|
||||
BEGIN
|
||||
-- The user's metadata (full_name, avatar_url) is passed via a temporary session variable.
|
||||
user_meta_data := current_setting('my_app.user_metadata', true)::JSONB;
|
||||
|
||||
INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
|
||||
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
|
||||
RETURNING user_id INTO new_profile_id;
|
||||
|
||||
-- Also create a default shopping list for the new user.
|
||||
INSERT INTO public.shopping_lists (user_id, name)
|
||||
VALUES (new.user_id, 'Main Shopping List');
|
||||
|
||||
-- Log the new user event
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (new.user_id, 'user_registered',
|
||||
COALESCE(user_meta_data->>'full_name', new.email) || ' has registered.',
|
||||
'user-plus',
|
||||
jsonb_build_object('email', new.email)
|
||||
);
|
||||
|
||||
RETURN new;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- This trigger calls the function after a new user is created.
|
||||
DROP TRIGGER IF EXISTS on_auth_user_created ON public.users;
|
||||
CREATE TRIGGER on_auth_user_created
|
||||
AFTER INSERT ON public.users
|
||||
FOR EACH ROW EXECUTE FUNCTION public.handle_new_user();
|
||||
|
||||
-- 2. Create a reusable function to automatically update 'updated_at' columns.
|
||||
DROP FUNCTION IF EXISTS public.handle_updated_at();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.handle_updated_at()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = now();
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Dynamically apply the 'handle_updated_at' trigger to all tables in the public schema
|
||||
-- that have an 'updated_at' column. This is more maintainable than creating a separate
|
||||
-- trigger for each table.
|
||||
DO $$
|
||||
DECLARE
|
||||
t_name TEXT;
|
||||
BEGIN
|
||||
FOR t_name IN
|
||||
SELECT table_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public' AND column_name = 'updated_at'
|
||||
LOOP
|
||||
EXECUTE format('DROP TRIGGER IF EXISTS on_%s_updated ON public.%I;
|
||||
CREATE TRIGGER on_%s_updated
|
||||
BEFORE UPDATE ON public.%I
|
||||
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();',
|
||||
t_name, t_name, t_name, t_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$$;
|
||||
|
||||
-- 3. Create a trigger function to populate the item_price_history table on insert.
|
||||
DROP FUNCTION IF EXISTS public.update_price_history_on_flyer_item_insert();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.update_price_history_on_flyer_item_insert()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
flyer_valid_from DATE;
|
||||
flyer_valid_to DATE;
|
||||
current_summary_date DATE;
|
||||
flyer_location_id BIGINT;
|
||||
BEGIN
|
||||
-- If the item could not be matched, add it to the unmatched queue for review.
|
||||
IF NEW.master_item_id IS NULL THEN
|
||||
INSERT INTO public.unmatched_flyer_items (flyer_item_id)
|
||||
VALUES (NEW.flyer_item_id)
|
||||
ON CONFLICT (flyer_item_id) DO NOTHING;
|
||||
END IF;
|
||||
|
||||
-- Only run if the new flyer item is linked to a master item and has a price.
|
||||
IF NEW.master_item_id IS NULL OR NEW.price_in_cents IS NULL THEN
|
||||
RETURN NEW;
|
||||
END IF;
|
||||
|
||||
-- Get the validity dates of the flyer and the store_id.
|
||||
SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to
|
||||
FROM public.flyers
|
||||
WHERE flyer_id = NEW.flyer_id;
|
||||
|
||||
-- This single, set-based query is much more performant than looping.
|
||||
-- It generates all date/location pairs and inserts/updates them in one operation.
|
||||
INSERT INTO public.item_price_history (master_item_id, summary_date, store_location_id, min_price_in_cents, max_price_in_cents, avg_price_in_cents, data_points_count)
|
||||
SELECT
|
||||
NEW.master_item_id,
|
||||
d.day,
|
||||
fl.store_location_id,
|
||||
NEW.price_in_cents,
|
||||
NEW.price_in_cents,
|
||||
NEW.price_in_cents,
|
||||
1
|
||||
FROM public.flyer_locations fl
|
||||
CROSS JOIN generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval) AS d(day)
|
||||
WHERE fl.flyer_id = NEW.flyer_id
|
||||
ON CONFLICT (master_item_id, summary_date, store_location_id)
|
||||
DO UPDATE SET
|
||||
min_price_in_cents = LEAST(item_price_history.min_price_in_cents, EXCLUDED.min_price_in_cents),
|
||||
max_price_in_cents = GREATEST(item_price_history.max_price_in_cents, EXCLUDED.max_price_in_cents),
|
||||
avg_price_in_cents = ROUND(((item_price_history.avg_price_in_cents * item_price_history.data_points_count) + EXCLUDED.avg_price_in_cents) / (item_price_history.data_points_count + 1.0)),
|
||||
data_points_count = item_price_history.data_points_count + 1;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Create the trigger on the flyer_items table for insert.
|
||||
DROP TRIGGER IF EXISTS trigger_update_price_history ON public.flyer_items;
|
||||
CREATE TRIGGER trigger_update_price_history
|
||||
AFTER INSERT ON public.flyer_items
|
||||
FOR EACH ROW EXECUTE FUNCTION public.update_price_history_on_flyer_item_insert();
|
||||
|
||||
-- 4. Create a trigger function to recalculate price history when a flyer item is deleted.
|
||||
DROP FUNCTION IF EXISTS public.recalculate_price_history_on_flyer_item_delete();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.recalculate_price_history_on_flyer_item_delete()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
affected_dates RECORD;
|
||||
BEGIN
|
||||
-- Only run if the deleted item was linked to a master item and had a price.
|
||||
IF OLD.master_item_id IS NULL OR OLD.price_in_cents IS NULL THEN
|
||||
RETURN OLD;
|
||||
END IF;
|
||||
|
||||
-- This single, set-based query is much more performant than looping.
|
||||
-- It recalculates aggregates for all affected dates and locations at once.
|
||||
WITH affected_days_and_locations AS (
|
||||
-- 1. Get all date/location pairs affected by the deleted item's flyer.
|
||||
SELECT DISTINCT
|
||||
generate_series(f.valid_from, f.valid_to, '1 day'::interval)::date AS summary_date,
|
||||
fl.store_location_id
|
||||
FROM public.flyers f
|
||||
JOIN public.flyer_locations fl ON f.flyer_id = fl.flyer_id
|
||||
WHERE f.flyer_id = OLD.flyer_id
|
||||
),
|
||||
new_aggregates AS (
|
||||
-- 2. For each affected date/location, recalculate the aggregates from all other relevant flyer items.
|
||||
SELECT
|
||||
adl.summary_date,
|
||||
adl.store_location_id,
|
||||
MIN(fi.price_in_cents) AS min_price,
|
||||
MAX(fi.price_in_cents) AS max_price,
|
||||
ROUND(AVG(fi.price_in_cents))::int AS avg_price,
|
||||
COUNT(fi.flyer_item_id)::int AS data_points
|
||||
FROM affected_days_and_locations adl
|
||||
LEFT JOIN public.flyer_items fi ON fi.master_item_id = OLD.master_item_id AND fi.price_in_cents IS NOT NULL
|
||||
LEFT JOIN public.flyers f ON fi.flyer_id = f.flyer_id AND adl.summary_date BETWEEN f.valid_from AND f.valid_to
|
||||
LEFT JOIN public.flyer_locations fl ON fi.flyer_id = fl.flyer_id AND adl.store_location_id = fl.store_location_id
|
||||
WHERE fl.flyer_id IS NOT NULL -- Ensure the join was successful
|
||||
GROUP BY adl.summary_date, adl.store_location_id
|
||||
)
|
||||
-- 3. Update the history table with the new aggregates.
|
||||
UPDATE public.item_price_history iph
|
||||
SET
|
||||
min_price_in_cents = na.min_price,
|
||||
max_price_in_cents = na.max_price,
|
||||
avg_price_in_cents = na.avg_price,
|
||||
data_points_count = na.data_points
|
||||
FROM new_aggregates na
|
||||
WHERE iph.master_item_id = OLD.master_item_id
|
||||
AND iph.summary_date = na.summary_date
|
||||
AND iph.store_location_id = na.store_location_id;
|
||||
|
||||
-- 4. Delete any history records that no longer have any data points.
|
||||
DELETE FROM public.item_price_history iph
|
||||
WHERE iph.master_item_id = OLD.master_item_id
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM new_aggregates na
|
||||
WHERE na.summary_date = iph.summary_date AND na.store_location_id = iph.store_location_id
|
||||
);
|
||||
|
||||
RETURN OLD;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Create the trigger on the flyer_items table for DELETE operations.
|
||||
DROP TRIGGER IF EXISTS trigger_recalculate_price_history_on_delete ON public.flyer_items;
|
||||
CREATE TRIGGER trigger_recalculate_price_history_on_delete
|
||||
AFTER DELETE ON public.flyer_items
|
||||
FOR EACH ROW EXECUTE FUNCTION public.recalculate_price_history_on_flyer_item_delete();
|
||||
|
||||
-- 5. Trigger function to update the average rating on the recipes table.
|
||||
DROP FUNCTION IF EXISTS public.update_recipe_rating_aggregates();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.update_recipe_rating_aggregates()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
UPDATE public.recipes
|
||||
SET
|
||||
avg_rating = (
|
||||
SELECT AVG(rating)
|
||||
FROM public.recipe_ratings
|
||||
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
|
||||
),
|
||||
rating_count = (
|
||||
SELECT COUNT(*)
|
||||
FROM public.recipe_ratings
|
||||
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
|
||||
)
|
||||
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id);
|
||||
|
||||
RETURN NULL; -- The result is ignored since this is an AFTER trigger.
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after any change to recipe_ratings.
|
||||
DROP TRIGGER IF EXISTS on_recipe_rating_change ON public.recipe_ratings;
|
||||
CREATE TRIGGER on_recipe_rating_change
|
||||
AFTER INSERT OR UPDATE OR DELETE ON public.recipe_ratings
|
||||
FOR EACH ROW EXECUTE FUNCTION public.update_recipe_rating_aggregates();
|
||||
|
||||
-- 6. Trigger function to log the creation of a new recipe.
|
||||
DROP FUNCTION IF EXISTS public.log_new_recipe();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_recipe()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.user_id,
|
||||
'recipe_created',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' created a new recipe: ' || NEW.name,
|
||||
'chef-hat',
|
||||
jsonb_build_object('recipe_id', NEW.recipe_id, 'recipe_name', NEW.name)
|
||||
);
|
||||
|
||||
-- Award 'First Recipe' achievement if it's their first one.
|
||||
PERFORM public.award_achievement(NEW.user_id, 'First Recipe');
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a new recipe is inserted.
|
||||
DROP TRIGGER IF EXISTS on_new_recipe_created ON public.recipes;
|
||||
CREATE TRIGGER on_new_recipe_created
|
||||
AFTER INSERT ON public.recipes
|
||||
FOR EACH ROW
|
||||
WHEN (NEW.user_id IS NOT NULL) -- Only log activity for user-created recipes.
|
||||
EXECUTE FUNCTION public.log_new_recipe();
|
||||
|
||||
-- 7a. Trigger function to update the item_count on the flyers table.
|
||||
DROP FUNCTION IF EXISTS public.update_flyer_item_count();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.update_flyer_item_count()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
IF (TG_OP = 'INSERT') THEN
|
||||
UPDATE public.flyers SET item_count = item_count + 1 WHERE flyer_id = NEW.flyer_id;
|
||||
ELSIF (TG_OP = 'DELETE') THEN
|
||||
UPDATE public.flyers SET item_count = item_count - 1 WHERE flyer_id = OLD.flyer_id;
|
||||
END IF;
|
||||
RETURN NULL; -- The result is ignored since this is an AFTER trigger.
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after any change to flyer_items.
|
||||
-- This ensures the item_count on the parent flyer is always accurate.
|
||||
DROP TRIGGER IF EXISTS on_flyer_item_change ON public.flyer_items;
|
||||
CREATE TRIGGER on_flyer_item_change
|
||||
AFTER INSERT OR DELETE ON public.flyer_items
|
||||
FOR EACH ROW EXECUTE FUNCTION public.update_flyer_item_count();
|
||||
|
||||
-- 7. Trigger function to log the creation of a new flyer.
|
||||
DROP FUNCTION IF EXISTS public.log_new_flyer();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_flyer()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO public.activity_log (action, display_text, icon, details)
|
||||
VALUES (
|
||||
'flyer_uploaded',
|
||||
'A new flyer for ' || (SELECT name FROM public.stores WHERE store_id = NEW.store_id) || ' has been uploaded.',
|
||||
'file-text',
|
||||
jsonb_build_object(
|
||||
'flyer_id', NEW.flyer_id,
|
||||
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id),
|
||||
'valid_from', to_char(NEW.valid_from, 'YYYY-MM-DD'),
|
||||
'valid_to', to_char(NEW.valid_to, 'YYYY-MM-DD')
|
||||
)
|
||||
);
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a new flyer is inserted.
|
||||
DROP TRIGGER IF EXISTS on_new_flyer_created ON public.flyers;
|
||||
CREATE TRIGGER on_new_flyer_created
|
||||
AFTER INSERT ON public.flyers
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_flyer();
|
||||
|
||||
-- 8. Trigger function to log when a user favorites a recipe.
|
||||
DROP FUNCTION IF EXISTS public.log_new_favorite_recipe();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_favorite_recipe()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.user_id,
|
||||
'recipe_favorited',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' favorited the recipe: ' || (SELECT name FROM public.recipes WHERE recipe_id = NEW.recipe_id),
|
||||
'heart',
|
||||
jsonb_build_object(
|
||||
'recipe_id', NEW.recipe_id
|
||||
)
|
||||
);
|
||||
|
||||
-- Award 'First Favorite' achievement.
|
||||
PERFORM public.award_achievement(NEW.user_id, 'First Favorite');
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a recipe is favorited.
|
||||
DROP TRIGGER IF EXISTS on_new_favorite_recipe ON public.favorite_recipes;
|
||||
CREATE TRIGGER on_new_favorite_recipe
|
||||
AFTER INSERT ON public.favorite_recipes
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_favorite_recipe();
|
||||
|
||||
-- 9. Trigger function to log when a user shares a shopping list.
|
||||
DROP FUNCTION IF EXISTS public.log_new_list_share();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_list_share()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.shared_by_user_id,
|
||||
'list_shared',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a shopping list.',
|
||||
'share-2',
|
||||
jsonb_build_object(
|
||||
'shopping_list_id', NEW.shopping_list_id,
|
||||
'list_name', (SELECT name FROM public.shopping_lists WHERE shopping_list_id = NEW.shopping_list_id),
|
||||
'shared_with_user_id', NEW.shared_with_user_id
|
||||
)
|
||||
);
|
||||
|
||||
-- Award 'List Sharer' achievement.
|
||||
PERFORM public.award_achievement(NEW.shared_by_user_id, 'List Sharer');
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a shopping list is shared.
|
||||
DROP TRIGGER IF EXISTS on_new_list_share ON public.shared_shopping_lists;
|
||||
CREATE TRIGGER on_new_list_share
|
||||
AFTER INSERT ON public.shared_shopping_lists
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_list_share();
|
||||
|
||||
-- 9a. Trigger function to log when a user shares a recipe collection.
|
||||
DROP FUNCTION IF EXISTS public.log_new_recipe_collection_share();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_recipe_collection_share()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Log the activity
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.shared_by_user_id, 'recipe_collection_shared',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a recipe collection.',
|
||||
'book',
|
||||
jsonb_build_object('collection_id', NEW.recipe_collection_id, 'shared_with_user_id', NEW.shared_with_user_id)
|
||||
);
|
||||
|
||||
-- Award 'Recipe Sharer' achievement.
|
||||
PERFORM public.award_achievement(NEW.shared_by_user_id, 'Recipe Sharer');
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
DROP TRIGGER IF EXISTS on_new_recipe_collection_share ON public.shared_recipe_collections;
|
||||
CREATE TRIGGER on_new_recipe_collection_share
|
||||
AFTER INSERT ON public.shared_recipe_collections
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_recipe_collection_share();
|
||||
|
||||
-- 10. Trigger function to geocode a store location's address.
|
||||
-- This function is designed to be extensible. In a production environment,
|
||||
-- you would replace the placeholder with a call to an external geocoding service
|
||||
-- (e.g., using the `http` extension or a `plpythonu` function) to convert
|
||||
-- the address into geographic coordinates.
|
||||
DROP FUNCTION IF EXISTS public.geocode_store_location();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.geocode_store_location()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
full_address TEXT;
|
||||
BEGIN
|
||||
-- Only proceed if the address has actually changed.
|
||||
IF TG_OP = 'INSERT' OR (TG_OP = 'UPDATE' AND NEW.address IS DISTINCT FROM OLD.address) THEN
|
||||
-- Concatenate address parts into a single string for the geocoder.
|
||||
full_address := CONCAT_WS(', ', NEW.address, NEW.city, NEW.province_state, NEW.postal_code);
|
||||
|
||||
-- ======================================================================
|
||||
-- Placeholder for Geocoding API Call
|
||||
-- ======================================================================
|
||||
-- In a real application, you would call a geocoding service here.
|
||||
-- For example, using the `http` extension:
|
||||
--
|
||||
-- DECLARE
|
||||
-- response http_get;
|
||||
-- lat NUMERIC;
|
||||
-- lon NUMERIC;
|
||||
-- BEGIN
|
||||
-- SELECT * INTO response FROM http_get('https://api.geocodingservice.com/geocode?address=' || url_encode(full_address));
|
||||
-- lat := (response.content::jsonb)->'results'->0->'geometry'->'location'->'lat';
|
||||
-- lon := (response.content::jsonb)->'results'->0->'geometry'->'location'->'lng';
|
||||
-- NEW.location := ST_SetSRID(ST_MakePoint(lon, lat), 4326)::geography;
|
||||
-- END;
|
||||
--
|
||||
-- For now, this function does nothing, but the trigger is in place.
|
||||
-- If you manually provide lat/lon, you could parse them here.
|
||||
-- For this example, we will assume the `location` might be set manually
|
||||
-- or by a separate batch process.
|
||||
-- ======================================================================
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the geocoding function.
|
||||
DROP TRIGGER IF EXISTS on_store_location_address_change ON public.store_locations;
|
||||
CREATE TRIGGER on_store_location_address_change
|
||||
BEFORE INSERT OR UPDATE ON public.store_locations
|
||||
FOR EACH ROW EXECUTE FUNCTION public.geocode_store_location();
|
||||
|
||||
-- 11. Trigger function to increment the fork_count on the original recipe.
|
||||
DROP FUNCTION IF EXISTS public.increment_recipe_fork_count();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.increment_recipe_fork_count()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Only run if the recipe is a fork (original_recipe_id is not null).
|
||||
IF NEW.original_recipe_id IS NOT NULL THEN
|
||||
UPDATE public.recipes SET fork_count = fork_count + 1 WHERE recipe_id = NEW.original_recipe_id;
|
||||
-- Award 'First Fork' achievement.
|
||||
PERFORM public.award_achievement(NEW.user_id, 'First Fork');
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
DROP TRIGGER IF EXISTS on_recipe_fork ON public.recipes;
|
||||
CREATE TRIGGER on_recipe_fork
|
||||
AFTER INSERT ON public.recipes
|
||||
FOR EACH ROW EXECUTE FUNCTION public.increment_recipe_fork_count();
|
||||
-- ============================================================================
- -- PART 6: DATABASE FUNCTIONS
+ -- PART 3: DATABASE FUNCTIONS
-- ============================================================================
|
||||
-- Function to find the best current sale price for a user's watched items.
|
||||
DROP FUNCTION IF EXISTS public.get_best_sale_prices_for_user(UUID);
|
||||
@@ -1336,8 +867,7 @@ AS $$
|
||||
'list_shared'
|
||||
-- 'new_recipe_rating' could be added here later
|
||||
)
|
||||
-     ORDER BY
-         al.created_at DESC
+     ORDER BY al.created_at DESC, al.display_text, al.icon
|
||||
LIMIT p_limit
|
||||
OFFSET p_offset;
|
||||
$$;
|
||||
@@ -1549,16 +1079,18 @@ $$;
|
||||
-- It replaces the need to call get_best_sale_prices_for_user for each user individually.
|
||||
-- Returns: TABLE(...) - A set of records including user details and deal information.
|
||||
-- =================================================================
|
||||
DROP FUNCTION IF EXISTS public.get_best_sale_prices_for_all_users();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.get_best_sale_prices_for_all_users()
|
||||
RETURNS TABLE(
|
||||
user_id uuid,
|
||||
email text,
|
||||
full_name text,
|
||||
-     master_item_id integer,
+     master_item_id bigint,
|
||||
item_name text,
|
||||
best_price_in_cents integer,
|
||||
store_name text,
|
||||
-     flyer_id integer,
+     flyer_id bigint,
|
||||
valid_to date
|
||||
) AS $$
|
||||
BEGIN
|
||||
@@ -1569,11 +1101,12 @@ BEGIN
|
||||
SELECT
|
||||
fi.master_item_id,
|
||||
fi.price_in_cents,
|
||||
-         f.store_name,
+         s.name as store_name,
|
||||
f.flyer_id,
|
||||
f.valid_to
|
||||
FROM public.flyer_items fi
|
||||
JOIN public.flyers f ON fi.flyer_id = f.flyer_id
|
||||
+     JOIN public.stores s ON f.store_id = s.store_id
|
||||
WHERE
|
||||
fi.master_item_id IS NOT NULL
|
||||
AND fi.price_in_cents IS NOT NULL
|
||||
@@ -1616,3 +1149,472 @@ BEGIN
|
||||
bp.price_rank = 1;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
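Reviewer note: the set-returning function above is meant to be consumed in a single pass, for example by a notification or digest job. A minimal illustrative call, with the column list taken from the RETURNS TABLE signature shown in this diff:

-- Illustrative usage only: one row per (user, watched item) current best deal.
SELECT user_id, email, item_name, best_price_in_cents, store_name, valid_to
FROM public.get_best_sale_prices_for_all_users();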
|
||||
|
||||
-- ============================================================================
|
||||
-- PART 4: TRIGGERS
|
||||
-- ============================================================================
|
||||
|
||||
-- 1. Trigger to automatically create a profile when a new user signs up.
|
||||
-- This function is called by a trigger on the `public.users` table.
|
||||
DROP FUNCTION IF EXISTS public.handle_new_user();
|
||||
|
||||
-- It creates a corresponding profile and a default shopping list for the new user.
|
||||
-- It now accepts full_name and avatar_url from the user's metadata.
|
||||
CREATE OR REPLACE FUNCTION public.handle_new_user()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
new_profile_id UUID;
|
||||
user_meta_data JSONB;
|
||||
BEGIN
|
||||
-- The user's metadata (full_name, avatar_url) is passed via a temporary session variable.
|
||||
user_meta_data := current_setting('my_app.user_metadata', true)::JSONB;
|
||||
|
||||
INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
|
||||
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
|
||||
RETURNING user_id INTO new_profile_id;
|
||||
|
||||
-- Also create a default shopping list for the new user.
|
||||
INSERT INTO public.shopping_lists (user_id, name)
|
||||
VALUES (new.user_id, 'Main Shopping List');
|
||||
|
||||
-- Log the new user event
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (new.user_id, 'user_registered',
|
||||
COALESCE(user_meta_data->>'full_name', new.email) || ' has registered.',
|
||||
'user-plus',
|
||||
jsonb_build_object('email', new.email)
|
||||
);
|
||||
|
||||
RETURN new;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- This trigger calls the function after a new user is created.
|
||||
DROP TRIGGER IF EXISTS on_auth_user_created ON public.users;
|
||||
CREATE TRIGGER on_auth_user_created
|
||||
AFTER INSERT ON public.users
|
||||
FOR EACH ROW EXECUTE FUNCTION public.handle_new_user();
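Reviewer note: handle_new_user() reads the registrant's metadata from the my_app.user_metadata session setting, so the caller has to populate that setting in the same transaction as the INSERT into public.users. A hedged sketch of what the application side could look like; the email value and metadata are illustrative, and it assumes public.users accepts an insert with just an email:

-- Sketch only: values are illustrative and the users column list is an assumption.
BEGIN;
SELECT set_config('my_app.user_metadata',
                  '{"full_name": "Jane Doe", "avatar_url": "https://example.com/jane.png"}',
                  true);  -- true = the setting is local to this transaction
INSERT INTO public.users (email) VALUES ('jane@example.com');
COMMIT;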
|
||||
|
||||
-- 2. Create a reusable function to automatically update 'updated_at' columns.
|
||||
DROP FUNCTION IF EXISTS public.handle_updated_at();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.handle_updated_at()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = now();
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Dynamically apply the 'handle_updated_at' trigger to all tables in the public schema
|
||||
-- that have an 'updated_at' column. This is more maintainable than creating a separate
|
||||
-- trigger for each table.
|
||||
DO $$
|
||||
DECLARE
|
||||
t_name TEXT;
|
||||
BEGIN
|
||||
FOR t_name IN
|
||||
SELECT table_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public' AND column_name = 'updated_at'
|
||||
LOOP
|
||||
EXECUTE format('DROP TRIGGER IF EXISTS on_%s_updated ON public.%I;
|
||||
CREATE TRIGGER on_%s_updated
|
||||
BEFORE UPDATE ON public.%I
|
||||
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();',
|
||||
t_name, t_name, t_name, t_name);
|
||||
END LOOP;
|
||||
END;
|
||||
$$;
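Since the DO block above attaches the updated_at trigger dynamically, it can be worth confirming afterwards which tables actually received one. A standard information_schema query, nothing project specific:

-- Illustrative verification: list the on_<table>_updated triggers created by the DO block above.
SELECT event_object_table, trigger_name
FROM information_schema.triggers
WHERE trigger_schema = 'public'
  AND trigger_name LIKE 'on\_%\_updated' ESCAPE '\'
ORDER BY event_object_table;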
|
||||
|
||||
-- 3. Create a trigger function to populate the item_price_history table on insert.
|
||||
DROP FUNCTION IF EXISTS public.update_price_history_on_flyer_item_insert();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.update_price_history_on_flyer_item_insert()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
flyer_valid_from DATE;
|
||||
flyer_valid_to DATE;
|
||||
current_summary_date DATE;
|
||||
flyer_location_id BIGINT;
|
||||
BEGIN
|
||||
-- If the item could not be matched, add it to the unmatched queue for review.
|
||||
IF NEW.master_item_id IS NULL THEN
|
||||
INSERT INTO public.unmatched_flyer_items (flyer_item_id)
|
||||
VALUES (NEW.flyer_item_id)
|
||||
ON CONFLICT (flyer_item_id) DO NOTHING;
|
||||
END IF;
|
||||
|
||||
-- Only run if the new flyer item is linked to a master item and has a price.
|
||||
IF NEW.master_item_id IS NULL OR NEW.price_in_cents IS NULL THEN
|
||||
RETURN NEW;
|
||||
END IF;
|
||||
|
||||
-- Get the validity dates of the flyer and the store_id.
|
||||
SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to
|
||||
FROM public.flyers
|
||||
WHERE flyer_id = NEW.flyer_id;
|
||||
|
||||
-- This single, set-based query is much more performant than looping.
|
||||
-- It generates all date/location pairs and inserts/updates them in one operation.
|
||||
INSERT INTO public.item_price_history (master_item_id, summary_date, store_location_id, min_price_in_cents, max_price_in_cents, avg_price_in_cents, data_points_count)
|
||||
SELECT
|
||||
NEW.master_item_id,
|
||||
d.day,
|
||||
fl.store_location_id,
|
||||
NEW.price_in_cents,
|
||||
NEW.price_in_cents,
|
||||
NEW.price_in_cents,
|
||||
1
|
||||
FROM public.flyer_locations fl
|
||||
CROSS JOIN generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval) AS d(day)
|
||||
WHERE fl.flyer_id = NEW.flyer_id
|
||||
ON CONFLICT (master_item_id, summary_date, store_location_id)
|
||||
DO UPDATE SET
|
||||
min_price_in_cents = LEAST(item_price_history.min_price_in_cents, EXCLUDED.min_price_in_cents),
|
||||
max_price_in_cents = GREATEST(item_price_history.max_price_in_cents, EXCLUDED.max_price_in_cents),
|
||||
avg_price_in_cents = ROUND(((item_price_history.avg_price_in_cents * item_price_history.data_points_count) + EXCLUDED.avg_price_in_cents) / (item_price_history.data_points_count + 1.0)),
|
||||
data_points_count = item_price_history.data_points_count + 1;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
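The ON CONFLICT branch above maintains a running average instead of rescanning all prices. A tiny worked example of that arithmetic, with made-up numbers:

-- Worked example (made-up numbers): a row with avg 300 cents over 2 data points absorbing a
-- new 450-cent price becomes ROUND((300*2 + 450) / 3.0) = 350.
SELECT ROUND(((300 * 2) + 450) / (2 + 1.0)) AS new_avg_price_in_cents;  -- returns 350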
|
||||
|
||||
-- Create the trigger on the flyer_items table for insert.
|
||||
DROP TRIGGER IF EXISTS trigger_update_price_history ON public.flyer_items;
|
||||
CREATE TRIGGER trigger_update_price_history
|
||||
AFTER INSERT ON public.flyer_items
|
||||
FOR EACH ROW EXECUTE FUNCTION public.update_price_history_on_flyer_item_insert();
|
||||
|
||||
-- 4. Create a trigger function to recalculate price history when a flyer item is deleted.
|
||||
DROP FUNCTION IF EXISTS public.recalculate_price_history_on_flyer_item_delete();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.recalculate_price_history_on_flyer_item_delete()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
affected_dates RECORD;
|
||||
BEGIN
|
||||
-- Only run if the deleted item was linked to a master item and had a price.
|
||||
IF OLD.master_item_id IS NULL OR OLD.price_in_cents IS NULL THEN
|
||||
RETURN OLD;
|
||||
END IF;
|
||||
|
||||
-- This single, set-based query is much more performant than looping.
|
||||
-- It recalculates aggregates for all affected dates and locations at once.
|
||||
WITH affected_days_and_locations AS (
|
||||
-- 1. Get all date/location pairs affected by the deleted item's flyer.
|
||||
SELECT DISTINCT
|
||||
generate_series(f.valid_from, f.valid_to, '1 day'::interval)::date AS summary_date,
|
||||
fl.store_location_id
|
||||
FROM public.flyers f
|
||||
JOIN public.flyer_locations fl ON f.flyer_id = fl.flyer_id
|
||||
WHERE f.flyer_id = OLD.flyer_id
|
||||
),
|
||||
new_aggregates AS (
|
||||
-- 2. For each affected date/location, recalculate the aggregates from all other relevant flyer items.
|
||||
SELECT
|
||||
adl.summary_date,
|
||||
adl.store_location_id,
|
||||
MIN(fi.price_in_cents) AS min_price,
|
||||
MAX(fi.price_in_cents) AS max_price,
|
||||
ROUND(AVG(fi.price_in_cents))::int AS avg_price,
|
||||
COUNT(fi.flyer_item_id)::int AS data_points
|
||||
FROM affected_days_and_locations adl
|
||||
LEFT JOIN public.flyer_items fi ON fi.master_item_id = OLD.master_item_id AND fi.price_in_cents IS NOT NULL
|
||||
LEFT JOIN public.flyers f ON fi.flyer_id = f.flyer_id AND adl.summary_date BETWEEN f.valid_from AND f.valid_to
|
||||
LEFT JOIN public.flyer_locations fl ON fi.flyer_id = fl.flyer_id AND adl.store_location_id = fl.store_location_id
|
||||
WHERE fl.flyer_id IS NOT NULL -- Ensure the join was successful
|
||||
GROUP BY adl.summary_date, adl.store_location_id
|
||||
)
|
||||
-- 3. Update the history table with the new aggregates.
|
||||
UPDATE public.item_price_history iph
|
||||
SET
|
||||
min_price_in_cents = na.min_price,
|
||||
max_price_in_cents = na.max_price,
|
||||
avg_price_in_cents = na.avg_price,
|
||||
data_points_count = na.data_points
|
||||
FROM new_aggregates na
|
||||
WHERE iph.master_item_id = OLD.master_item_id
|
||||
AND iph.summary_date = na.summary_date
|
||||
AND iph.store_location_id = na.store_location_id;
|
||||
|
||||
-- 4. Delete any history records that no longer have any data points.
|
||||
DELETE FROM public.item_price_history iph
|
||||
WHERE iph.master_item_id = OLD.master_item_id
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM new_aggregates na
|
||||
WHERE na.summary_date = iph.summary_date AND na.store_location_id = iph.store_location_id
|
||||
);
|
||||
|
||||
RETURN OLD;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Create the trigger on the flyer_items table for DELETE operations.
|
||||
DROP TRIGGER IF EXISTS trigger_recalculate_price_history_on_delete ON public.flyer_items;
|
||||
CREATE TRIGGER trigger_recalculate_price_history_on_delete
|
||||
AFTER DELETE ON public.flyer_items
|
||||
FOR EACH ROW EXECUTE FUNCTION public.recalculate_price_history_on_flyer_item_delete();
|
||||
|
||||
-- 5. Trigger function to update the average rating on the recipes table.
|
||||
DROP FUNCTION IF EXISTS public.update_recipe_rating_aggregates();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.update_recipe_rating_aggregates()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
UPDATE public.recipes
|
||||
SET
|
||||
avg_rating = (
|
||||
SELECT AVG(rating)
|
||||
FROM public.recipe_ratings
|
||||
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
|
||||
),
|
||||
rating_count = (
|
||||
SELECT COUNT(*)
|
||||
FROM public.recipe_ratings
|
||||
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
|
||||
)
|
||||
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id);
|
||||
|
||||
RETURN NULL; -- The result is ignored since this is an AFTER trigger.
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after any change to recipe_ratings.
|
||||
DROP TRIGGER IF EXISTS on_recipe_rating_change ON public.recipe_ratings;
|
||||
CREATE TRIGGER on_recipe_rating_change
|
||||
AFTER INSERT OR UPDATE OR DELETE ON public.recipe_ratings
|
||||
FOR EACH ROW EXECUTE FUNCTION public.update_recipe_rating_aggregates();
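If these aggregates ever drift (for example after a bulk import run with triggers disabled), the stored values can be compared against live ones. A possible check, with an illustrative recipe_id:

-- Illustrative drift check: compare stored rating aggregates with live values for one recipe.
SELECT r.recipe_id, r.avg_rating, r.rating_count,
       AVG(rr.rating)   AS live_avg_rating,
       COUNT(rr.rating) AS live_rating_count
FROM public.recipes r
LEFT JOIN public.recipe_ratings rr ON rr.recipe_id = r.recipe_id
WHERE r.recipe_id = 1  -- illustrative id
GROUP BY r.recipe_id, r.avg_rating, r.rating_count;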
|
||||
|
||||
-- 6. Trigger function to log the creation of a new recipe.
|
||||
DROP FUNCTION IF EXISTS public.log_new_recipe();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_recipe()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.user_id,
|
||||
'recipe_created',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' created a new recipe: ' || NEW.name,
|
||||
'chef-hat',
|
||||
jsonb_build_object('recipe_id', NEW.recipe_id, 'recipe_name', NEW.name)
|
||||
);
|
||||
|
||||
-- Award 'First Recipe' achievement if it's their first one.
|
||||
PERFORM public.award_achievement(NEW.user_id, 'First Recipe');
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a new recipe is inserted.
|
||||
DROP TRIGGER IF EXISTS on_new_recipe_created ON public.recipes;
|
||||
CREATE TRIGGER on_new_recipe_created
|
||||
AFTER INSERT ON public.recipes
|
||||
FOR EACH ROW
|
||||
WHEN (NEW.user_id IS NOT NULL) -- Only log activity for user-created recipes.
|
||||
EXECUTE FUNCTION public.log_new_recipe();
|
||||
|
||||
-- 7a. Trigger function to update the item_count on the flyers table.
|
||||
DROP FUNCTION IF EXISTS public.update_flyer_item_count();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.update_flyer_item_count()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
IF (TG_OP = 'INSERT') THEN
|
||||
UPDATE public.flyers SET item_count = item_count + 1 WHERE flyer_id = NEW.flyer_id;
|
||||
ELSIF (TG_OP = 'DELETE') THEN
|
||||
UPDATE public.flyers SET item_count = item_count - 1 WHERE flyer_id = OLD.flyer_id;
|
||||
END IF;
|
||||
RETURN NULL; -- The result is ignored since this is an AFTER trigger.
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after any change to flyer_items.
|
||||
-- This ensures the item_count on the parent flyer is always accurate.
|
||||
DROP TRIGGER IF EXISTS on_flyer_item_change ON public.flyer_items;
|
||||
CREATE TRIGGER on_flyer_item_change
|
||||
AFTER INSERT OR DELETE ON public.flyer_items
|
||||
FOR EACH ROW EXECUTE FUNCTION public.update_flyer_item_count();
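Because item_count is maintained incrementally by this trigger, a periodic consistency check against the actual row counts can catch drift. A possible query, not part of the schema:

-- Illustrative consistency check: flyers whose cached item_count disagrees with the real count.
SELECT f.flyer_id, f.item_count, COUNT(fi.flyer_item_id) AS actual_item_count
FROM public.flyers f
LEFT JOIN public.flyer_items fi ON fi.flyer_id = f.flyer_id
GROUP BY f.flyer_id, f.item_count
HAVING f.item_count <> COUNT(fi.flyer_item_id);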
|
||||
|
||||
-- 7. Trigger function to log the creation of a new flyer.
|
||||
DROP FUNCTION IF EXISTS public.log_new_flyer();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_flyer()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- If the flyer was uploaded by a registered user, award the 'First-Upload' achievement.
|
||||
-- The award_achievement function handles checking if the user already has it.
|
||||
IF NEW.uploaded_by IS NOT NULL THEN
|
||||
PERFORM public.award_achievement(NEW.uploaded_by, 'First-Upload');
|
||||
END IF;
|
||||
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.uploaded_by, -- Log the user who uploaded it
|
||||
'flyer_uploaded',
|
||||
'A new flyer for ' || (SELECT name FROM public.stores WHERE store_id = NEW.store_id) || ' has been uploaded.',
|
||||
'file-text',
|
||||
jsonb_build_object(
|
||||
'flyer_id', NEW.flyer_id,
|
||||
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id),
|
||||
'valid_from', to_char(NEW.valid_from, 'YYYY-MM-DD'),
|
||||
'valid_to', to_char(NEW.valid_to, 'YYYY-MM-DD')
|
||||
)
|
||||
);
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a new flyer is inserted.
|
||||
DROP TRIGGER IF EXISTS on_new_flyer_created ON public.flyers;
|
||||
CREATE TRIGGER on_new_flyer_created
|
||||
AFTER INSERT ON public.flyers
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_flyer();
|
||||
|
||||
-- 8. Trigger function to log when a user favorites a recipe.
|
||||
DROP FUNCTION IF EXISTS public.log_new_favorite_recipe();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_favorite_recipe()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.user_id,
|
||||
'recipe_favorited',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' favorited the recipe: ' || (SELECT name FROM public.recipes WHERE recipe_id = NEW.recipe_id),
|
||||
'heart',
|
||||
jsonb_build_object(
|
||||
'recipe_id', NEW.recipe_id
|
||||
)
|
||||
);
|
||||
|
||||
-- Award 'First Favorite' achievement.
|
||||
PERFORM public.award_achievement(NEW.user_id, 'First Favorite');
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a recipe is favorited.
|
||||
DROP TRIGGER IF EXISTS on_new_favorite_recipe ON public.favorite_recipes;
|
||||
CREATE TRIGGER on_new_favorite_recipe
|
||||
AFTER INSERT ON public.favorite_recipes
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_favorite_recipe();
|
||||
|
||||
-- 9. Trigger function to log when a user shares a shopping list.
|
||||
DROP FUNCTION IF EXISTS public.log_new_list_share();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_list_share()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.shared_by_user_id,
|
||||
'list_shared',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a shopping list.',
|
||||
'share-2',
|
||||
jsonb_build_object(
|
||||
'shopping_list_id', NEW.shopping_list_id,
|
||||
'list_name', (SELECT name FROM public.shopping_lists WHERE shopping_list_id = NEW.shopping_list_id),
|
||||
'shared_with_user_id', NEW.shared_with_user_id
|
||||
)
|
||||
);
|
||||
|
||||
-- Award 'List Sharer' achievement.
|
||||
PERFORM public.award_achievement(NEW.shared_by_user_id, 'List Sharer');
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the function after a shopping list is shared.
|
||||
DROP TRIGGER IF EXISTS on_new_list_share ON public.shared_shopping_lists;
|
||||
CREATE TRIGGER on_new_list_share
|
||||
AFTER INSERT ON public.shared_shopping_lists
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_list_share();
|
||||
|
||||
-- 9a. Trigger function to log when a user shares a recipe collection.
|
||||
DROP FUNCTION IF EXISTS public.log_new_recipe_collection_share();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.log_new_recipe_collection_share()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Log the activity
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (
|
||||
NEW.shared_by_user_id, 'recipe_collection_shared',
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a recipe collection.',
|
||||
'book',
|
||||
jsonb_build_object('collection_id', NEW.recipe_collection_id, 'shared_with_user_id', NEW.shared_with_user_id)
|
||||
);
|
||||
|
||||
-- Award 'Recipe Sharer' achievement.
|
||||
PERFORM public.award_achievement(NEW.shared_by_user_id, 'Recipe Sharer');
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
DROP TRIGGER IF EXISTS on_new_recipe_collection_share ON public.shared_recipe_collections;
|
||||
CREATE TRIGGER on_new_recipe_collection_share
|
||||
AFTER INSERT ON public.shared_recipe_collections
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_recipe_collection_share();
|
||||
|
||||
-- 10. Trigger function to geocode a store location's address.
|
||||
-- This function is triggered when an address is inserted or updated, and is
|
||||
-- designed to be extensible for external geocoding services to populate the
|
||||
-- latitude, longitude, and location fields.
|
||||
DROP FUNCTION IF EXISTS public.geocode_address();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.geocode_address()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
full_address TEXT;
|
||||
BEGIN
|
||||
-- Only proceed if an address component has actually changed.
|
||||
IF TG_OP = 'INSERT' OR (TG_OP = 'UPDATE' AND (
|
||||
NEW.address_line_1 IS DISTINCT FROM OLD.address_line_1 OR
|
||||
NEW.address_line_2 IS DISTINCT FROM OLD.address_line_2 OR
|
||||
NEW.city IS DISTINCT FROM OLD.city OR
|
||||
NEW.province_state IS DISTINCT FROM OLD.province_state OR
|
||||
NEW.postal_code IS DISTINCT FROM OLD.postal_code OR
|
||||
NEW.country IS DISTINCT FROM OLD.country
|
||||
)) THEN
|
||||
-- Concatenate address parts into a single string for the geocoder.
|
||||
full_address := CONCAT_WS(', ', NEW.address_line_1, NEW.address_line_2, NEW.city, NEW.province_state, NEW.postal_code, NEW.country);
|
||||
|
||||
-- Placeholder for Geocoding API Call
|
||||
-- In a real application, you would call a service here and update NEW.latitude, NEW.longitude, and NEW.location.
|
||||
-- e.g., NEW.latitude := result.lat; NEW.longitude := result.lon;
|
||||
-- NEW.location := ST_SetSRID(ST_MakePoint(NEW.longitude, NEW.latitude), 4326);
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- This trigger calls the geocoding function when an address changes.
|
||||
DROP TRIGGER IF EXISTS on_address_change_geocode ON public.addresses;
|
||||
CREATE TRIGGER on_address_change_geocode
|
||||
BEFORE INSERT OR UPDATE ON public.addresses
|
||||
FOR EACH ROW EXECUTE FUNCTION public.geocode_address();
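Until a real geocoding call is wired into geocode_address(), coordinates would presumably be set manually or by a batch job. A hedged sketch of such a backfill; it assumes PostGIS is installed and that addresses has the latitude, longitude, and location columns the comments above refer to, and all values are made up:

-- Hypothetical manual backfill; assumes PostGIS and the latitude/longitude/location columns.
UPDATE public.addresses
SET latitude  = 49.2827,
    longitude = -123.1207,
    location  = ST_SetSRID(ST_MakePoint(-123.1207, 49.2827), 4326)
WHERE address_id = 1;  -- illustrative id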
|
||||
|
||||
-- 11. Trigger function to increment the fork_count on the original recipe.
|
||||
DROP FUNCTION IF EXISTS public.increment_recipe_fork_count();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.increment_recipe_fork_count()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Only run if the recipe is a fork (original_recipe_id is not null).
|
||||
IF NEW.original_recipe_id IS NOT NULL THEN
|
||||
UPDATE public.recipes SET fork_count = fork_count + 1 WHERE recipe_id = NEW.original_recipe_id;
|
||||
-- Award 'First Fork' achievement.
|
||||
PERFORM public.award_achievement(NEW.user_id, 'First Fork');
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
DROP TRIGGER IF EXISTS on_recipe_fork ON public.recipes;
|
||||
CREATE TRIGGER on_recipe_fork
|
||||
AFTER INSERT ON public.recipes
|
||||
FOR EACH ROW EXECUTE FUNCTION public.increment_recipe_fork_count();
|
||||
|
||||
@@ -265,5 +265,6 @@ INSERT INTO public.achievements (name, description, icon, points_value) VALUES
|
||||
('List Sharer', 'Share a shopping list with another user for the first time.', 'list', 20),
|
||||
('First Favorite', 'Mark a recipe as one of your favorites.', 'heart', 5),
|
||||
('First Fork', 'Make a personal copy of a public recipe.', 'git-fork', 10),
|
||||
- ('First Budget Created', 'Create your first budget to track spending.', 'piggy-bank', 15)
+ ('First Budget Created', 'Create your first budget to track spending.', 'piggy-bank', 15),
+ ('First-Upload', 'Upload your first flyer.', 'upload-cloud', 25)
|
||||
ON CONFLICT (name) DO NOTHING;
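The triggers above grant these badges through public.award_achievement(user_id, name), which the trigger comments treat as idempotent. For completeness, a manual grant would presumably look like this; the UUID is a placeholder and award_achievement is defined elsewhere in the schema:

-- Hypothetical manual grant; award_achievement() is assumed idempotent per the trigger comments.
SELECT public.award_achievement('00000000-0000-0000-0000-000000000000'::uuid, 'First-Upload');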
|
||||
|
||||
@@ -162,7 +162,6 @@ COMMENT ON COLUMN public.flyers.uploaded_by IS 'The user who uploaded the flyer.
|
||||
CREATE INDEX IF NOT EXISTS idx_flyers_status ON public.flyers(status);
CREATE INDEX IF NOT EXISTS idx_flyers_created_at ON public.flyers (created_at DESC);
CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid_to DESC, file_name ASC);
- CREATE INDEX IF NOT EXISTS idx_flyers_status ON public.flyers(status);
|
||||
-- 7. The 'master_grocery_items' table. This is the master dictionary.
|
||||
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
|
||||
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
@@ -973,6 +972,21 @@ COMMENT ON COLUMN public.user_reactions.reaction_type IS 'The type of reaction (
|
||||
CREATE INDEX IF NOT EXISTS idx_user_reactions_user_id ON public.user_reactions(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_user_reactions_entity ON public.user_reactions(entity_type, entity_id);
|
||||
|
||||
+ -- 56. Store user-defined budgets for spending analysis.
+ CREATE TABLE IF NOT EXISTS public.budgets (
+     budget_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+     user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
+     name TEXT NOT NULL,
+     amount_cents INTEGER NOT NULL CHECK (amount_cents > 0),
+     period TEXT NOT NULL CHECK (period IN ('weekly', 'monthly')),
+     start_date DATE NOT NULL,
+     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+     CONSTRAINT budgets_name_check CHECK (TRIM(name) <> '')
+ );
+ COMMENT ON TABLE public.budgets IS 'Allows users to set weekly or monthly grocery budgets for spending tracking.';
+ CREATE INDEX IF NOT EXISTS idx_budgets_user_id ON public.budgets(user_id);
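As a quick illustration of the constraints on this table (positive amount_cents, period restricted to weekly or monthly, non-blank name), a sample row might look like the following; all values are made up:

-- Illustrative row only: a $150.00/week grocery budget.
INSERT INTO public.budgets (user_id, name, amount_cents, period, start_date)
VALUES ('00000000-0000-0000-0000-000000000000', 'Weekly groceries', 15000, 'weekly', DATE '2025-01-06');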
|
||||
|
||||
-- 57. Static table defining available achievements for gamification.
|
||||
CREATE TABLE IF NOT EXISTS public.achievements (
|
||||
achievement_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
@@ -998,17 +1012,3 @@ CREATE INDEX IF NOT EXISTS idx_user_achievements_user_id ON public.user_achievem
|
||||
CREATE INDEX IF NOT EXISTS idx_user_achievements_achievement_id ON public.user_achievements(achievement_id);
|
||||
|
||||
|
||||
- -- 56. Store user-defined budgets for spending analysis.
- CREATE TABLE IF NOT EXISTS public.budgets (
-     budget_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-     user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
-     name TEXT NOT NULL,
-     amount_cents INTEGER NOT NULL CHECK (amount_cents > 0),
-     period TEXT NOT NULL CHECK (period IN ('weekly', 'monthly')),
-     start_date DATE NOT NULL,
-     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-     CONSTRAINT budgets_name_check CHECK (TRIM(name) <> '')
- );
- COMMENT ON TABLE public.budgets IS 'Allows users to set weekly or monthly grocery budgets for spending tracking.';
- CREATE INDEX IF NOT EXISTS idx_budgets_user_id ON public.budgets(user_id);
|
||||
|
||||
@@ -102,11 +102,11 @@ CREATE TABLE IF NOT EXISTS public.profiles (
|
||||
address_id BIGINT REFERENCES public.addresses(address_id) ON DELETE SET NULL,
|
||||
points INTEGER DEFAULT 0 NOT NULL CHECK (points >= 0),
|
||||
preferences JSONB,
|
||||
-     role TEXT CHECK (role IN ('admin', 'user')),
+     role TEXT NOT NULL CHECK (role IN ('admin', 'user')),
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT profiles_full_name_check CHECK (full_name IS NULL OR TRIM(full_name) <> ''),
|
||||
-     CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https://?.*'),
+     CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https?://.*'),
|
||||
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
|
||||
updated_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
|
||||
);
|
||||
@@ -124,7 +124,7 @@ CREATE TABLE IF NOT EXISTS public.stores (
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT stores_name_check CHECK (TRIM(name) <> ''),
|
||||
-     CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*'),
+     CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*'),
|
||||
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
|
||||
);
|
||||
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';
|
||||
@@ -144,7 +144,7 @@ CREATE TABLE IF NOT EXISTS public.flyers (
|
||||
flyer_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
file_name TEXT NOT NULL,
|
||||
image_url TEXT NOT NULL,
|
||||
-     icon_url TEXT,
+     icon_url TEXT NOT NULL,
|
||||
checksum TEXT UNIQUE,
|
||||
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||
valid_from DATE,
|
||||
@@ -157,8 +157,8 @@ CREATE TABLE IF NOT EXISTS public.flyers (
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT flyers_valid_dates_check CHECK (valid_to >= valid_from),
|
||||
CONSTRAINT flyers_file_name_check CHECK (TRIM(file_name) <> ''),
|
||||
-     CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https://?.*'),
-     CONSTRAINT flyers_icon_url_check CHECK (icon_url IS NULL OR icon_url ~* '^https://?.*'),
+     CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https?://.*'),
+     CONSTRAINT flyers_icon_url_check CHECK (icon_url ~* '^https?://.*'),
|
||||
CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64)
|
||||
);
|
||||
COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.';
|
||||
@@ -215,7 +215,7 @@ CREATE TABLE IF NOT EXISTS public.brands (
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT brands_name_check CHECK (TRIM(name) <> ''),
|
||||
-     CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*')
+     CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*')
|
||||
);
|
||||
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
|
||||
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
|
||||
@@ -482,7 +482,7 @@ CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
|
||||
downvotes INTEGER DEFAULT 0 NOT NULL CHECK (downvotes >= 0),
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-     CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
+     CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
|
||||
);
|
||||
COMMENT ON TABLE public.user_submitted_prices IS 'Stores item prices submitted by users directly from physical stores.';
|
||||
COMMENT ON COLUMN public.user_submitted_prices.photo_url IS 'URL to user-submitted photo evidence of the price.';
|
||||
@@ -539,7 +539,7 @@ CREATE TABLE IF NOT EXISTS public.recipes (
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT recipes_name_check CHECK (TRIM(name) <> ''),
|
||||
-     CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
+     CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
|
||||
);
|
||||
COMMENT ON TABLE public.recipes IS 'Stores recipes that can be used to generate shopping lists.';
|
||||
COMMENT ON COLUMN public.recipes.servings IS 'The number of servings this recipe yields.';
|
||||
@@ -689,8 +689,8 @@ CREATE TABLE IF NOT EXISTS public.planned_meals (
|
||||
meal_type TEXT NOT NULL,
|
||||
servings_to_cook INTEGER,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-     CONSTRAINT planned_meals_meal_type_check CHECK (TRIM(meal_type) <> ''),
-     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
+     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+     CONSTRAINT planned_meals_meal_type_check CHECK (TRIM(meal_type) <> '')
|
||||
);
|
||||
COMMENT ON TABLE public.planned_meals IS 'Assigns a recipe to a specific day and meal type within a user''s menu plan.';
|
||||
COMMENT ON COLUMN public.planned_meals.meal_type IS 'The designated meal for the recipe, e.g., ''Breakfast'', ''Lunch'', ''Dinner''.';
|
||||
@@ -940,7 +940,7 @@ CREATE TABLE IF NOT EXISTS public.receipts (
|
||||
raw_text TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
processed_at TIMESTAMPTZ,
|
||||
-     CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https://?.*'),
+     CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https?://.*'),
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||
);
|
||||
COMMENT ON TABLE public.receipts IS 'Stores uploaded user receipts for purchase tracking and analysis.';
|
||||
@@ -1113,6 +1113,7 @@ DECLARE
|
||||
ground_beef_id BIGINT; pasta_item_id BIGINT; tomatoes_id BIGINT; onions_id BIGINT; garlic_id BIGINT;
|
||||
bell_peppers_id BIGINT; carrots_id BIGINT; soy_sauce_id BIGINT;
|
||||
soda_item_id BIGINT; turkey_item_id BIGINT; bread_item_id BIGINT; cheese_item_id BIGINT;
|
||||
chicken_thighs_id BIGINT; paper_towels_id BIGINT; toilet_paper_id BIGINT;
|
||||
|
||||
-- Tag IDs
|
||||
quick_easy_tag BIGINT; healthy_tag BIGINT; chicken_tag BIGINT;
|
||||
@@ -1164,6 +1165,9 @@ BEGIN
|
||||
SELECT mgi.master_grocery_item_id INTO turkey_item_id FROM public.master_grocery_items mgi WHERE mgi.name = 'turkey';
|
||||
SELECT mgi.master_grocery_item_id INTO bread_item_id FROM public.master_grocery_items mgi WHERE mgi.name = 'bread';
|
||||
SELECT mgi.master_grocery_item_id INTO cheese_item_id FROM public.master_grocery_items mgi WHERE mgi.name = 'cheese';
|
||||
+     SELECT mgi.master_grocery_item_id INTO chicken_thighs_id FROM public.master_grocery_items mgi WHERE mgi.name = 'chicken thighs';
+     SELECT mgi.master_grocery_item_id INTO paper_towels_id FROM public.master_grocery_items mgi WHERE mgi.name = 'paper towels';
+     SELECT mgi.master_grocery_item_id INTO toilet_paper_id FROM public.master_grocery_items mgi WHERE mgi.name = 'toilet paper';
|
||||
|
||||
-- Insert ingredients for each recipe
|
||||
INSERT INTO public.recipe_ingredients (recipe_id, master_item_id, quantity, unit) VALUES
|
||||
@@ -1200,6 +1204,17 @@ BEGIN
(bolognese_recipe_id, family_tag), (bolognese_recipe_id, beef_tag), (bolognese_recipe_id, weeknight_tag),
(stir_fry_recipe_id, quick_easy_tag), (stir_fry_recipe_id, healthy_tag), (stir_fry_recipe_id, vegetarian_tag)
ON CONFLICT (recipe_id, tag_id) DO NOTHING;

INSERT INTO public.master_item_aliases (master_item_id, alias) VALUES
(ground_beef_id, 'ground chuck'), (ground_beef_id, 'lean ground beef'),
(ground_beef_id, 'extra lean ground beef'), (ground_beef_id, 'hamburger meat'),
(chicken_breast_id, 'boneless skinless chicken breast'), (chicken_breast_id, 'chicken cutlets'),
(chicken_thighs_id, 'boneless skinless chicken thighs'), (chicken_thighs_id, 'bone-in chicken thighs'),
(bell_peppers_id, 'red pepper'), (bell_peppers_id, 'green pepper'), (bell_peppers_id, 'yellow pepper'), (bell_peppers_id, 'orange pepper'),
(soda_item_id, 'pop'), (soda_item_id, 'soft drink'), (soda_item_id, 'coke'), (soda_item_id, 'pepsi'),
(paper_towels_id, 'paper towel'),
(toilet_paper_id, 'bathroom tissue'), (toilet_paper_id, 'toilet tissue')
ON CONFLICT (alias) DO NOTHING;
END $$;

-- Pre-populate the unit_conversions table with common cooking conversions.
@@ -2115,6 +2130,61 @@ AS $$
ORDER BY potential_savings_cents DESC;
$$;

-- Function to get a user's spending breakdown by category for a given date range.
DROP FUNCTION IF EXISTS public.get_spending_by_category(UUID, DATE, DATE);

CREATE OR REPLACE FUNCTION public.get_spending_by_category(p_user_id UUID, p_start_date DATE, p_end_date DATE)
RETURNS TABLE (
category_id BIGINT,
category_name TEXT,
total_spent_cents BIGINT
)
LANGUAGE sql
STABLE
SECURITY INVOKER
AS $$
WITH all_purchases AS (
-- CTE 1: Combine purchases from completed shopping trips.
-- We only consider items that have a price paid.
SELECT
sti.master_item_id,
sti.price_paid_cents
FROM public.shopping_trip_items sti
JOIN public.shopping_trips st ON sti.shopping_trip_id = st.shopping_trip_id
WHERE st.user_id = p_user_id
AND st.completed_at::date BETWEEN p_start_date AND p_end_date
AND sti.price_paid_cents IS NOT NULL

UNION ALL

-- CTE 2: Combine purchases from processed receipts.
SELECT
ri.master_item_id,
ri.price_paid_cents
FROM public.receipt_items ri
JOIN public.receipts r ON ri.receipt_id = r.receipt_id
WHERE r.user_id = p_user_id
AND r.transaction_date::date BETWEEN p_start_date AND p_end_date
AND ri.master_item_id IS NOT NULL -- Only include items matched to a master item
)
-- Final Aggregation: Group all combined purchases by category and sum the spending.
SELECT
c.category_id,
c.name AS category_name,
SUM(ap.price_paid_cents)::BIGINT AS total_spent_cents
FROM all_purchases ap
-- Join with master_grocery_items to get the category_id for each purchase.
JOIN public.master_grocery_items mgi ON ap.master_item_id = mgi.master_grocery_item_id
-- Join with categories to get the category name for display.
JOIN public.categories c ON mgi.category_id = c.category_id
GROUP BY
c.category_id, c.name
HAVING
SUM(ap.price_paid_cents) > 0
ORDER BY
total_spent_cents DESC;
$$;
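-- Example invocation of get_spending_by_category (illustrative only; the UUID and dates below are
-- placeholders, not real rows):
--   SELECT * FROM public.get_spending_by_category(
--     '00000000-0000-0000-0000-000000000000'::uuid, DATE '2025-01-01', DATE '2025-01-31');
-- Each result row is one category with the total spent in cents across completed shopping trips and
-- processed receipts within the date range, highest spend first.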
|
||||
-- Function to approve a suggested correction and apply it.
|
||||
DROP FUNCTION IF EXISTS public.approve_correction(BIGINT);
|
||||
|
||||
@@ -2572,7 +2642,9 @@ BEGIN
|
||||
'file-text',
|
||||
jsonb_build_object(
|
||||
'flyer_id', NEW.flyer_id,
|
||||
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id)
|
||||
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id),
|
||||
'valid_from', to_char(NEW.valid_from, 'YYYY-MM-DD'),
|
||||
'valid_to', to_char(NEW.valid_to, 'YYYY-MM-DD')
|
||||
)
|
||||
);
|
||||
RETURN NEW;
|
||||
@@ -2622,6 +2694,7 @@ BEGIN
|
||||
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a shopping list.',
|
||||
'share-2',
|
||||
jsonb_build_object(
|
||||
'shopping_list_id', NEW.shopping_list_id,
|
||||
'list_name', (SELECT name FROM public.shopping_lists WHERE shopping_list_id = NEW.shopping_list_id),
|
||||
'shared_with_user_id', NEW.shared_with_user_id
|
||||
)
|
||||
@@ -2669,6 +2742,66 @@ CREATE TRIGGER on_new_recipe_collection_share
|
||||
AFTER INSERT ON public.shared_recipe_collections
|
||||
FOR EACH ROW EXECUTE FUNCTION public.log_new_recipe_collection_share();
|
||||
|
||||
-- 10. Trigger function to geocode a store location's address.
|
||||
-- This function is triggered when an address is inserted or updated, and is
|
||||
-- designed to be extensible for external geocoding services to populate the
|
||||
-- latitude, longitude, and location fields.
|
||||
DROP FUNCTION IF EXISTS public.geocode_address();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.geocode_address()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
full_address TEXT;
|
||||
BEGIN
|
||||
-- Only proceed if an address component has actually changed.
|
||||
IF TG_OP = 'INSERT' OR (TG_OP = 'UPDATE' AND (
|
||||
NEW.address_line_1 IS DISTINCT FROM OLD.address_line_1 OR
|
||||
NEW.address_line_2 IS DISTINCT FROM OLD.address_line_2 OR
|
||||
NEW.city IS DISTINCT FROM OLD.city OR
|
||||
NEW.province_state IS DISTINCT FROM OLD.province_state OR
|
||||
NEW.postal_code IS DISTINCT FROM OLD.postal_code OR
|
||||
NEW.country IS DISTINCT FROM OLD.country
|
||||
)) THEN
|
||||
-- Concatenate address parts into a single string for the geocoder.
|
||||
full_address := CONCAT_WS(', ', NEW.address_line_1, NEW.address_line_2, NEW.city, NEW.province_state, NEW.postal_code, NEW.country);
|
||||
|
||||
-- Placeholder for Geocoding API Call.
|
||||
-- In a real application, you would call a service here and update NEW.latitude, NEW.longitude, and NEW.location.
|
||||
-- e.g., NEW.latitude := result.lat; NEW.longitude := result.lon;
|
||||
-- NEW.location := ST_SetSRID(ST_MakePoint(NEW.longitude, NEW.latitude), 4326);
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- This trigger calls the geocoding function when an address changes.
|
||||
DROP TRIGGER IF EXISTS on_address_change_geocode ON public.addresses;
|
||||
CREATE TRIGGER on_address_change_geocode
|
||||
BEFORE INSERT OR UPDATE ON public.addresses
|
||||
FOR EACH ROW EXECUTE FUNCTION public.geocode_address();
|
||||
|
||||
-- 11. Trigger function to increment the fork_count on the original recipe.
|
||||
DROP FUNCTION IF EXISTS public.increment_recipe_fork_count();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.increment_recipe_fork_count()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Only run if the recipe is a fork (original_recipe_id is not null).
|
||||
IF NEW.original_recipe_id IS NOT NULL THEN
|
||||
UPDATE public.recipes SET fork_count = fork_count + 1 WHERE recipe_id = NEW.original_recipe_id;
|
||||
-- Award 'First Fork' achievement.
|
||||
PERFORM public.award_achievement(NEW.user_id, 'First Fork');
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
DROP TRIGGER IF EXISTS on_recipe_fork ON public.recipes;
|
||||
CREATE TRIGGER on_recipe_fork
|
||||
AFTER INSERT ON public.recipes
|
||||
FOR EACH ROW EXECUTE FUNCTION public.increment_recipe_fork_count();
|
||||
|
||||
-- =================================================================
|
||||
-- Function: get_best_sale_prices_for_all_users()
|
||||
-- Description: Retrieves the best sale price for every item on every user's watchlist.
|
||||
@@ -2676,17 +2809,19 @@ CREATE TRIGGER on_new_recipe_collection_share
|
||||
-- It replaces the need to call get_best_sale_prices_for_user for each user individually.
|
||||
-- Returns: TABLE(...) - A set of records including user details and deal information.
|
||||
-- =================================================================
|
||||
DROP FUNCTION IF EXISTS public.get_best_sale_prices_for_all_users();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.get_best_sale_prices_for_all_users()
|
||||
RETURNS TABLE(
|
||||
user_id uuid,
|
||||
|
||||
email text,
|
||||
full_name text,
|
||||
master_item_id integer,
|
||||
master_item_id bigint,
|
||||
item_name text,
|
||||
best_price_in_cents integer,
|
||||
store_name text,
|
||||
flyer_id integer,
|
||||
flyer_id bigint,
|
||||
valid_to date
|
||||
) AS $$
|
||||
BEGIN
|
||||
@@ -2698,7 +2833,7 @@ BEGIN
|
||||
SELECT
|
||||
fi.master_item_id,
|
||||
fi.price_in_cents,
|
||||
f.store_name,
|
||||
s.name as store_name,
|
||||
f.flyer_id,
|
||||
f.valid_to
|
||||
FROM public.flyer_items fi
|
||||
|
||||
@@ -4,13 +4,14 @@ import { screen, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { AppGuard } from './AppGuard';
|
||||
import { useAppInitialization } from '../hooks/useAppInitialization';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import { useModal } from '../hooks/useModal';
|
||||
import { renderWithProviders } from '../tests/utils/renderWithProviders';
|
||||
|
||||
// Mock dependencies
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
vi.mock('../hooks/useAppInitialization');
|
||||
vi.mock('../hooks/useModal');
|
||||
vi.mock('../services/apiClient');
|
||||
vi.mock('./WhatsNewModal', () => ({
|
||||
WhatsNewModal: ({ isOpen }: { isOpen: boolean }) =>
|
||||
isOpen ? <div data-testid="whats-new-modal-mock" /> : null,
|
||||
@@ -21,6 +22,7 @@ vi.mock('../config', () => ({
|
||||
},
|
||||
}));
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
const mockedUseAppInitialization = vi.mocked(useAppInitialization);
|
||||
const mockedUseModal = vi.mocked(useModal);
|
||||
|
||||
|
||||
@@ -10,16 +10,9 @@ import { renderWithProviders } from '../tests/utils/renderWithProviders';
|
||||
// Unmock the component to test the real implementation
|
||||
vi.unmock('./FlyerCorrectionTool');
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('../services/aiApiClient');
|
||||
vi.mock('../services/notificationService');
|
||||
vi.mock('../services/logger', () => ({
|
||||
logger: {
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
const mockedAiApiClient = aiApiClient as Mocked<typeof aiApiClient>;
|
||||
// The aiApiClient, notificationService, and logger are mocked globally.
|
||||
// We can get a typed reference to the aiApiClient for individual test overrides.
|
||||
const mockedAiApiClient = vi.mocked(aiApiClient);
|
||||
const mockedNotifySuccess = notifySuccess as Mocked<typeof notifySuccess>;
|
||||
const mockedNotifyError = notifyError as Mocked<typeof notifyError>;
|
||||
|
||||
|
||||
@@ -9,14 +9,9 @@ import { createMockLeaderboardUser } from '../tests/utils/mockFactories';
|
||||
import { createMockLogger } from '../tests/utils/mockLogger';
|
||||
import { renderWithProviders } from '../tests/utils/renderWithProviders';
|
||||
|
||||
// Mock the apiClient
|
||||
vi.mock('../services/apiClient'); // This was correct
|
||||
const mockedApiClient = apiClient as Mocked<typeof apiClient>;
|
||||
|
||||
// Mock the logger
|
||||
vi.mock('../services/logger', () => ({
|
||||
logger: createMockLogger(),
|
||||
}));
|
||||
// The apiClient and logger are mocked globally.
|
||||
// We can get a typed reference to the apiClient for individual test overrides.
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// Mock lucide-react icons to prevent rendering errors in the test environment
|
||||
vi.mock('lucide-react', () => ({
|
||||
|
||||
@@ -2,23 +2,15 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { screen, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { RecipeSuggester } from './RecipeSuggester';
|
||||
import { suggestRecipe } from '../services/apiClient';
|
||||
import { RecipeSuggester } from './RecipeSuggester'; // This should be after mocks
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import { logger } from '../services/logger.client';
|
||||
import { renderWithProviders } from '../tests/utils/renderWithProviders';
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
// Mock the API client
|
||||
vi.mock('../services/apiClient', () => ({
|
||||
suggestRecipe: vi.fn(),
|
||||
}));
|
||||
|
||||
// Mock the logger
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
// We can get a typed reference to it for individual test overrides.
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('RecipeSuggester Component', () => {
|
||||
beforeEach(() => {
|
||||
@@ -45,7 +37,7 @@ describe('RecipeSuggester Component', () => {
|
||||
await user.click(button);
|
||||
|
||||
expect(await screen.findByText('Please enter at least one ingredient.')).toBeInTheDocument();
|
||||
expect(suggestRecipe).not.toHaveBeenCalled();
|
||||
expect(mockedApiClient.suggestRecipe).not.toHaveBeenCalled();
|
||||
console.log('TEST: Validation error displayed correctly');
|
||||
});
|
||||
|
||||
@@ -60,7 +52,7 @@ describe('RecipeSuggester Component', () => {
|
||||
// Mock successful API response
|
||||
const mockSuggestion = 'Here is a nice Chicken and Rice recipe...';
|
||||
// Add a delay to ensure the loading state is visible during the test
|
||||
vi.mocked(suggestRecipe).mockImplementation(async () => {
|
||||
mockedApiClient.suggestRecipe.mockImplementation(async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
return { ok: true, json: async () => ({ suggestion: mockSuggestion }) } as Response;
|
||||
});
|
||||
@@ -76,7 +68,7 @@ describe('RecipeSuggester Component', () => {
|
||||
expect(screen.getByText(mockSuggestion)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(suggestRecipe).toHaveBeenCalledWith(['chicken', 'rice']);
|
||||
expect(mockedApiClient.suggestRecipe).toHaveBeenCalledWith(['chicken', 'rice']);
|
||||
console.log('TEST: Suggestion displayed and API called with correct args');
|
||||
});
|
||||
|
||||
@@ -90,7 +82,7 @@ describe('RecipeSuggester Component', () => {
|
||||
|
||||
// Mock API failure response
|
||||
const errorMessage = 'Invalid ingredients provided.';
|
||||
vi.mocked(suggestRecipe).mockResolvedValue({
|
||||
mockedApiClient.suggestRecipe.mockResolvedValue({
|
||||
ok: false,
|
||||
json: async () => ({ message: errorMessage }),
|
||||
} as Response);
|
||||
@@ -117,7 +109,7 @@ describe('RecipeSuggester Component', () => {
|
||||
|
||||
// Mock network error
|
||||
const networkError = new Error('Network Error');
|
||||
vi.mocked(suggestRecipe).mockRejectedValue(networkError);
|
||||
mockedApiClient.suggestRecipe.mockRejectedValue(networkError);
|
||||
|
||||
const button = screen.getByRole('button', { name: /Suggest a Recipe/i });
|
||||
await user.click(button);
|
||||
@@ -148,7 +140,7 @@ describe('RecipeSuggester Component', () => {
|
||||
await user.type(input, 'tofu');
|
||||
|
||||
// Mock success for the second click
|
||||
vi.mocked(suggestRecipe).mockResolvedValue({
|
||||
mockedApiClient.suggestRecipe.mockResolvedValue({
|
||||
ok: true,
|
||||
json: async () => ({ suggestion: 'Tofu Stir Fry' }),
|
||||
} as Response);
|
||||
|
||||
@@ -110,8 +110,8 @@ async function main() {
|
||||
validTo.setDate(today.getDate() + 5);
|
||||
|
||||
const flyerQuery = `
|
||||
INSERT INTO public.flyers (file_name, image_url, checksum, store_id, valid_from, valid_to)
|
||||
VALUES ('safeway-flyer.jpg', 'https://example.com/flyer-images/safeway-flyer.jpg', 'a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0', ${storeMap.get('Safeway')}, $1, $2)
|
||||
INSERT INTO public.flyers (file_name, image_url, icon_url, checksum, store_id, valid_from, valid_to)
|
||||
VALUES ('safeway-flyer.jpg', 'https://example.com/flyer-images/safeway-flyer.jpg', 'https://example.com/flyer-images/icons/safeway-flyer.jpg', 'a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0', ${storeMap.get('Safeway')}, $1, $2)
|
||||
RETURNING flyer_id;
|
||||
`;
|
||||
const flyerRes = await client.query<{ flyer_id: number }>(flyerQuery, [
|
||||
|
||||
@@ -12,12 +12,7 @@ import {
|
||||
} from '../tests/utils/mockFactories';
|
||||
import { mockUseFlyers, mockUseUserData } from '../tests/setup/mockHooks';
|
||||
|
||||
// Explicitly mock apiClient to ensure stable spies are used
|
||||
vi.mock('../services/apiClient', () => ({
|
||||
countFlyerItemsForFlyers: vi.fn(),
|
||||
fetchFlyerItemsForFlyers: vi.fn(),
|
||||
}));
|
||||
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
// Mock the hooks to avoid Missing Context errors
|
||||
vi.mock('./useFlyers', () => ({
|
||||
useFlyers: () => mockUseFlyers(),
|
||||
@@ -30,14 +25,6 @@ vi.mock('../hooks/useUserData', () => ({
|
||||
// The apiClient is globally mocked in our test setup, so we just need to cast it
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// Mock the logger to prevent console noise
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
error: vi.fn(),
|
||||
info: vi.fn(), // Added to prevent crashes on abort logging
|
||||
},
|
||||
}));
|
||||
|
||||
// Set a consistent "today" for testing flyer validity to make tests deterministic
|
||||
const TODAY = new Date('2024-01-15T12:00:00.000Z');
|
||||
|
||||
|
||||
@@ -11,21 +11,9 @@ import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||
import { logger } from '../services/logger.client';
|
||||
|
||||
// Mock the dependencies
|
||||
vi.mock('../services/apiClient', () => ({
|
||||
// Mock other functions if needed
|
||||
getAuthenticatedUserProfile: vi.fn(),
|
||||
}));
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
vi.mock('../services/tokenStorage');
|
||||
|
||||
// Mock the logger to spy on its methods
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
const mockedTokenStorage = vi.mocked(tokenStorage);
|
||||
|
||||
|
||||
@@ -3,12 +3,11 @@ import { renderHook } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { useFlyerItems } from './useFlyerItems';
|
||||
import { useApiOnMount } from './useApiOnMount';
|
||||
import { createMockFlyer, createMockFlyerItem } from '../tests/utils/mockFactories';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import { createMockFlyer, createMockFlyerItem } from '../tests/utils/mockFactories';
|
||||
|
||||
// Mock the underlying useApiOnMount hook to isolate the useFlyerItems hook's logic.
|
||||
vi.mock('./useApiOnMount');
|
||||
vi.mock('../services/apiClient');
|
||||
|
||||
const mockedUseApiOnMount = vi.mocked(useApiOnMount);
|
||||
|
||||
@@ -61,7 +60,6 @@ describe('useFlyerItems Hook', () => {
|
||||
expect(result.current.flyerItems).toEqual([]);
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.error).toBeNull();
|
||||
|
||||
// Assert: Check that useApiOnMount was called with `enabled: false`.
|
||||
expect(mockedUseApiOnMount).toHaveBeenCalledWith(
|
||||
expect.any(Function), // the wrapped fetcher function
|
||||
@@ -171,11 +169,11 @@ describe('useFlyerItems Hook', () => {
|
||||
|
||||
const wrappedFetcher = mockedUseApiOnMount.mock.calls[0][0];
|
||||
const mockResponse = new Response();
|
||||
vi.mocked(apiClient.fetchFlyerItems).mockResolvedValue(mockResponse);
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
mockedApiClient.fetchFlyerItems.mockResolvedValue(mockResponse);
|
||||
const response = await wrappedFetcher(123);
|
||||
|
||||
expect(apiClient.fetchFlyerItems).toHaveBeenCalledWith(123);
|
||||
expect(mockedApiClient.fetchFlyerItems).toHaveBeenCalledWith(123);
|
||||
expect(response).toBe(mockResponse);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -29,7 +29,6 @@ type MockApiResult = {
|
||||
vi.mock('./useApi');
|
||||
vi.mock('../hooks/useAuth');
|
||||
vi.mock('../hooks/useUserData');
|
||||
vi.mock('../services/apiClient');
|
||||
|
||||
// The apiClient is globally mocked in our test setup, so we just need to cast it
|
||||
const mockedUseApi = vi.mocked(useApi);
|
||||
|
||||
@@ -17,7 +17,6 @@ import {
|
||||
vi.mock('./useApi');
|
||||
vi.mock('../hooks/useAuth');
|
||||
vi.mock('../hooks/useUserData');
|
||||
vi.mock('../services/apiClient');
|
||||
|
||||
// The apiClient is globally mocked in our test setup, so we just need to cast it
|
||||
const mockedUseApi = vi.mocked(useApi);
|
||||
|
||||
@@ -1,25 +1,15 @@
|
||||
// src/components/MyDealsPage.test.tsx
|
||||
// src/pages/MyDealsPage.test.tsx
|
||||
import React from 'react';
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
|
||||
import MyDealsPage from './MyDealsPage';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import { WatchedItemDeal } from '../types';
|
||||
import type { WatchedItemDeal } from '../types';
|
||||
import { logger } from '../services/logger.client';
|
||||
import { createMockWatchedItemDeal } from '../tests/utils/mockFactories';
|
||||
|
||||
// Mock the apiClient. The component now directly uses `fetchBestSalePrices`.
|
||||
// By mocking the entire module, we can control the behavior of `fetchBestSalePrices`
|
||||
// for our tests.
|
||||
vi.mock('../services/apiClient');
|
||||
const mockedApiClient = apiClient as Mocked<typeof apiClient>;
|
||||
|
||||
// Mock the logger
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// Mock lucide-react icons to prevent rendering errors in the test environment
|
||||
vi.mock('lucide-react', () => ({
|
||||
|
||||
@@ -10,13 +10,7 @@ import { logger } from '../services/logger.client';
|
||||
// The apiClient and logger are now mocked globally.
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// The logger is mocked globally.
|
||||
// Helper function to render the component within a router context
|
||||
const renderWithRouter = (token: string) => {
|
||||
return render(
|
||||
|
||||
@@ -11,16 +11,8 @@ import {
|
||||
createMockUser,
|
||||
} from '../tests/utils/mockFactories';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('../services/apiClient'); // This was correct
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
vi.mock('../services/notificationService');
|
||||
vi.mock('../services/aiApiClient'); // Mock aiApiClient as it's used in the component
|
||||
// The apiClient, logger, notificationService, and aiApiClient are all mocked globally.
|
||||
// We can get a typed reference to the notificationService for individual test overrides.
|
||||
const mockedNotificationService = vi.mocked(await import('../services/notificationService'));
|
||||
vi.mock('../components/AchievementsList', () => ({
|
||||
AchievementsList: ({ achievements }: { achievements: (UserAchievement & Achievement)[] }) => (
|
||||
@@ -28,7 +20,7 @@ vi.mock('../components/AchievementsList', () => ({
|
||||
),
|
||||
}));
|
||||
|
||||
const mockedApiClient = apiClient as Mocked<typeof apiClient>;
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// --- Mock Data ---
|
||||
const mockProfile: UserProfile = createMockUserProfile({
|
||||
|
||||
@@ -10,21 +10,10 @@ import { logger } from '../services/logger.client';
|
||||
// Extensive logging for debugging
|
||||
const LOG_PREFIX = '[TEST DEBUG]';
|
||||
|
||||
vi.mock('../services/notificationService');
|
||||
|
||||
// 1. Mock the module to replace its exports with mock functions.
|
||||
vi.mock('../services/aiApiClient');
|
||||
// 2. Get a typed reference to the mocked module to control its functions in tests.
|
||||
// The aiApiClient, notificationService, and logger are mocked globally.
|
||||
// We can get a typed reference to the aiApiClient for individual test overrides.
|
||||
const mockedAiApiClient = vi.mocked(aiApiClient);
|
||||
|
||||
// Mock the logger
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Define mock at module level so it can be referenced in the implementation
|
||||
const mockAudioPlay = vi.fn(() => {
|
||||
console.log(`${LOG_PREFIX} mockAudioPlay executed`);
|
||||
|
||||
@@ -6,16 +6,9 @@ import { MemoryRouter } from 'react-router-dom';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { logger } from '../../services/logger.client';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('../../services/apiClient', () => ({
|
||||
getFlyersForReview: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../../services/logger.client', () => ({
|
||||
logger: {
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
// The apiClient and logger are mocked globally.
|
||||
// We can get a typed reference to the apiClient for individual test overrides.
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// Mock LoadingSpinner to simplify DOM and avoid potential issues
|
||||
vi.mock('../../components/LoadingSpinner', () => ({
|
||||
@@ -29,7 +22,7 @@ describe('FlyerReviewPage', () => {
|
||||
|
||||
it('renders loading spinner initially', () => {
|
||||
// Mock a promise that doesn't resolve immediately to check loading state
|
||||
vi.mocked(apiClient.getFlyersForReview).mockReturnValue(new Promise(() => {}));
|
||||
mockedApiClient.getFlyersForReview.mockReturnValue(new Promise(() => {}));
|
||||
|
||||
render(
|
||||
<MemoryRouter>
|
||||
@@ -41,7 +34,7 @@ describe('FlyerReviewPage', () => {
|
||||
});
|
||||
|
||||
it('renders empty state when no flyers are returned', async () => {
|
||||
vi.mocked(apiClient.getFlyersForReview).mockResolvedValue({
|
||||
mockedApiClient.getFlyersForReview.mockResolvedValue({
|
||||
ok: true,
|
||||
json: async () => [],
|
||||
} as Response);
|
||||
@@ -84,7 +77,7 @@ describe('FlyerReviewPage', () => {
|
||||
},
|
||||
];
|
||||
|
||||
vi.mocked(apiClient.getFlyersForReview).mockResolvedValue({
|
||||
mockedApiClient.getFlyersForReview.mockResolvedValue({
|
||||
ok: true,
|
||||
json: async () => mockFlyers,
|
||||
} as Response);
|
||||
@@ -114,7 +107,7 @@ describe('FlyerReviewPage', () => {
|
||||
});
|
||||
|
||||
it('renders error message when API response is not ok', async () => {
|
||||
vi.mocked(apiClient.getFlyersForReview).mockResolvedValue({
|
||||
mockedApiClient.getFlyersForReview.mockResolvedValue({
|
||||
ok: false,
|
||||
json: async () => ({ message: 'Server error' }),
|
||||
} as Response);
|
||||
@@ -138,7 +131,7 @@ describe('FlyerReviewPage', () => {
|
||||
|
||||
it('renders error message when API throws an error', async () => {
|
||||
const networkError = new Error('Network error');
|
||||
vi.mocked(apiClient.getFlyersForReview).mockRejectedValue(networkError);
|
||||
mockedApiClient.getFlyersForReview.mockRejectedValue(networkError);
|
||||
|
||||
render(
|
||||
<MemoryRouter>
|
||||
@@ -159,7 +152,7 @@ describe('FlyerReviewPage', () => {
|
||||
|
||||
it('renders a generic error for non-Error rejections', async () => {
|
||||
const nonErrorRejection = { message: 'This is not an Error object' };
|
||||
vi.mocked(apiClient.getFlyersForReview).mockRejectedValue(nonErrorRejection);
|
||||
mockedApiClient.getFlyersForReview.mockRejectedValue(nonErrorRejection);
|
||||
|
||||
render(
|
||||
<MemoryRouter>
|
||||
|
||||
@@ -12,14 +12,9 @@ import {
|
||||
} from '../../../tests/utils/mockFactories';
|
||||
import { renderWithProviders } from '../../../tests/utils/renderWithProviders';
|
||||
|
||||
// Cast the mocked module to its mocked type to retain type safety and autocompletion.
|
||||
// The apiClient is now mocked globally via src/tests/setup/tests-setup-unit.ts.
|
||||
const mockedApiClient = apiClient as Mocked<typeof apiClient>;
|
||||
|
||||
// Mock the logger
|
||||
vi.mock('../../../services/logger', () => ({
|
||||
logger: { info: vi.fn(), error: vi.fn() },
|
||||
}));
|
||||
// The apiClient and logger are mocked globally.
|
||||
// We can get a typed reference to the apiClient for individual test overrides.
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// Mock the ConfirmationModal to test its props and interactions
|
||||
// The ConfirmationModal is now in a different directory.
|
||||
|
||||
@@ -21,25 +21,10 @@ vi.mock('../../../components/PasswordInput', () => ({
|
||||
PasswordInput: (props: any) => <input {...props} data-testid="password-input" />,
|
||||
}));
|
||||
|
||||
// The apiClient, notificationService, react-hot-toast, and logger are all mocked globally.
|
||||
// We can get a typed reference to the apiClient for individual test overrides.
|
||||
const mockedApiClient = vi.mocked(apiClient, true);
|
||||
|
||||
vi.mock('../../../services/notificationService');
|
||||
vi.mock('react-hot-toast', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
success: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
vi.mock('../../../services/logger.client', () => ({
|
||||
logger: {
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
const mockOnClose = vi.fn();
|
||||
const mockOnLoginSuccess = vi.fn();
|
||||
const mockOnSignOut = vi.fn();
|
||||
@@ -982,6 +967,13 @@ describe('ProfileManager', () => {
|
||||
|
||||
it('should show error notification when auto-geocoding fails', async () => {
|
||||
vi.useFakeTimers();
|
||||
// FIX: Mock getUserAddress to return an address *without* coordinates.
|
||||
// This is the condition required to trigger the auto-geocoding logic.
|
||||
const addressWithoutCoords = { ...mockAddress, latitude: undefined, longitude: undefined };
|
||||
mockedApiClient.getUserAddress.mockResolvedValue(
|
||||
new Response(JSON.stringify(addressWithoutCoords)),
|
||||
);
|
||||
|
||||
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||
|
||||
// Wait for initial load
|
||||
|
||||
@@ -8,46 +8,11 @@ import toast from 'react-hot-toast';
|
||||
import { createMockUser } from '../../../tests/utils/mockFactories';
|
||||
import { renderWithProviders } from '../../../tests/utils/renderWithProviders';
|
||||
|
||||
// Mock the entire apiClient module to ensure all exports are defined.
|
||||
// This is the primary fix for the error: [vitest] No "..." export is defined on the mock.
|
||||
vi.mock('../../../services/apiClient', () => ({
|
||||
// Mocks for providers used by renderWithProviders
|
||||
fetchFlyers: vi.fn(),
|
||||
fetchMasterItems: vi.fn(),
|
||||
fetchWatchedItems: vi.fn(),
|
||||
fetchShoppingLists: vi.fn(),
|
||||
getAuthenticatedUserProfile: vi.fn(),
|
||||
pingBackend: vi.fn(),
|
||||
checkStorage: vi.fn(),
|
||||
checkDbPoolHealth: vi.fn(),
|
||||
checkPm2Status: vi.fn(),
|
||||
checkRedisHealth: vi.fn(),
|
||||
checkDbSchema: vi.fn(),
|
||||
loginUser: vi.fn(),
|
||||
triggerFailingJob: vi.fn(),
|
||||
clearGeocodeCache: vi.fn(),
|
||||
}));
|
||||
// Get a type-safe mocked version of the apiClient module.
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
// We can get a type-safe mocked version of the module to override functions for specific tests.
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// Correct the relative path to the logger module.
|
||||
vi.mock('../../../services/logger', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock toast to check for notifications
|
||||
vi.mock('react-hot-toast', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
success: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
// The logger and react-hot-toast are mocked globally.
|
||||
|
||||
describe('SystemCheck', () => {
|
||||
// Store original env variable
|
||||
|
||||
@@ -6,14 +6,8 @@ import { ApiProvider } from './ApiProvider';
|
||||
import { ApiContext } from '../contexts/ApiContext';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
|
||||
// Mock the apiClient module.
|
||||
// Since ApiProvider and ApiContext import * as apiClient, mocking it ensures
|
||||
// we control the reference identity and can verify it's being passed correctly.
|
||||
vi.mock('../services/apiClient', () => ({
|
||||
fetchFlyers: vi.fn(),
|
||||
fetchMasterItems: vi.fn(),
|
||||
// Add other mocked methods as needed for the shape to be valid-ish
|
||||
}));
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
// This test verifies that the ApiProvider correctly provides this mocked module.
|
||||
|
||||
describe('ApiProvider & ApiContext', () => {
|
||||
const TestConsumer = () => {
|
||||
|
||||
@@ -4,12 +4,12 @@ import { render, screen, waitFor, fireEvent, act } from '@testing-library/react'
|
||||
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
|
||||
import { AuthProvider } from './AuthProvider';
|
||||
import { AuthContext } from '../contexts/AuthContext';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import * as tokenStorage from '../services/tokenStorage';
|
||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
|
||||
// Mocks
|
||||
vi.mock('../services/apiClient');
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
vi.mock('../services/tokenStorage');
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
@@ -20,7 +20,7 @@ vi.mock('../services/logger.client', () => ({
|
||||
},
|
||||
}));
|
||||
|
||||
const mockedApiClient = apiClient as Mocked<typeof apiClient>;
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
const mockedTokenStorage = tokenStorage as Mocked<typeof tokenStorage>;
|
||||
|
||||
const mockProfile = createMockUserProfile({
|
||||
@@ -198,7 +198,7 @@ describe('AuthProvider', () => {
|
||||
await waitFor(() => {
|
||||
// The error is now caught and displayed by the TestConsumer
|
||||
expect(screen.getByTestId('error-display')).toHaveTextContent(
|
||||
'Login succeeded, but failed to fetch your data: API is down',
|
||||
'Login succeeded, but failed to fetch your data: Received null or undefined profile from API.',
|
||||
);
|
||||
|
||||
expect(mockedTokenStorage.setToken).toHaveBeenCalledWith('test-token-no-profile');
|
||||
|
||||
@@ -94,8 +94,8 @@ export class AIService {
// The fallback list is ordered by preference (speed/cost vs. power).
// We try the fastest models first, then the more powerful 'pro' model as a high-quality fallback,
// and finally the 'lite' model as a last resort.
private readonly models = [ 'gemini-3-flash-preview', 'gemini-2.5-flash', 'gemini-2.5-flash-lite', 'gemma-3-27b', 'gemma-3-12b'];
private readonly models_lite = ["gemma-3-4b", "gemma-3-2b", "gemma-3-1b"];
private readonly models = [ 'gemini-3-flash-preview','gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite','gemini-2.0-flash-001','gemini-2.0-flash','gemini-2.0-flash-exp','gemini-2.0-flash-lite-001','gemini-2.0-flash-lite', 'gemma-3-27b-it', 'gemma-3-12b-it'];
private readonly models_lite = ["gemma-3-4b-it", "gemma-3-2b-it", "gemma-3-1b-it"];

constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
this.logger = logger;
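The widened model list above only helps if callers fall back through it in order when a model errors out. A minimal sketch of such an ordered fallback is shown below; it is illustrative only, and the generate callback is a hypothetical stand-in, not the actual AIService method from this diff.

// Illustrative ordered-fallback helper (assumption: not the real AIService code).
// It tries each model in priority order and returns the first successful result.
export async function generateWithFallback<T>(
  models: readonly string[],
  generate: (model: string) => Promise<T>, // hypothetical per-model call
  onError?: (model: string, err: unknown) => void,
): Promise<T> {
  let lastError: unknown;
  for (const model of models) {
    try {
      return await generate(model);
    } catch (err) {
      lastError = err;
      onError?.(model, err); // e.g. log the failure and continue with the next model
    }
  }
  throw lastError ?? new Error('All models in the fallback list failed');
}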
@@ -103,17 +103,22 @@ export class FlyerRepository {
|
||||
const result = await this.db.query<Flyer>(query, values);
|
||||
return result.rows[0];
|
||||
} catch (error) {
|
||||
const isChecksumError =
|
||||
error instanceof Error && error.message.includes('flyers_checksum_check');
|
||||
const errorMessage = error instanceof Error ? error.message : '';
|
||||
let checkMsg = 'A database check constraint failed.';
|
||||
|
||||
if (errorMessage.includes('flyers_checksum_check')) {
|
||||
checkMsg =
|
||||
'The provided checksum is invalid or does not meet format requirements (e.g., must be a 64-character SHA-256 hash).';
|
||||
} else if (errorMessage.includes('flyers_status_check')) {
|
||||
checkMsg = 'Invalid status provided for flyer.';
|
||||
} else if (errorMessage.includes('url_check')) {
|
||||
checkMsg = 'Invalid URL format provided for image or icon.';
|
||||
}
|
||||
|
||||
handleDbError(error, logger, 'Database error in insertFlyer', { flyerData }, {
|
||||
uniqueMessage: 'A flyer with this checksum already exists.',
|
||||
fkMessage: 'The specified user or store for this flyer does not exist.',
|
||||
// Provide a more specific message for the checksum constraint violation,
|
||||
// which is a common issue during seeding or testing with placeholder data.
|
||||
checkMessage: isChecksumError
|
||||
? 'The provided checksum is invalid or does not meet format requirements (e.g., must be a 64-character SHA-256 hash).'
|
||||
: 'Invalid status provided for flyer.',
|
||||
checkMessage: checkMsg,
|
||||
defaultMessage: 'Failed to insert flyer into database.',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -83,8 +83,8 @@ describe('FlyerDataTransformer', () => {
|
||||
// 1. Check flyer data
|
||||
expect(flyerData).toEqual({
|
||||
file_name: originalFileName,
|
||||
image_url: '/flyer-images/flyer-page-1.jpg',
|
||||
icon_url: '/flyer-images/icons/icon-flyer-page-1.webp',
|
||||
image_url: `http://localhost:3000/flyer-images/flyer-page-1.jpg`,
|
||||
icon_url: `http://localhost:3000/flyer-images/icons/icon-flyer-page-1.webp`,
|
||||
checksum,
|
||||
store_name: 'Test Store',
|
||||
valid_from: '2024-01-01',
|
||||
@@ -167,8 +167,8 @@ describe('FlyerDataTransformer', () => {
|
||||
expect(itemsForDb).toHaveLength(0);
|
||||
expect(flyerData).toEqual({
|
||||
file_name: originalFileName,
|
||||
image_url: '/flyer-images/another.png',
|
||||
icon_url: '/flyer-images/icons/icon-another.webp',
|
||||
image_url: `http://localhost:3000/flyer-images/another.png`,
|
||||
icon_url: `http://localhost:3000/flyer-images/icons/icon-another.webp`,
|
||||
checksum,
|
||||
store_name: 'Unknown Store (auto)', // Should use fallback
|
||||
valid_from: null,
|
||||
|
||||
@@ -23,14 +23,14 @@ export class FlyerDataTransformer {
|
||||
): FlyerItemInsert {
|
||||
return {
|
||||
...item,
|
||||
// Use logical OR to default falsy values (null, undefined, '') to a fallback.
|
||||
// The trim is important for cases where the AI returns only whitespace.
|
||||
item: String(item.item || '').trim() || 'Unknown Item',
|
||||
// Use nullish coalescing to default only null/undefined to an empty string.
|
||||
price_display: String(item.price_display ?? ''),
|
||||
quantity: String(item.quantity ?? ''),
|
||||
// Use logical OR to default falsy category names (null, undefined, '') to a fallback.
|
||||
category_name: String(item.category_name || 'Other/Miscellaneous'),
|
||||
// Use nullish coalescing and trim for robustness.
|
||||
// An empty or whitespace-only name falls back to 'Unknown Item'.
|
||||
item: (item.item ?? '').trim() || 'Unknown Item',
|
||||
// Default null/undefined to an empty string and trim.
|
||||
price_display: (item.price_display ?? '').trim(),
|
||||
quantity: (item.quantity ?? '').trim(),
|
||||
// An empty or whitespace-only category falls back to 'Other/Miscellaneous'.
|
||||
category_name: (item.category_name ?? '').trim() || 'Other/Miscellaneous',
|
||||
// Use nullish coalescing to convert null to undefined for the database.
|
||||
master_item_id: item.master_item_id ?? undefined,
|
||||
view_count: 0,
|
||||
@@ -75,10 +75,13 @@ export class FlyerDataTransformer {
logger.warn('AI did not return a store name. Using fallback "Unknown Store (auto)".');
}

// Construct proper URLs including protocol and host to satisfy DB constraints
const baseUrl = process.env.BASE_URL || `http://localhost:${process.env.PORT || 3000}`;

const flyerData: FlyerInsert = {
file_name: originalFileName,
image_url: `/flyer-images/${path.basename(firstImage)}`,
icon_url: `/flyer-images/icons/${iconFileName}`,
image_url: new URL(`/flyer-images/${path.basename(firstImage)}`, baseUrl).href,
icon_url: new URL(`/flyer-images/icons/${iconFileName}`, baseUrl).href,
checksum,
store_name: storeName,
valid_from: extractedData.valid_from,
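For reference, the WHATWG URL constructor resolves a path against a base origin, which is why the values built above satisfy the database's https?:// URL check constraints (the literals below are placeholders):

// Quick illustration of the URL resolution used above (placeholder values).
const baseUrl = 'http://localhost:3000';
const href = new URL('/flyer-images/flyer-page-1.jpg', baseUrl).href;
// href === 'http://localhost:3000/flyer-images/flyer-page-1.jpg'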
@@ -163,8 +163,8 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
// Before each modification test, create a fresh flyer item and a correction for it.
|
||||
beforeEach(async () => {
|
||||
const flyerRes = await getPool().query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
|
||||
VALUES ($1, 'admin-test.jpg', 'https://example.com/flyer-images/asdmin-test.jpg', 1, $2) RETURNING flyer_id`,
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
|
||||
VALUES ($1, 'admin-test.jpg', 'https://example.com/flyer-images/asdmin-test.jpg', 'https://example.com/flyer-images/icons/admin-test.jpg', 1, $2) RETURNING flyer_id`,
|
||||
// The checksum must be a unique 64-character string to satisfy the DB constraint.
|
||||
// We generate a dynamic string and pad it to 64 characters.
|
||||
[testStoreId, `checksum-${Date.now()}-${Math.random()}`.padEnd(64, '0')],
|
||||
|
||||
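A quick note on the checksum expression used above: String.prototype.padEnd only guarantees a minimum length and never truncates, so this works because the generated prefix is comfortably under 64 characters (the values below are illustrative):

// padEnd pads up to the target length but never truncates; the template string here is
// roughly 40 characters, so the result is exactly 64 characters, as the constraint expects.
const checksum = `checksum-${Date.now()}-${Math.random()}`.padEnd(64, '0');
console.log(checksum.length); // 64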
@@ -240,7 +240,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason);
|
||||
}
|
||||
expect(jobStatus?.state).toBe('completed');
|
||||
const flyerId = jobStatus?.data?.flyerId;
|
||||
const flyerId = jobStatus?.returnValue?.flyerId;
|
||||
expect(flyerId).toBeTypeOf('number');
|
||||
createdFlyerIds.push(flyerId);
|
||||
|
||||
@@ -326,7 +326,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
console.error('[DEBUG] PNG metadata test job failed:', jobStatus.failedReason);
|
||||
}
|
||||
expect(jobStatus?.state).toBe('completed');
|
||||
const flyerId = jobStatus?.data?.flyerId;
|
||||
const flyerId = jobStatus?.returnValue?.flyerId;
|
||||
expect(flyerId).toBeTypeOf('number');
|
||||
createdFlyerIds.push(flyerId);
|
||||
|
||||
|
||||
@@ -24,8 +24,8 @@ describe('Public Flyer API Routes Integration Tests', () => {
|
||||
const storeId = storeRes.rows[0].store_id;
|
||||
|
||||
const flyerRes = await getPool().query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
|
||||
VALUES ($1, 'integration-test.jpg', 'https://example.com/flyer-images/integration-test.jpg', 1, $2) RETURNING flyer_id`,
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
|
||||
VALUES ($1, 'integration-test.jpg', 'https://example.com/flyer-images/integration-test.jpg', 'https://example.com/flyer-images/icons/integration-test.jpg', 1, $2) RETURNING flyer_id`,
|
||||
[storeId, `${Date.now().toString(16)}`.padEnd(64, '0')],
|
||||
);
|
||||
createdFlyerId = flyerRes.rows[0].flyer_id;
|
||||
|
||||
@@ -34,22 +34,22 @@ describe('Price History API Integration Test (/api/price-history)', () => {
|
||||
|
||||
// 3. Create two flyers with different dates
|
||||
const flyerRes1 = await pool.query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
|
||||
VALUES ($1, 'price-test-1.jpg', 'https://example.com/flyer-images/price-test-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
|
||||
VALUES ($1, 'price-test-1.jpg', 'https://example.com/flyer-images/price-test-1.jpg', 'https://example.com/flyer-images/icons/price-test-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
|
||||
[storeId, `${Date.now().toString(16)}1`.padEnd(64, '0')],
|
||||
);
|
||||
flyerId1 = flyerRes1.rows[0].flyer_id;
|
||||
|
||||
const flyerRes2 = await pool.query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
|
||||
VALUES ($1, 'price-test-2.jpg', 'https://example.com/flyer-images/price-test-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
|
||||
VALUES ($1, 'price-test-2.jpg', 'https://example.com/flyer-images/price-test-2.jpg', 'https://example.com/flyer-images/icons/price-test-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
|
||||
[storeId, `${Date.now().toString(16)}2`.padEnd(64, '0')],
|
||||
);
|
||||
flyerId2 = flyerRes2.rows[0].flyer_id; // This was a duplicate, fixed.
|
||||
|
||||
const flyerRes3 = await pool.query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
|
||||
VALUES ($1, 'price-test-3.jpg', 'https://example.com/flyer-images/price-test-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
|
||||
VALUES ($1, 'price-test-3.jpg', 'https://example.com/flyer-images/price-test-3.jpg', 'https://example.com/flyer-images/icons/price-test-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
|
||||
[storeId, `${Date.now().toString(16)}3`.padEnd(64, '0')],
|
||||
);
|
||||
flyerId3 = flyerRes3.rows[0].flyer_id;
|
||||
|
||||
@@ -77,8 +77,8 @@ describe('Public API Routes Integration Tests', () => {
|
||||
);
|
||||
testStoreId = storeRes.rows[0].store_id;
|
||||
const flyerRes = await pool.query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
|
||||
VALUES ($1, 'public-routes-test.jpg', 'https://example.com/flyer-images/public-routes-test.jpg', 1, $2) RETURNING *`,
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
|
||||
VALUES ($1, 'public-routes-test.jpg', 'https://example.com/flyer-images/public-routes-test.jpg', 'https://example.com/flyer-images/icons/public-routes-test.jpg', 1, $2) RETURNING *`,
|
||||
[testStoreId, `${Date.now().toString(16)}`.padEnd(64, '0')],
|
||||
);
|
||||
testFlyer = flyerRes.rows[0];
|
||||
|
||||
src/tests/setup/globalApiMock.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
|
||||
// src/tests/setup/globalApiMock.ts
|
||||
import { vi } from 'vitest';
|
||||
|
||||
/**
|
||||
* Mocks the entire apiClient module.
|
||||
* This global mock is loaded for all tests via the `setupFiles` config in vitest.config.ts.
|
||||
* It prevents test failures in components that use providers (like FlyersProvider, AuthProvider)
|
||||
* which make API calls on mount when using `renderWithProviders`.
|
||||
*
|
||||
* Individual tests can override specific functions as needed, for example:
|
||||
*
|
||||
* import { vi } from 'vitest';
|
||||
* import * as apiClient from '../services/apiClient';
|
||||
*
|
||||
* const mockedApiClient = vi.mocked(apiClient);
|
||||
*
|
||||
* it('should test something', () => {
|
||||
* mockedApiClient.someFunction.mockResolvedValue({ ... });
|
||||
* // ... rest of the test
|
||||
* });
|
||||
*/
|
||||
vi.mock('../../services/apiClient', () => ({
|
||||
// --- Provider Mocks (with default successful responses) ---
|
||||
// These are essential for any test using renderWithProviders, as AppProviders
|
||||
// will mount all these data providers.
|
||||
fetchFlyers: vi.fn(() => Promise.resolve(new Response(JSON.stringify({ flyers: [], hasMore: false })))),
|
||||
fetchMasterItems: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))),
|
||||
fetchWatchedItems: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))),
|
||||
fetchShoppingLists: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))),
|
||||
getAuthenticatedUserProfile: vi.fn(() => Promise.resolve(new Response(JSON.stringify(null)))),
|
||||
fetchCategories: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))), // For CorrectionsPage
|
||||
fetchAllBrands: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))), // For AdminBrandManager
|
||||
|
||||
// --- General Mocks (return empty vi.fn() by default) ---
|
||||
// These functions are commonly used and can be implemented in specific tests.
|
||||
suggestRecipe: vi.fn(),
|
||||
getApplicationStats: vi.fn(),
|
||||
getSuggestedCorrections: vi.fn(),
|
||||
approveCorrection: vi.fn(),
|
||||
rejectCorrection: vi.fn(),
|
||||
updateSuggestedCorrection: vi.fn(),
|
||||
pingBackend: vi.fn(),
|
||||
checkStorage: vi.fn(),
|
||||
checkDbPoolHealth: vi.fn(),
|
||||
checkPm2Status: vi.fn(),
|
||||
checkRedisHealth: vi.fn(),
|
||||
checkDbSchema: vi.fn(),
|
||||
loginUser: vi.fn(),
|
||||
registerUser: vi.fn(),
|
||||
requestPasswordReset: vi.fn(),
|
||||
triggerFailingJob: vi.fn(),
|
||||
clearGeocodeCache: vi.fn(),
|
||||
uploadBrandLogo: vi.fn(),
|
||||
fetchActivityLog: vi.fn(),
|
||||
updateUserProfile: vi.fn(),
|
||||
updateUserPassword: vi.fn(),
|
||||
updateUserPreferences: vi.fn(),
|
||||
exportUserData: vi.fn(),
|
||||
deleteUserAccount: vi.fn(),
|
||||
getUserAddress: vi.fn(),
|
||||
updateUserAddress: vi.fn(),
|
||||
geocodeAddress: vi.fn(),
|
||||
getFlyersForReview: vi.fn(),
|
||||
fetchLeaderboard: vi.fn(),
|
||||
// --- Added to fix "No export is defined on the mock" errors ---
|
||||
fetchFlyerItems: vi.fn(),
|
||||
createShoppingList: vi.fn(),
|
||||
deleteShoppingList: vi.fn(),
|
||||
addShoppingListItem: vi.fn(),
|
||||
updateShoppingListItem: vi.fn(),
|
||||
removeShoppingListItem: vi.fn(),
|
||||
addWatchedItem: vi.fn(),
|
||||
removeWatchedItem: vi.fn(),
|
||||
fetchBestSalePrices: vi.fn(),
|
||||
resetPassword: vi.fn(),
|
||||
getUserAchievements: vi.fn(),
|
||||
uploadAvatar: vi.fn(),
|
||||
countFlyerItemsForFlyers: vi.fn(),
|
||||
fetchFlyerItemsForFlyers: vi.fn(),
|
||||
}));
|
||||
@@ -257,67 +257,6 @@ vi.mock('@google/genai', () => {
|
||||
};
|
||||
});
|
||||
|
||||
/**
|
||||
* Mocks the entire apiClient module.
|
||||
* This ensures that all test files that import from apiClient will get this mocked version.
|
||||
*/
|
||||
vi.mock('../../services/apiClient', () => ({
|
||||
// --- Auth ---
|
||||
registerUser: vi.fn(),
|
||||
loginUser: vi.fn(),
|
||||
getAuthenticatedUserProfile: vi.fn(),
|
||||
requestPasswordReset: vi.fn(),
|
||||
resetPassword: vi.fn(),
|
||||
updateUserPassword: vi.fn(),
|
||||
deleteUserAccount: vi.fn(),
|
||||
updateUserPreferences: vi.fn(),
|
||||
updateUserProfile: vi.fn(),
|
||||
// --- Data Fetching & Manipulation ---
|
||||
fetchFlyers: vi.fn(),
|
||||
fetchFlyerItems: vi.fn(),
|
||||
// Provide a default implementation that returns a valid Response object to prevent timeouts.
|
||||
fetchFlyerItemsForFlyers: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))),
|
||||
countFlyerItemsForFlyers: vi.fn(() =>
|
||||
Promise.resolve(new Response(JSON.stringify({ count: 0 }))),
|
||||
),
|
||||
fetchMasterItems: vi.fn(),
|
||||
fetchWatchedItems: vi.fn(),
|
||||
addWatchedItem: vi.fn(),
|
||||
removeWatchedItem: vi.fn(),
|
||||
fetchShoppingLists: vi.fn(),
|
||||
createShoppingList: vi.fn(),
|
||||
deleteShoppingList: vi.fn(),
|
||||
addShoppingListItem: vi.fn(),
|
||||
updateShoppingListItem: vi.fn(),
|
||||
removeShoppingListItem: vi.fn(),
|
||||
fetchHistoricalPriceData: vi.fn(),
|
||||
processFlyerFile: vi.fn(),
|
||||
uploadLogoAndUpdateStore: vi.fn(),
|
||||
exportUserData: vi.fn(),
|
||||
// --- Address ---
|
||||
getUserAddress: vi.fn(),
|
||||
updateUserAddress: vi.fn(),
|
||||
geocodeAddress: vi.fn(() => Promise.resolve(new Response(JSON.stringify({ lat: 0, lng: 0 })))),
|
||||
// --- Admin ---
|
||||
getSuggestedCorrections: vi.fn(),
|
||||
fetchCategories: vi.fn(),
|
||||
approveCorrection: vi.fn(),
|
||||
rejectCorrection: vi.fn(),
|
||||
updateSuggestedCorrection: vi.fn(),
|
||||
getApplicationStats: vi.fn(),
|
||||
fetchActivityLog: vi.fn(),
|
||||
fetchAllBrands: vi.fn(),
|
||||
uploadBrandLogo: vi.fn(),
|
||||
// --- System ---
|
||||
pingBackend: vi.fn(),
|
||||
checkDbSchema: vi.fn(),
|
||||
checkStorage: vi.fn(),
|
||||
checkDbPoolHealth: vi.fn(),
|
||||
checkRedisHealth: vi.fn(),
|
||||
checkPm2Status: vi.fn(),
|
||||
fetchLeaderboard: vi.fn(),
|
||||
}));
|
||||
|
||||
// FIX: Mock the aiApiClient module as well, which is used by AnalysisPanel
|
||||
vi.mock('../../services/aiApiClient', () => ({
|
||||
// Provide a default implementation that returns a valid Response object to prevent timeouts.
|
||||
|
||||
@@ -14,7 +14,7 @@ export interface Flyer {
|
||||
readonly flyer_id: number;
|
||||
file_name: string;
|
||||
image_url: string;
|
||||
icon_url?: string | null; // URL for the 64x64 icon version of the flyer
|
||||
icon_url: string; // URL for the 64x64 icon version of the flyer
|
||||
readonly checksum?: string;
|
||||
readonly store_id?: number;
|
||||
valid_from?: string | null;
|
||||
@@ -72,7 +72,7 @@ export interface FlyerItem {
|
||||
item: string;
|
||||
price_display: string;
|
||||
price_in_cents?: number | null;
|
||||
quantity?: string;
|
||||
quantity: string;
|
||||
quantity_num?: number | null;
|
||||
master_item_id?: number; // Can be updated by admin correction
|
||||
master_item_name?: string | null;
|
||||
@@ -536,7 +536,7 @@ export type ActivityLogAction =
|
||||
interface ActivityLogItemBase {
|
||||
readonly activity_log_id: number;
|
||||
readonly user_id: string | null;
|
||||
action: string;
|
||||
action: ActivityLogAction;
|
||||
display_text: string;
|
||||
icon?: string | null;
|
||||
readonly created_at: string;
|
||||
|
||||
src/types/exif-parser.d.ts (vendored, 102 changed lines)
@@ -5,4 +5,104 @@
|
||||
* which does not ship with its own TypeScript types. This allows TypeScript
|
||||
* to recognize it as a module and avoids "implicit any" errors.
|
||||
*/
|
||||
declare module 'exif-parser';
|
||||
declare module 'exif-parser' {
|
||||
/**
|
||||
* Represents the size of the image.
|
||||
*/
|
||||
export interface ImageSize {
|
||||
width: number;
|
||||
height: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents thumbnail data if available.
|
||||
*/
|
||||
export interface Thumbnail {
|
||||
format: string;
|
||||
width: number;
|
||||
height: number;
|
||||
offset: number;
|
||||
size: number;
|
||||
buffer: Buffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents GPS information if available.
|
||||
*/
|
||||
export interface GPS {
|
||||
latitude: number;
|
||||
longitude: number;
|
||||
altitude: number;
|
||||
latitudeRef: string;
|
||||
longitudeRef: string;
|
||||
altitudeRef: number;
|
||||
GPSDateStamp: string;
|
||||
GPSTimeStamp: number[]; // [hour, minute, second]
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents the parsed EXIF data structure.
|
||||
* This includes common tags and derived properties.
|
||||
*/
|
||||
export interface ExifData {
|
||||
/**
|
||||
* A dictionary of raw EXIF tags. Keys are tag names (e.g., 'Make', 'Model', 'DateTimeOriginal').
|
||||
* Values can be of various types (string, number, Date, etc.).
|
||||
*/
|
||||
tags: {
|
||||
Make?: string;
|
||||
Model?: string;
|
||||
Orientation?: number;
|
||||
XResolution?: number;
|
||||
YResolution?: number;
|
||||
ResolutionUnit?: number;
|
||||
DateTimeOriginal?: Date; // Parsed into a Date object
|
||||
DateTimeDigitized?: Date;
|
||||
ExposureTime?: number;
|
||||
FNumber?: number;
|
||||
ISOSpeedRatings?: number;
|
||||
ShutterSpeedValue?: number;
|
||||
ApertureValue?: number;
|
||||
BrightnessValue?: number;
|
||||
ExposureBiasValue?: number;
|
||||
MaxApertureValue?: number;
|
||||
MeteringMode?: number;
|
||||
LightSource?: number;
|
||||
Flash?: number;
|
||||
FocalLength?: number;
|
||||
ColorSpace?: number;
|
||||
ExifImageWidth?: number;
|
||||
ExifImageHeight?: number;
|
||||
ExposureMode?: number;
|
||||
WhiteBalance?: number;
|
||||
DigitalZoomRatio?: number;
|
||||
FocalLengthIn35mmFilm?: number;
|
||||
SceneCaptureType?: number;
|
||||
GainControl?: number;
|
||||
Contrast?: number;
|
||||
Saturation?: number;
|
||||
Sharpness?: number;
|
||||
SubjectDistanceRange?: number;
|
||||
GPSVersionID?: number[];
|
||||
GPSLatitudeRef?: string;
|
||||
GPSLatitude?: number[];
|
||||
GPSLongitudeRef?: string;
|
||||
GPSLongitude?: number[];
|
||||
GPSAltitudeRef?: number;
|
||||
GPSAltitude?: number;
|
||||
GPSTimeStamp?: number[];
|
||||
GPSDateStamp?: string;
|
||||
[key: string]: any; // Allow for other, less common tags
|
||||
};
|
||||
imageSize: ImageSize;
|
||||
thumbnail?: Thumbnail;
|
||||
gps?: GPS;
|
||||
}
|
||||
|
||||
export class ExifParser {
|
||||
static create(buffer: Buffer): ExifParser;
|
||||
parse(): ExifData;
|
||||
}
|
||||
|
||||
export default ExifParser;
|
||||
}
|
||||
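Given these declarations, a minimal usage sketch (the file path is an illustrative assumption, not a path from the crawler):

import { readFileSync } from 'fs';
import ExifParser from 'exif-parser';

// Hypothetical image path for illustration only.
const buffer = readFileSync('./uploads/flyer-photo.jpg');
const exif = ExifParser.create(buffer).parse();

// DateTimeOriginal is typed as a Date and gps is optional, per the declaration above.
console.log(exif.tags.DateTimeOriginal, exif.imageSize.width, exif.gps?.latitude);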
80
src/types/pdf-poppler.d.ts
vendored
@@ -7,37 +7,115 @@
 * structure, preventing import errors and enabling type checking.
 */
declare module 'pdf-poppler' {
  /**
   * Defines the options available for the main `convert` method.
   * This appears to be a simplified wrapper around pdftocairo.
   */
  export interface ConvertOptions {
    /**
     * The output image format.
     */
    format?: 'jpeg' | 'png' | 'tiff';
    /**
     * The directory where output images will be saved.
     */
    out_dir?: string;
    /**
     * The prefix for the output image files.
     */
    out_prefix?: string;
    /**
     * Specify a page number to convert a specific page, or null to convert all pages.
     */
    page?: number | null;
    /**
     * Specifies the resolution, in DPI. The default is 72 DPI.
     */
    resolution?: number;
    /**
     * Scales each page to fit in scale-to x scale-to pixel square.
     */
    scale_to?: number;
  }

  /**
   * Defines the options available for the pdfToCairo conversion method.
   * This interface can be expanded as more options are used.
   * These options correspond to the command-line arguments for the `pdftocairo` utility.
   */
  export interface PopplerOptions {
    antialias?: 'default' | 'gray' | 'none' | 'subpixel';
    cropBox?: boolean;
    cropHeight?: number;
    cropWidth?: number;
    cropSize?: number;
    cropX?: number;
    cropY?: number;
    duplex?: boolean;
    epsFile?: boolean;
    expand?: boolean;
    firstPage?: number;
    grayFile?: boolean;
    lastPage?: number;
    jpegFile?: boolean;
    jpegOptions?: string;
    level2?: boolean;
    level3?: boolean;
    monoFile?: boolean;
    noCenter?: boolean;
    noCrop?: boolean;
    noRotate?: boolean;
    noShrink?: boolean;
    ownerPassword?: string;
    paperHeight?: number;
    paperWidth?: number;
    paperSize?: 'letter' | 'legal' | 'A4' | 'A3' | 'match';
    pngFile?: boolean;
    psFile?: boolean;
    pdfFile?: boolean;
    resolution?: number;
    scaleTo?: number;
    scaleToX?: number;
    scaleToY?: number;
    svgFile?: boolean;
    tiffFile?: boolean;
    userPassword?: string;
  }

  /**
   * Defines the structure of the PDF information object returned by `pdfInfo`.
   */
  export interface PdfInfo {
    // Based on common pdfinfo output
    title: string;
    author: string;
    creator: string;
    producer: string;
    creationDate: string;
    modDate: string;
    tagged: boolean;
    form: string;
    pages: number;
    encrypted: boolean;
    pageSize: string;
    fileSize: string;
    optimized: boolean;
    pdfVersion: string;
  }

  export class Poppler {
    constructor(binPath?: string);
    pdfToCairo(file: string, outputFilePrefix?: string, options?: PopplerOptions): Promise<string>;
    pdfInfo(file: string, options?: { ownerPassword?: string; userPassword?: string }): Promise<PdfInfo>;
    pdfToPs(file: string, outputFile: string, options?: any): Promise<string>;
    pdfToText(file: string, outputFile: string, options?: any): Promise<string>;
  }

  /**
   * Converts a PDF file to images. This seems to be a convenience function provided by the library.
   * @param pdfPath The path to the PDF file.
   * @param options The conversion options.
   */
  export function convert(pdfPath: string, options?: ConvertOptions): Promise<string>;

  export default Poppler;
}
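A minimal usage sketch against these declarations (the path, output location, and DPI are illustrative assumptions, not values taken from the crawler):

import path from 'path';
import { convert, type ConvertOptions } from 'pdf-poppler';

// Hypothetical flyer PDF for illustration only.
const pdfPath = './downloads/weekly-flyer.pdf';

const options: ConvertOptions = {
  format: 'jpeg',
  out_dir: path.dirname(pdfPath),
  out_prefix: path.basename(pdfPath, '.pdf'),
  page: null,      // null converts every page
  resolution: 150, // DPI; the declaration notes the default is 72
};

// Resolves once pdftocairo has written one image per page next to the PDF.
await convert(pdfPath, options);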
@@ -41,12 +41,14 @@ export default defineConfig({
    // By default, Vitest does not suppress console logs.
    // The onConsoleLog hook is only needed if you want to conditionally filter specific logs.
    // Keeping the default behavior is often safer to avoid missing important warnings.

    environment: 'jsdom',
    // Explicitly point Vitest to the correct tsconfig and enable globals.
    globals: true, // tsconfig is auto-detected, so the explicit property is not needed and causes an error.
    globalSetup: './src/tests/setup/global-setup.ts',
    setupFiles: ['./src/tests/setup/tests-setup-unit.ts'],
    // The globalApiMock MUST come first to ensure it's applied before other mocks that might depend on it.
    setupFiles: [
      './src/tests/setup/globalApiMock.ts',
      './src/tests/setup/tests-setup-unit.ts',
    ],
    // Explicitly include only test files.
    // We remove 'src/vite-env.d.ts' which was causing it to be run as a test.
    include: ['src/**/*.test.{ts,tsx}'],
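The contents of globalApiMock.ts are not part of this diff; a minimal sketch of the kind of setup file the ordering comment implies (the fetch stub below is an assumption, not the project's actual implementation):

// src/tests/setup/globalApiMock.ts (hypothetical sketch)
import { vi } from 'vitest';

// Stub global fetch before the other setup files and tests run, so that any
// mock layered on top of it in later setup files sees a predictable baseline.
vi.stubGlobal(
  'fetch',
  vi.fn(async () =>
    new Response(JSON.stringify({}), {
      status: 200,
      headers: { 'Content-Type': 'application/json' },
    }),
  ),
);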