Compare commits

21 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 007ff8e538 |  |
|  | 1fc70e3915 |  |
|  | d891e47e02 |  |
|  | 08c39afde4 |  |
|  | c579543b8a |  |
|  | 0d84137786 |  |
|  | 20ee30c4b4 |  |
|  | 93612137e3 |  |
|  | 6e70f08e3c |  |
|  | 459f5f7976 |  |
|  | a2e6331ddd |  |
|  | 13cd30bec9 |  |
|  | baeb9488c6 |  |
|  | 0cba0f987e |  |
|  | 958a79997d |  |
|  | 8fb1c96f93 |  |
|  | 6e6fe80c7f |  |
|  | d1554050bd |  |
|  | b1fae270bb |  |
|  | c852483e18 |  |
|  | 2e01ad5bc9 |  |
@@ -52,6 +52,7 @@ module.exports = {
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
},
// Test Environment Settings
env_test: {
@@ -74,6 +75,7 @@ module.exports = {
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
},
// Development Environment Settings
env_development: {
@@ -97,6 +99,7 @@ module.exports = {
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
},
},
{
@@ -20,6 +20,9 @@ Create a new test file for `StatCard.tsx` to verify its props and rendering.

While master_schema_rollup.sql is assumed to be the "ultimate source of truth", issues can happen and it may not have been properly updated - look for differences between these files.

UPC SCANNING !
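The note above asks for a drift check between the SQL files. One hedged way to do that, assuming each schema file can be loaded into its own scratch PostgreSQL database, is to dump the column catalog from both databases and diff the two result sets. The query below is only an illustrative sketch and is not part of this changeset.

```sql
-- Drift check (illustrative): run against a database built from master_schema_rollup.sql,
-- then against one built from the other sql/ files, and diff the two outputs.
SELECT table_name, column_name, data_type, is_nullable
FROM information_schema.columns
WHERE table_schema = 'public'
ORDER BY table_name, ordinal_position;
```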
package-lock.json (generated)
@@ -1,12 +1,12 @@
{
"name": "flyer-crawler",
"version": "0.7.28",
"version": "0.9.8",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.7.28",
"version": "0.9.8",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",

package.json

@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.7.28",
"version": "0.9.8",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -1,477 +1,8 @@
-- sql/Initial_triggers_and_functions.sql
-- This file contains all trigger functions and trigger definitions for the database.

-- 1. Set up the trigger to automatically create a profile when a new user signs up.
-- This function is called by a trigger on the `public.users` table.
DROP FUNCTION IF EXISTS public.handle_new_user();

-- It creates a corresponding profile and a default shopping list for the new user.
-- It now accepts full_name and avatar_url from the user's metadata.
CREATE OR REPLACE FUNCTION public.handle_new_user()
RETURNS TRIGGER AS $$
DECLARE
new_profile_id UUID;
user_meta_data JSONB;
BEGIN
-- The user's metadata (full_name, avatar_url) is passed via a temporary session variable.
user_meta_data := current_setting('my_app.user_metadata', true)::JSONB;

INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
RETURNING user_id INTO new_profile_id;

-- Also create a default shopping list for the new user.
INSERT INTO public.shopping_lists (user_id, name)
VALUES (new.user_id, 'Main Shopping List');

-- Log the new user event
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (new.user_id, 'user_registered',
COALESCE(user_meta_data->>'full_name', new.email) || ' has registered.',
'user-plus',
jsonb_build_object('email', new.email)
);

RETURN new;
END;
$$ LANGUAGE plpgsql;

-- This trigger calls the function after a new user is created.
DROP TRIGGER IF EXISTS on_auth_user_created ON public.users;
CREATE TRIGGER on_auth_user_created
AFTER INSERT ON public.users
FOR EACH ROW EXECUTE FUNCTION public.handle_new_user();

-- 2. Create a reusable function to automatically update 'updated_at' columns.
DROP FUNCTION IF EXISTS public.handle_updated_at();

CREATE OR REPLACE FUNCTION public.handle_updated_at()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = now();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Dynamically apply the 'handle_updated_at' trigger to all tables in the public schema
-- that have an 'updated_at' column. This is more maintainable than creating a separate
-- trigger for each table.
DO $$
DECLARE
t_name TEXT;
BEGIN
FOR t_name IN
SELECT table_name
FROM information_schema.columns
WHERE table_schema = 'public' AND column_name = 'updated_at'
LOOP
EXECUTE format('DROP TRIGGER IF EXISTS on_%s_updated ON public.%I;
CREATE TRIGGER on_%s_updated
BEFORE UPDATE ON public.%I
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();',
t_name, t_name, t_name, t_name);
END LOOP;
END;
$$;

-- 3. Create a trigger function to populate the item_price_history table on insert.
DROP FUNCTION IF EXISTS public.update_price_history_on_flyer_item_insert();

CREATE OR REPLACE FUNCTION public.update_price_history_on_flyer_item_insert()
RETURNS TRIGGER AS $$
DECLARE
flyer_valid_from DATE;
flyer_valid_to DATE;
current_summary_date DATE;
flyer_location_id BIGINT;
BEGIN
-- If the item could not be matched, add it to the unmatched queue for review.
IF NEW.master_item_id IS NULL THEN
INSERT INTO public.unmatched_flyer_items (flyer_item_id)
VALUES (NEW.flyer_item_id)
ON CONFLICT (flyer_item_id) DO NOTHING;
END IF;

-- Only run if the new flyer item is linked to a master item and has a price.
IF NEW.master_item_id IS NULL OR NEW.price_in_cents IS NULL THEN
RETURN NEW;
END IF;

-- Get the validity dates of the flyer and the store_id.
SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to
FROM public.flyers
WHERE flyer_id = NEW.flyer_id;

-- This single, set-based query is much more performant than looping.
-- It generates all date/location pairs and inserts/updates them in one operation.
INSERT INTO public.item_price_history (master_item_id, summary_date, store_location_id, min_price_in_cents, max_price_in_cents, avg_price_in_cents, data_points_count)
SELECT
NEW.master_item_id,
d.day,
fl.store_location_id,
NEW.price_in_cents,
NEW.price_in_cents,
NEW.price_in_cents,
1
FROM public.flyer_locations fl
CROSS JOIN generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval) AS d(day)
WHERE fl.flyer_id = NEW.flyer_id
ON CONFLICT (master_item_id, summary_date, store_location_id)
DO UPDATE SET
min_price_in_cents = LEAST(item_price_history.min_price_in_cents, EXCLUDED.min_price_in_cents),
max_price_in_cents = GREATEST(item_price_history.max_price_in_cents, EXCLUDED.max_price_in_cents),
avg_price_in_cents = ROUND(((item_price_history.avg_price_in_cents * item_price_history.data_points_count) + EXCLUDED.avg_price_in_cents) / (item_price_history.data_points_count + 1.0)),
data_points_count = item_price_history.data_points_count + 1;

RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Create the trigger on the flyer_items table for insert.
DROP TRIGGER IF EXISTS trigger_update_price_history ON public.flyer_items;
CREATE TRIGGER trigger_update_price_history
AFTER INSERT ON public.flyer_items
FOR EACH ROW EXECUTE FUNCTION public.update_price_history_on_flyer_item_insert();

-- 4. Create a trigger function to recalculate price history when a flyer item is deleted.
DROP FUNCTION IF EXISTS public.recalculate_price_history_on_flyer_item_delete();

CREATE OR REPLACE FUNCTION public.recalculate_price_history_on_flyer_item_delete()
RETURNS TRIGGER AS $$
DECLARE
affected_dates RECORD;
BEGIN
-- Only run if the deleted item was linked to a master item and had a price.
IF OLD.master_item_id IS NULL OR OLD.price_in_cents IS NULL THEN
RETURN OLD;
END IF;

-- This single, set-based query is much more performant than looping.
-- It recalculates aggregates for all affected dates and locations at once.
WITH affected_days_and_locations AS (
-- 1. Get all date/location pairs affected by the deleted item's flyer.
SELECT DISTINCT
generate_series(f.valid_from, f.valid_to, '1 day'::interval)::date AS summary_date,
fl.store_location_id
FROM public.flyers f
JOIN public.flyer_locations fl ON f.flyer_id = fl.flyer_id
WHERE f.flyer_id = OLD.flyer_id
),
new_aggregates AS (
-- 2. For each affected date/location, recalculate the aggregates from all other relevant flyer items.
SELECT
adl.summary_date,
adl.store_location_id,
MIN(fi.price_in_cents) AS min_price,
MAX(fi.price_in_cents) AS max_price,
ROUND(AVG(fi.price_in_cents))::int AS avg_price,
COUNT(fi.flyer_item_id)::int AS data_points
FROM affected_days_and_locations adl
LEFT JOIN public.flyer_items fi ON fi.master_item_id = OLD.master_item_id AND fi.price_in_cents IS NOT NULL
LEFT JOIN public.flyers f ON fi.flyer_id = f.flyer_id AND adl.summary_date BETWEEN f.valid_from AND f.valid_to
LEFT JOIN public.flyer_locations fl ON fi.flyer_id = fl.flyer_id AND adl.store_location_id = fl.store_location_id
WHERE fl.flyer_id IS NOT NULL -- Ensure the join was successful
GROUP BY adl.summary_date, adl.store_location_id
)
-- 3. Update the history table with the new aggregates.
UPDATE public.item_price_history iph
SET
min_price_in_cents = na.min_price,
max_price_in_cents = na.max_price,
avg_price_in_cents = na.avg_price,
data_points_count = na.data_points
FROM new_aggregates na
WHERE iph.master_item_id = OLD.master_item_id
AND iph.summary_date = na.summary_date
AND iph.store_location_id = na.store_location_id;

-- 4. Delete any history records that no longer have any data points.
DELETE FROM public.item_price_history iph
WHERE iph.master_item_id = OLD.master_item_id
AND NOT EXISTS (
SELECT 1 FROM new_aggregates na
WHERE na.summary_date = iph.summary_date AND na.store_location_id = iph.store_location_id
);

RETURN OLD;
END;
$$ LANGUAGE plpgsql;

-- Create the trigger on the flyer_items table for DELETE operations.
DROP TRIGGER IF EXISTS trigger_recalculate_price_history_on_delete ON public.flyer_items;
CREATE TRIGGER trigger_recalculate_price_history_on_delete
AFTER DELETE ON public.flyer_items
FOR EACH ROW EXECUTE FUNCTION public.recalculate_price_history_on_flyer_item_delete();

-- 5. Trigger function to update the average rating on the recipes table.
DROP FUNCTION IF EXISTS public.update_recipe_rating_aggregates();

CREATE OR REPLACE FUNCTION public.update_recipe_rating_aggregates()
RETURNS TRIGGER AS $$
BEGIN
UPDATE public.recipes
SET
avg_rating = (
SELECT AVG(rating)
FROM public.recipe_ratings
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
),
rating_count = (
SELECT COUNT(*)
FROM public.recipe_ratings
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
)
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id);

RETURN NULL; -- The result is ignored since this is an AFTER trigger.
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after any change to recipe_ratings.
DROP TRIGGER IF EXISTS on_recipe_rating_change ON public.recipe_ratings;
CREATE TRIGGER on_recipe_rating_change
AFTER INSERT OR UPDATE OR DELETE ON public.recipe_ratings
FOR EACH ROW EXECUTE FUNCTION public.update_recipe_rating_aggregates();

-- 6. Trigger function to log the creation of a new recipe.
DROP FUNCTION IF EXISTS public.log_new_recipe();

CREATE OR REPLACE FUNCTION public.log_new_recipe()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.user_id,
'recipe_created',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' created a new recipe: ' || NEW.name,
'chef-hat',
jsonb_build_object('recipe_id', NEW.recipe_id, 'recipe_name', NEW.name)
);

-- Award 'First Recipe' achievement if it's their first one.
PERFORM public.award_achievement(NEW.user_id, 'First Recipe');

RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a new recipe is inserted.
DROP TRIGGER IF EXISTS on_new_recipe_created ON public.recipes;
CREATE TRIGGER on_new_recipe_created
AFTER INSERT ON public.recipes
FOR EACH ROW
WHEN (NEW.user_id IS NOT NULL) -- Only log activity for user-created recipes.
EXECUTE FUNCTION public.log_new_recipe();

-- 7a. Trigger function to update the item_count on the flyers table.
DROP FUNCTION IF EXISTS public.update_flyer_item_count();

CREATE OR REPLACE FUNCTION public.update_flyer_item_count()
RETURNS TRIGGER AS $$
BEGIN
IF (TG_OP = 'INSERT') THEN
UPDATE public.flyers SET item_count = item_count + 1 WHERE flyer_id = NEW.flyer_id;
ELSIF (TG_OP = 'DELETE') THEN
UPDATE public.flyers SET item_count = item_count - 1 WHERE flyer_id = OLD.flyer_id;
END IF;
RETURN NULL; -- The result is ignored since this is an AFTER trigger.
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after any change to flyer_items.
-- This ensures the item_count on the parent flyer is always accurate.
DROP TRIGGER IF EXISTS on_flyer_item_change ON public.flyer_items;
CREATE TRIGGER on_flyer_item_change
AFTER INSERT OR DELETE ON public.flyer_items
FOR EACH ROW EXECUTE FUNCTION public.update_flyer_item_count();

-- 7. Trigger function to log the creation of a new flyer.
DROP FUNCTION IF EXISTS public.log_new_flyer();

CREATE OR REPLACE FUNCTION public.log_new_flyer()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO public.activity_log (action, display_text, icon, details)
VALUES (
'flyer_uploaded',
'A new flyer for ' || (SELECT name FROM public.stores WHERE store_id = NEW.store_id) || ' has been uploaded.',
'file-text',
jsonb_build_object(
'flyer_id', NEW.flyer_id,
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id),
'valid_from', to_char(NEW.valid_from, 'YYYY-MM-DD'),
'valid_to', to_char(NEW.valid_to, 'YYYY-MM-DD')
)
);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a new flyer is inserted.
DROP TRIGGER IF EXISTS on_new_flyer_created ON public.flyers;
CREATE TRIGGER on_new_flyer_created
AFTER INSERT ON public.flyers
FOR EACH ROW EXECUTE FUNCTION public.log_new_flyer();

-- 8. Trigger function to log when a user favorites a recipe.
DROP FUNCTION IF EXISTS public.log_new_favorite_recipe();

CREATE OR REPLACE FUNCTION public.log_new_favorite_recipe()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.user_id,
'recipe_favorited',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' favorited the recipe: ' || (SELECT name FROM public.recipes WHERE recipe_id = NEW.recipe_id),
'heart',
jsonb_build_object(
'recipe_id', NEW.recipe_id
)
);

-- Award 'First Favorite' achievement.
PERFORM public.award_achievement(NEW.user_id, 'First Favorite');
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a recipe is favorited.
DROP TRIGGER IF EXISTS on_new_favorite_recipe ON public.favorite_recipes;
CREATE TRIGGER on_new_favorite_recipe
AFTER INSERT ON public.favorite_recipes
FOR EACH ROW EXECUTE FUNCTION public.log_new_favorite_recipe();

-- 9. Trigger function to log when a user shares a shopping list.
DROP FUNCTION IF EXISTS public.log_new_list_share();

CREATE OR REPLACE FUNCTION public.log_new_list_share()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.shared_by_user_id,
'list_shared',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a shopping list.',
'share-2',
jsonb_build_object(
'shopping_list_id', NEW.shopping_list_id,
'list_name', (SELECT name FROM public.shopping_lists WHERE shopping_list_id = NEW.shopping_list_id),
'shared_with_user_id', NEW.shared_with_user_id
)
);

-- Award 'List Sharer' achievement.
PERFORM public.award_achievement(NEW.shared_by_user_id, 'List Sharer');
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a shopping list is shared.
DROP TRIGGER IF EXISTS on_new_list_share ON public.shared_shopping_lists;
CREATE TRIGGER on_new_list_share
AFTER INSERT ON public.shared_shopping_lists
FOR EACH ROW EXECUTE FUNCTION public.log_new_list_share();

-- 9a. Trigger function to log when a user shares a recipe collection.
DROP FUNCTION IF EXISTS public.log_new_recipe_collection_share();

CREATE OR REPLACE FUNCTION public.log_new_recipe_collection_share()
RETURNS TRIGGER AS $$
BEGIN
-- Log the activity
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.shared_by_user_id, 'recipe_collection_shared',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a recipe collection.',
'book',
jsonb_build_object('collection_id', NEW.recipe_collection_id, 'shared_with_user_id', NEW.shared_with_user_id)
);

-- Award 'Recipe Sharer' achievement.
PERFORM public.award_achievement(NEW.shared_by_user_id, 'Recipe Sharer');
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS on_new_recipe_collection_share ON public.shared_recipe_collections;
CREATE TRIGGER on_new_recipe_collection_share
AFTER INSERT ON public.shared_recipe_collections
FOR EACH ROW EXECUTE FUNCTION public.log_new_recipe_collection_share();

-- 10. Trigger function to geocode a store location's address.
-- This function is designed to be extensible. In a production environment,
-- you would replace the placeholder with a call to an external geocoding service
-- (e.g., using the `http` extension or a `plpythonu` function) to convert
-- the address into geographic coordinates.
DROP FUNCTION IF EXISTS public.geocode_store_location();

CREATE OR REPLACE FUNCTION public.geocode_store_location()
RETURNS TRIGGER AS $$
DECLARE
full_address TEXT;
BEGIN
-- Only proceed if the address has actually changed.
IF TG_OP = 'INSERT' OR (TG_OP = 'UPDATE' AND NEW.address IS DISTINCT FROM OLD.address) THEN
-- Concatenate address parts into a single string for the geocoder.
full_address := CONCAT_WS(', ', NEW.address, NEW.city, NEW.province_state, NEW.postal_code);

-- ======================================================================
-- Placeholder for Geocoding API Call
-- ======================================================================
-- In a real application, you would call a geocoding service here.
-- For example, using the `http` extension:
--
-- DECLARE
--     response http_get;
--     lat NUMERIC;
--     lon NUMERIC;
-- BEGIN
--     SELECT * INTO response FROM http_get('https://api.geocodingservice.com/geocode?address=' || url_encode(full_address));
--     lat := (response.content::jsonb)->'results'->0->'geometry'->'location'->'lat';
--     lon := (response.content::jsonb)->'results'->0->'geometry'->'location'->'lng';
--     NEW.location := ST_SetSRID(ST_MakePoint(lon, lat), 4326)::geography;
-- END;
--
-- For now, this function does nothing, but the trigger is in place.
-- If you manually provide lat/lon, you could parse them here.
-- For this example, we will assume the `location` might be set manually
-- or by a separate batch process.
-- ======================================================================
END IF;

RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the geocoding function.
DROP TRIGGER IF EXISTS on_store_location_address_change ON public.store_locations;
CREATE TRIGGER on_store_location_address_change
BEFORE INSERT OR UPDATE ON public.store_locations
FOR EACH ROW EXECUTE FUNCTION public.geocode_store_location();

-- 11. Trigger function to increment the fork_count on the original recipe.
DROP FUNCTION IF EXISTS public.increment_recipe_fork_count();

CREATE OR REPLACE FUNCTION public.increment_recipe_fork_count()
RETURNS TRIGGER AS $$
BEGIN
-- Only run if the recipe is a fork (original_recipe_id is not null).
IF NEW.original_recipe_id IS NOT NULL THEN
UPDATE public.recipes SET fork_count = fork_count + 1 WHERE recipe_id = NEW.original_recipe_id;
-- Award 'First Fork' achievement.
PERFORM public.award_achievement(NEW.user_id, 'First Fork');
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS on_recipe_fork ON public.recipes;
CREATE TRIGGER on_recipe_fork
AFTER INSERT ON public.recipes
FOR EACH ROW EXECUTE FUNCTION public.increment_recipe_fork_count();
-- ============================================================================
-- PART 6: DATABASE FUNCTIONS
-- PART 3: DATABASE FUNCTIONS
-- ============================================================================
-- Function to find the best current sale price for a user's watched items.
DROP FUNCTION IF EXISTS public.get_best_sale_prices_for_user(UUID);
@@ -1336,8 +867,7 @@ AS $$
'list_shared'
-- 'new_recipe_rating' could be added here later
)
ORDER BY
al.created_at DESC
ORDER BY al.created_at DESC, al.display_text, al.icon
LIMIT p_limit
OFFSET p_offset;
$$;
@@ -1549,16 +1079,18 @@ $$;
-- It replaces the need to call get_best_sale_prices_for_user for each user individually.
-- Returns: TABLE(...) - A set of records including user details and deal information.
-- =================================================================
DROP FUNCTION IF EXISTS public.get_best_sale_prices_for_all_users();

CREATE OR REPLACE FUNCTION public.get_best_sale_prices_for_all_users()
RETURNS TABLE(
user_id uuid,
email text,
full_name text,
master_item_id integer,
master_item_id bigint,
item_name text,
best_price_in_cents integer,
store_name text,
flyer_id integer,
flyer_id bigint,
valid_to date
) AS $$
BEGIN
@@ -1569,11 +1101,12 @@ BEGIN
SELECT
fi.master_item_id,
fi.price_in_cents,
f.store_name,
s.name as store_name,
f.flyer_id,
f.valid_to
FROM public.flyer_items fi
JOIN public.flyers f ON fi.flyer_id = f.flyer_id
JOIN public.stores s ON f.store_id = s.store_id
WHERE
fi.master_item_id IS NOT NULL
AND fi.price_in_cents IS NOT NULL
@@ -1616,3 +1149,472 @@ BEGIN
bp.price_rank = 1;
END;
$$ LANGUAGE plpgsql;

-- ============================================================================
-- PART 4: TRIGGERS
-- ============================================================================

-- 1. Trigger to automatically create a profile when a new user signs up.
-- This function is called by a trigger on the `public.users` table.
DROP FUNCTION IF EXISTS public.handle_new_user();

-- It creates a corresponding profile and a default shopping list for the new user.
-- It now accepts full_name and avatar_url from the user's metadata.
CREATE OR REPLACE FUNCTION public.handle_new_user()
RETURNS TRIGGER AS $$
DECLARE
new_profile_id UUID;
user_meta_data JSONB;
BEGIN
-- The user's metadata (full_name, avatar_url) is passed via a temporary session variable.
user_meta_data := current_setting('my_app.user_metadata', true)::JSONB;

INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
RETURNING user_id INTO new_profile_id;

-- Also create a default shopping list for the new user.
INSERT INTO public.shopping_lists (user_id, name)
VALUES (new.user_id, 'Main Shopping List');

-- Log the new user event
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (new.user_id, 'user_registered',
COALESCE(user_meta_data->>'full_name', new.email) || ' has registered.',
'user-plus',
jsonb_build_object('email', new.email)
);

RETURN new;
END;
$$ LANGUAGE plpgsql;

-- This trigger calls the function after a new user is created.
DROP TRIGGER IF EXISTS on_auth_user_created ON public.users;
CREATE TRIGGER on_auth_user_created
AFTER INSERT ON public.users
FOR EACH ROW EXECUTE FUNCTION public.handle_new_user();

-- 2. Create a reusable function to automatically update 'updated_at' columns.
DROP FUNCTION IF EXISTS public.handle_updated_at();

CREATE OR REPLACE FUNCTION public.handle_updated_at()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = now();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Dynamically apply the 'handle_updated_at' trigger to all tables in the public schema
-- that have an 'updated_at' column. This is more maintainable than creating a separate
-- trigger for each table.
DO $$
DECLARE
t_name TEXT;
BEGIN
FOR t_name IN
SELECT table_name
FROM information_schema.columns
WHERE table_schema = 'public' AND column_name = 'updated_at'
LOOP
EXECUTE format('DROP TRIGGER IF EXISTS on_%s_updated ON public.%I;
CREATE TRIGGER on_%s_updated
BEFORE UPDATE ON public.%I
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();',
t_name, t_name, t_name, t_name);
END LOOP;
END;
$$;

-- 3. Create a trigger function to populate the item_price_history table on insert.
DROP FUNCTION IF EXISTS public.update_price_history_on_flyer_item_insert();

CREATE OR REPLACE FUNCTION public.update_price_history_on_flyer_item_insert()
RETURNS TRIGGER AS $$
DECLARE
flyer_valid_from DATE;
flyer_valid_to DATE;
current_summary_date DATE;
flyer_location_id BIGINT;
BEGIN
-- If the item could not be matched, add it to the unmatched queue for review.
IF NEW.master_item_id IS NULL THEN
INSERT INTO public.unmatched_flyer_items (flyer_item_id)
VALUES (NEW.flyer_item_id)
ON CONFLICT (flyer_item_id) DO NOTHING;
END IF;

-- Only run if the new flyer item is linked to a master item and has a price.
IF NEW.master_item_id IS NULL OR NEW.price_in_cents IS NULL THEN
RETURN NEW;
END IF;

-- Get the validity dates of the flyer and the store_id.
SELECT valid_from, valid_to INTO flyer_valid_from, flyer_valid_to
FROM public.flyers
WHERE flyer_id = NEW.flyer_id;

-- This single, set-based query is much more performant than looping.
-- It generates all date/location pairs and inserts/updates them in one operation.
INSERT INTO public.item_price_history (master_item_id, summary_date, store_location_id, min_price_in_cents, max_price_in_cents, avg_price_in_cents, data_points_count)
SELECT
NEW.master_item_id,
d.day,
fl.store_location_id,
NEW.price_in_cents,
NEW.price_in_cents,
NEW.price_in_cents,
1
FROM public.flyer_locations fl
CROSS JOIN generate_series(flyer_valid_from, flyer_valid_to, '1 day'::interval) AS d(day)
WHERE fl.flyer_id = NEW.flyer_id
ON CONFLICT (master_item_id, summary_date, store_location_id)
DO UPDATE SET
min_price_in_cents = LEAST(item_price_history.min_price_in_cents, EXCLUDED.min_price_in_cents),
max_price_in_cents = GREATEST(item_price_history.max_price_in_cents, EXCLUDED.max_price_in_cents),
avg_price_in_cents = ROUND(((item_price_history.avg_price_in_cents * item_price_history.data_points_count) + EXCLUDED.avg_price_in_cents) / (item_price_history.data_points_count + 1.0)),
data_points_count = item_price_history.data_points_count + 1;

RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Create the trigger on the flyer_items table for insert.
DROP TRIGGER IF EXISTS trigger_update_price_history ON public.flyer_items;
CREATE TRIGGER trigger_update_price_history
AFTER INSERT ON public.flyer_items
FOR EACH ROW EXECUTE FUNCTION public.update_price_history_on_flyer_item_insert();

-- 4. Create a trigger function to recalculate price history when a flyer item is deleted.
DROP FUNCTION IF EXISTS public.recalculate_price_history_on_flyer_item_delete();

CREATE OR REPLACE FUNCTION public.recalculate_price_history_on_flyer_item_delete()
RETURNS TRIGGER AS $$
DECLARE
affected_dates RECORD;
BEGIN
-- Only run if the deleted item was linked to a master item and had a price.
IF OLD.master_item_id IS NULL OR OLD.price_in_cents IS NULL THEN
RETURN OLD;
END IF;

-- This single, set-based query is much more performant than looping.
-- It recalculates aggregates for all affected dates and locations at once.
WITH affected_days_and_locations AS (
-- 1. Get all date/location pairs affected by the deleted item's flyer.
SELECT DISTINCT
generate_series(f.valid_from, f.valid_to, '1 day'::interval)::date AS summary_date,
fl.store_location_id
FROM public.flyers f
JOIN public.flyer_locations fl ON f.flyer_id = fl.flyer_id
WHERE f.flyer_id = OLD.flyer_id
),
new_aggregates AS (
-- 2. For each affected date/location, recalculate the aggregates from all other relevant flyer items.
SELECT
adl.summary_date,
adl.store_location_id,
MIN(fi.price_in_cents) AS min_price,
MAX(fi.price_in_cents) AS max_price,
ROUND(AVG(fi.price_in_cents))::int AS avg_price,
COUNT(fi.flyer_item_id)::int AS data_points
FROM affected_days_and_locations adl
LEFT JOIN public.flyer_items fi ON fi.master_item_id = OLD.master_item_id AND fi.price_in_cents IS NOT NULL
LEFT JOIN public.flyers f ON fi.flyer_id = f.flyer_id AND adl.summary_date BETWEEN f.valid_from AND f.valid_to
LEFT JOIN public.flyer_locations fl ON fi.flyer_id = fl.flyer_id AND adl.store_location_id = fl.store_location_id
WHERE fl.flyer_id IS NOT NULL -- Ensure the join was successful
GROUP BY adl.summary_date, adl.store_location_id
)
-- 3. Update the history table with the new aggregates.
UPDATE public.item_price_history iph
SET
min_price_in_cents = na.min_price,
max_price_in_cents = na.max_price,
avg_price_in_cents = na.avg_price,
data_points_count = na.data_points
FROM new_aggregates na
WHERE iph.master_item_id = OLD.master_item_id
AND iph.summary_date = na.summary_date
AND iph.store_location_id = na.store_location_id;

-- 4. Delete any history records that no longer have any data points.
DELETE FROM public.item_price_history iph
WHERE iph.master_item_id = OLD.master_item_id
AND NOT EXISTS (
SELECT 1 FROM new_aggregates na
WHERE na.summary_date = iph.summary_date AND na.store_location_id = iph.store_location_id
);

RETURN OLD;
END;
$$ LANGUAGE plpgsql;

-- Create the trigger on the flyer_items table for DELETE operations.
DROP TRIGGER IF EXISTS trigger_recalculate_price_history_on_delete ON public.flyer_items;
CREATE TRIGGER trigger_recalculate_price_history_on_delete
AFTER DELETE ON public.flyer_items
FOR EACH ROW EXECUTE FUNCTION public.recalculate_price_history_on_flyer_item_delete();

-- 5. Trigger function to update the average rating on the recipes table.
DROP FUNCTION IF EXISTS public.update_recipe_rating_aggregates();

CREATE OR REPLACE FUNCTION public.update_recipe_rating_aggregates()
RETURNS TRIGGER AS $$
BEGIN
UPDATE public.recipes
SET
avg_rating = (
SELECT AVG(rating)
FROM public.recipe_ratings
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
),
rating_count = (
SELECT COUNT(*)
FROM public.recipe_ratings
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id) -- This is correct, no change needed
)
WHERE recipe_id = COALESCE(NEW.recipe_id, OLD.recipe_id);

RETURN NULL; -- The result is ignored since this is an AFTER trigger.
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after any change to recipe_ratings.
DROP TRIGGER IF EXISTS on_recipe_rating_change ON public.recipe_ratings;
CREATE TRIGGER on_recipe_rating_change
AFTER INSERT OR UPDATE OR DELETE ON public.recipe_ratings
FOR EACH ROW EXECUTE FUNCTION public.update_recipe_rating_aggregates();

-- 6. Trigger function to log the creation of a new recipe.
DROP FUNCTION IF EXISTS public.log_new_recipe();

CREATE OR REPLACE FUNCTION public.log_new_recipe()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.user_id,
'recipe_created',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' created a new recipe: ' || NEW.name,
'chef-hat',
jsonb_build_object('recipe_id', NEW.recipe_id, 'recipe_name', NEW.name)
);

-- Award 'First Recipe' achievement if it's their first one.
PERFORM public.award_achievement(NEW.user_id, 'First Recipe');

RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a new recipe is inserted.
DROP TRIGGER IF EXISTS on_new_recipe_created ON public.recipes;
CREATE TRIGGER on_new_recipe_created
AFTER INSERT ON public.recipes
FOR EACH ROW
WHEN (NEW.user_id IS NOT NULL) -- Only log activity for user-created recipes.
EXECUTE FUNCTION public.log_new_recipe();

-- 7a. Trigger function to update the item_count on the flyers table.
DROP FUNCTION IF EXISTS public.update_flyer_item_count();

CREATE OR REPLACE FUNCTION public.update_flyer_item_count()
RETURNS TRIGGER AS $$
BEGIN
IF (TG_OP = 'INSERT') THEN
UPDATE public.flyers SET item_count = item_count + 1 WHERE flyer_id = NEW.flyer_id;
ELSIF (TG_OP = 'DELETE') THEN
UPDATE public.flyers SET item_count = item_count - 1 WHERE flyer_id = OLD.flyer_id;
END IF;
RETURN NULL; -- The result is ignored since this is an AFTER trigger.
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after any change to flyer_items.
-- This ensures the item_count on the parent flyer is always accurate.
DROP TRIGGER IF EXISTS on_flyer_item_change ON public.flyer_items;
CREATE TRIGGER on_flyer_item_change
AFTER INSERT OR DELETE ON public.flyer_items
FOR EACH ROW EXECUTE FUNCTION public.update_flyer_item_count();

-- 7. Trigger function to log the creation of a new flyer.
DROP FUNCTION IF EXISTS public.log_new_flyer();

CREATE OR REPLACE FUNCTION public.log_new_flyer()
RETURNS TRIGGER AS $$
BEGIN
-- If the flyer was uploaded by a registered user, award the 'First-Upload' achievement.
-- The award_achievement function handles checking if the user already has it.
IF NEW.uploaded_by IS NOT NULL THEN
PERFORM public.award_achievement(NEW.uploaded_by, 'First-Upload');
END IF;

INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.uploaded_by, -- Log the user who uploaded it
'flyer_uploaded',
'A new flyer for ' || (SELECT name FROM public.stores WHERE store_id = NEW.store_id) || ' has been uploaded.',
'file-text',
jsonb_build_object(
'flyer_id', NEW.flyer_id,
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id),
'valid_from', to_char(NEW.valid_from, 'YYYY-MM-DD'),
'valid_to', to_char(NEW.valid_to, 'YYYY-MM-DD')
)
);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a new flyer is inserted.
DROP TRIGGER IF EXISTS on_new_flyer_created ON public.flyers;
CREATE TRIGGER on_new_flyer_created
AFTER INSERT ON public.flyers
FOR EACH ROW EXECUTE FUNCTION public.log_new_flyer();

-- 8. Trigger function to log when a user favorites a recipe.
DROP FUNCTION IF EXISTS public.log_new_favorite_recipe();

CREATE OR REPLACE FUNCTION public.log_new_favorite_recipe()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.user_id,
'recipe_favorited',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.user_id) || ' favorited the recipe: ' || (SELECT name FROM public.recipes WHERE recipe_id = NEW.recipe_id),
'heart',
jsonb_build_object(
'recipe_id', NEW.recipe_id
)
);

-- Award 'First Favorite' achievement.
PERFORM public.award_achievement(NEW.user_id, 'First Favorite');
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a recipe is favorited.
DROP TRIGGER IF EXISTS on_new_favorite_recipe ON public.favorite_recipes;
CREATE TRIGGER on_new_favorite_recipe
AFTER INSERT ON public.favorite_recipes
FOR EACH ROW EXECUTE FUNCTION public.log_new_favorite_recipe();

-- 9. Trigger function to log when a user shares a shopping list.
DROP FUNCTION IF EXISTS public.log_new_list_share();

CREATE OR REPLACE FUNCTION public.log_new_list_share()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.shared_by_user_id,
'list_shared',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a shopping list.',
'share-2',
jsonb_build_object(
'shopping_list_id', NEW.shopping_list_id,
'list_name', (SELECT name FROM public.shopping_lists WHERE shopping_list_id = NEW.shopping_list_id),
'shared_with_user_id', NEW.shared_with_user_id
)
);

-- Award 'List Sharer' achievement.
PERFORM public.award_achievement(NEW.shared_by_user_id, 'List Sharer');
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to call the function after a shopping list is shared.
DROP TRIGGER IF EXISTS on_new_list_share ON public.shared_shopping_lists;
CREATE TRIGGER on_new_list_share
AFTER INSERT ON public.shared_shopping_lists
FOR EACH ROW EXECUTE FUNCTION public.log_new_list_share();

-- 9a. Trigger function to log when a user shares a recipe collection.
DROP FUNCTION IF EXISTS public.log_new_recipe_collection_share();

CREATE OR REPLACE FUNCTION public.log_new_recipe_collection_share()
RETURNS TRIGGER AS $$
BEGIN
-- Log the activity
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
VALUES (
NEW.shared_by_user_id, 'recipe_collection_shared',
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a recipe collection.',
'book',
jsonb_build_object('collection_id', NEW.recipe_collection_id, 'shared_with_user_id', NEW.shared_with_user_id)
);

-- Award 'Recipe Sharer' achievement.
PERFORM public.award_achievement(NEW.shared_by_user_id, 'Recipe Sharer');
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS on_new_recipe_collection_share ON public.shared_recipe_collections;
CREATE TRIGGER on_new_recipe_collection_share
AFTER INSERT ON public.shared_recipe_collections
FOR EACH ROW EXECUTE FUNCTION public.log_new_recipe_collection_share();

-- 10. Trigger function to geocode a store location's address.
-- This function is triggered when an address is inserted or updated, and is
-- designed to be extensible for external geocoding services to populate the
-- latitude, longitude, and location fields.
DROP FUNCTION IF EXISTS public.geocode_address();

CREATE OR REPLACE FUNCTION public.geocode_address()
RETURNS TRIGGER AS $$
DECLARE
full_address TEXT;
BEGIN
-- Only proceed if an address component has actually changed.
IF TG_OP = 'INSERT' OR (TG_OP = 'UPDATE' AND (
NEW.address_line_1 IS DISTINCT FROM OLD.address_line_1 OR
NEW.address_line_2 IS DISTINCT FROM OLD.address_line_2 OR
NEW.city IS DISTINCT FROM OLD.city OR
NEW.province_state IS DISTINCT FROM OLD.province_state OR
NEW.postal_code IS DISTINCT FROM OLD.postal_code OR
NEW.country IS DISTINCT FROM OLD.country
)) THEN
-- Concatenate address parts into a single string for the geocoder.
full_address := CONCAT_WS(', ', NEW.address_line_1, NEW.address_line_2, NEW.city, NEW.province_state, NEW.postal_code, NEW.country);

-- Placeholder for Geocoding API Call
-- In a real application, you would call a service here and update NEW.latitude, NEW.longitude, and NEW.location.
-- e.g., NEW.latitude := result.lat; NEW.longitude := result.lon;
-- NEW.location := ST_SetSRID(ST_MakePoint(NEW.longitude, NEW.latitude), 4326);
END IF;

RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- This trigger calls the geocoding function when an address changes.
DROP TRIGGER IF EXISTS on_address_change_geocode ON public.addresses;
CREATE TRIGGER on_address_change_geocode
BEFORE INSERT OR UPDATE ON public.addresses
FOR EACH ROW EXECUTE FUNCTION public.geocode_address();

-- 11. Trigger function to increment the fork_count on the original recipe.
DROP FUNCTION IF EXISTS public.increment_recipe_fork_count();

CREATE OR REPLACE FUNCTION public.increment_recipe_fork_count()
RETURNS TRIGGER AS $$
BEGIN
-- Only run if the recipe is a fork (original_recipe_id is not null).
IF NEW.original_recipe_id IS NOT NULL THEN
UPDATE public.recipes SET fork_count = fork_count + 1 WHERE recipe_id = NEW.original_recipe_id;
-- Award 'First Fork' achievement.
PERFORM public.award_achievement(NEW.user_id, 'First Fork');
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS on_recipe_fork ON public.recipes;
CREATE TRIGGER on_recipe_fork
AFTER INSERT ON public.recipes
FOR EACH ROW EXECUTE FUNCTION public.increment_recipe_fork_count();
@@ -265,5 +265,6 @@ INSERT INTO public.achievements (name, description, icon, points_value) VALUES
('List Sharer', 'Share a shopping list with another user for the first time.', 'list', 20),
('First Favorite', 'Mark a recipe as one of your favorites.', 'heart', 5),
('First Fork', 'Make a personal copy of a public recipe.', 'git-fork', 10),
('First Budget Created', 'Create your first budget to track spending.', 'piggy-bank', 15)
('First Budget Created', 'Create your first budget to track spending.', 'piggy-bank', 15),
('First-Upload', 'Upload your first flyer.', 'upload-cloud', 25)
ON CONFLICT (name) DO NOTHING;
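The new 'First-Upload' seed row pairs with the `PERFORM public.award_achievement(NEW.uploaded_by, 'First-Upload')` call added to `log_new_flyer()` above. The actual definition of `award_achievement()` lives elsewhere in the schema and is not part of this diff; the trigger comments only say it is safe to call repeatedly. A purely hypothetical sketch of such an idempotent helper, assuming the `user_achievements(user_id, achievement_id)` table has a unique constraint on that pair, might look like:

```sql
-- Hypothetical sketch only; the real award_achievement() is defined elsewhere in the schema.
CREATE OR REPLACE FUNCTION public.award_achievement(p_user_id UUID, p_achievement_name TEXT)
RETURNS VOID AS $$
BEGIN
    -- Look up the achievement by name and insert the award once.
    INSERT INTO public.user_achievements (user_id, achievement_id)
    SELECT p_user_id, a.achievement_id
    FROM public.achievements a
    WHERE a.name = p_achievement_name
    ON CONFLICT DO NOTHING;  -- re-awarding is a no-op, so triggers can call this freely
END;
$$ LANGUAGE plpgsql;
```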
@@ -162,7 +162,6 @@ COMMENT ON COLUMN public.flyers.uploaded_by IS 'The user who uploaded the flyer.
CREATE INDEX IF NOT EXISTS idx_flyers_status ON public.flyers(status);
CREATE INDEX IF NOT EXISTS idx_flyers_created_at ON public.flyers (created_at DESC);
CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid_to DESC, file_name ASC);
CREATE INDEX IF NOT EXISTS idx_flyers_status ON public.flyers(status);
-- 7. The 'master_grocery_items' table. This is the master dictionary.
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
@@ -973,6 +972,21 @@ COMMENT ON COLUMN public.user_reactions.reaction_type IS 'The type of reaction (
CREATE INDEX IF NOT EXISTS idx_user_reactions_user_id ON public.user_reactions(user_id);
CREATE INDEX IF NOT EXISTS idx_user_reactions_entity ON public.user_reactions(entity_type, entity_id);

-- 56. Store user-defined budgets for spending analysis.
CREATE TABLE IF NOT EXISTS public.budgets (
budget_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
name TEXT NOT NULL,
amount_cents INTEGER NOT NULL CHECK (amount_cents > 0),
period TEXT NOT NULL CHECK (period IN ('weekly', 'monthly')),
start_date DATE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT budgets_name_check CHECK (TRIM(name) <> '')
);
COMMENT ON TABLE public.budgets IS 'Allows users to set weekly or monthly grocery budgets for spending tracking.';
CREATE INDEX IF NOT EXISTS idx_budgets_user_id ON public.budgets(user_id);

-- 57. Static table defining available achievements for gamification.
CREATE TABLE IF NOT EXISTS public.achievements (
achievement_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
@@ -998,17 +1012,3 @@ CREATE INDEX IF NOT EXISTS idx_user_achievements_user_id ON public.user_achievem
CREATE INDEX IF NOT EXISTS idx_user_achievements_achievement_id ON public.user_achievements(achievement_id);

-- 56. Store user-defined budgets for spending analysis.
CREATE TABLE IF NOT EXISTS public.budgets (
budget_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
name TEXT NOT NULL,
amount_cents INTEGER NOT NULL CHECK (amount_cents > 0),
period TEXT NOT NULL CHECK (period IN ('weekly', 'monthly')),
start_date DATE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT budgets_name_check CHECK (TRIM(name) <> '')
);
COMMENT ON TABLE public.budgets IS 'Allows users to set weekly or monthly grocery budgets for spending tracking.';
CREATE INDEX IF NOT EXISTS idx_budgets_user_id ON public.budgets(user_id);

@@ -102,11 +102,11 @@ CREATE TABLE IF NOT EXISTS public.profiles (
address_id BIGINT REFERENCES public.addresses(address_id) ON DELETE SET NULL,
points INTEGER DEFAULT 0 NOT NULL CHECK (points >= 0),
preferences JSONB,
role TEXT CHECK (role IN ('admin', 'user')),
role TEXT NOT NULL CHECK (role IN ('admin', 'user')),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT profiles_full_name_check CHECK (full_name IS NULL OR TRIM(full_name) <> ''),
CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https://?.*'),
CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https?://.*'),
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
updated_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
);
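This hunk (and several below for stores, flyers, brands, user_submitted_prices, recipes, and receipts) corrects the URL check from `^https://?.*` to `^https?://.*`. The old pattern hard-codes the `s` and makes only the second slash optional, so it rejects plain `http://` URLs while accepting malformed single-slash values. A quick way to see the difference, shown here as an illustrative query rather than part of the migration:

```sql
-- Old pattern: plain http is rejected, a single-slash typo is accepted; new pattern fixes both.
SELECT 'http://example.com'  ~* '^https://?.*' AS old_http,    -- false
       'https:/example.com'  ~* '^https://?.*' AS old_single,  -- true (malformed, but passes)
       'http://example.com'  ~* '^https?://.*' AS new_http,    -- true
       'https:/example.com'  ~* '^https?://.*' AS new_single;  -- false
```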
@@ -124,7 +124,7 @@ CREATE TABLE IF NOT EXISTS public.stores (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT stores_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*'),
CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*'),
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
);
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';
@@ -144,7 +144,7 @@ CREATE TABLE IF NOT EXISTS public.flyers (
flyer_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
file_name TEXT NOT NULL,
image_url TEXT NOT NULL,
icon_url TEXT,
icon_url TEXT NOT NULL,
checksum TEXT UNIQUE,
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
valid_from DATE,
@@ -157,8 +157,8 @@ CREATE TABLE IF NOT EXISTS public.flyers (
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT flyers_valid_dates_check CHECK (valid_to >= valid_from),
CONSTRAINT flyers_file_name_check CHECK (TRIM(file_name) <> ''),
CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https://?.*'),
CONSTRAINT flyers_icon_url_check CHECK (icon_url IS NULL OR icon_url ~* '^https://?.*'),
CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https?://.*'),
CONSTRAINT flyers_icon_url_check CHECK (icon_url ~* '^https?://.*'),
CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64)
);
COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.';
@@ -215,7 +215,7 @@ CREATE TABLE IF NOT EXISTS public.brands (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT brands_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*')
CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*')
);
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
@@ -482,7 +482,7 @@ CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
downvotes INTEGER DEFAULT 0 NOT NULL CHECK (downvotes >= 0),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
);
COMMENT ON TABLE public.user_submitted_prices IS 'Stores item prices submitted by users directly from physical stores.';
COMMENT ON COLUMN public.user_submitted_prices.photo_url IS 'URL to user-submitted photo evidence of the price.';
@@ -539,7 +539,7 @@ CREATE TABLE IF NOT EXISTS public.recipes (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT recipes_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
);
COMMENT ON TABLE public.recipes IS 'Stores recipes that can be used to generate shopping lists.';
COMMENT ON COLUMN public.recipes.servings IS 'The number of servings this recipe yields.';
@@ -689,8 +689,8 @@ CREATE TABLE IF NOT EXISTS public.planned_meals (
meal_type TEXT NOT NULL,
servings_to_cook INTEGER,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT planned_meals_meal_type_check CHECK (TRIM(meal_type) <> ''),
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT planned_meals_meal_type_check CHECK (TRIM(meal_type) <> '')
);
COMMENT ON TABLE public.planned_meals IS 'Assigns a recipe to a specific day and meal type within a user''s menu plan.';
COMMENT ON COLUMN public.planned_meals.meal_type IS 'The designated meal for the recipe, e.g., ''Breakfast'', ''Lunch'', ''Dinner''.';
@@ -940,7 +940,7 @@ CREATE TABLE IF NOT EXISTS public.receipts (
raw_text TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
processed_at TIMESTAMPTZ,
CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https://?.*'),
CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https?://.*'),
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
COMMENT ON TABLE public.receipts IS 'Stores uploaded user receipts for purchase tracking and analysis.';
@@ -1113,6 +1113,7 @@ DECLARE
ground_beef_id BIGINT; pasta_item_id BIGINT; tomatoes_id BIGINT; onions_id BIGINT; garlic_id BIGINT;
bell_peppers_id BIGINT; carrots_id BIGINT; soy_sauce_id BIGINT;
soda_item_id BIGINT; turkey_item_id BIGINT; bread_item_id BIGINT; cheese_item_id BIGINT;
chicken_thighs_id BIGINT; paper_towels_id BIGINT; toilet_paper_id BIGINT;

-- Tag IDs
quick_easy_tag BIGINT; healthy_tag BIGINT; chicken_tag BIGINT;
@@ -1164,6 +1165,9 @@ BEGIN
SELECT mgi.master_grocery_item_id INTO turkey_item_id FROM public.master_grocery_items mgi WHERE mgi.name = 'turkey';
SELECT mgi.master_grocery_item_id INTO bread_item_id FROM public.master_grocery_items mgi WHERE mgi.name = 'bread';
SELECT mgi.master_grocery_item_id INTO cheese_item_id FROM public.master_grocery_items mgi WHERE mgi.name = 'cheese';
SELECT mgi.master_grocery_item_id INTO chicken_thighs_id FROM public.master_grocery_items mgi WHERE mgi.name = 'chicken thighs';
SELECT mgi.master_grocery_item_id INTO paper_towels_id FROM public.master_grocery_items mgi WHERE mgi.name = 'paper towels';
SELECT mgi.master_grocery_item_id INTO toilet_paper_id FROM public.master_grocery_items mgi WHERE mgi.name = 'toilet paper';

-- Insert ingredients for each recipe
INSERT INTO public.recipe_ingredients (recipe_id, master_item_id, quantity, unit) VALUES
@@ -1200,6 +1204,17 @@ BEGIN
(bolognese_recipe_id, family_tag), (bolognese_recipe_id, beef_tag), (bolognese_recipe_id, weeknight_tag),
(stir_fry_recipe_id, quick_easy_tag), (stir_fry_recipe_id, healthy_tag), (stir_fry_recipe_id, vegetarian_tag)
ON CONFLICT (recipe_id, tag_id) DO NOTHING;

INSERT INTO public.master_item_aliases (master_item_id, alias) VALUES
(ground_beef_id, 'ground chuck'), (ground_beef_id, 'lean ground beef'),
(ground_beef_id, 'extra lean ground beef'), (ground_beef_id, 'hamburger meat'),
(chicken_breast_id, 'boneless skinless chicken breast'), (chicken_breast_id, 'chicken cutlets'),
(chicken_thighs_id, 'boneless skinless chicken thighs'), (chicken_thighs_id, 'bone-in chicken thighs'),
(bell_peppers_id, 'red pepper'), (bell_peppers_id, 'green pepper'), (bell_peppers_id, 'yellow pepper'), (bell_peppers_id, 'orange pepper'),
(soda_item_id, 'pop'), (soda_item_id, 'soft drink'), (soda_item_id, 'coke'), (soda_item_id, 'pepsi'),
(paper_towels_id, 'paper towel'),
(toilet_paper_id, 'bathroom tissue'), (toilet_paper_id, 'toilet tissue')
ON CONFLICT (alias) DO NOTHING;
END $$;

-- Pre-populate the unit_conversions table with common cooking conversions.
@@ -2627,7 +2642,9 @@ BEGIN
'file-text',
jsonb_build_object(
'flyer_id', NEW.flyer_id,
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id)
'store_name', (SELECT name FROM public.stores WHERE store_id = NEW.store_id),
'valid_from', to_char(NEW.valid_from, 'YYYY-MM-DD'),
'valid_to', to_char(NEW.valid_to, 'YYYY-MM-DD')
)
);
RETURN NEW;
@@ -2677,6 +2694,7 @@ BEGIN
(SELECT full_name FROM public.profiles WHERE user_id = NEW.shared_by_user_id) || ' shared a shopping list.',
'share-2',
jsonb_build_object(
'shopping_list_id', NEW.shopping_list_id,
'list_name', (SELECT name FROM public.shopping_lists WHERE shopping_list_id = NEW.shopping_list_id),
'shared_with_user_id', NEW.shared_with_user_id
)
@@ -2725,35 +2743,43 @@ CREATE TRIGGER on_new_recipe_collection_share
FOR EACH ROW EXECUTE FUNCTION public.log_new_recipe_collection_share();

-- 10. Trigger function to geocode a store location's address.
-- This function is designed to be extensible for external geocoding services.
DROP FUNCTION IF EXISTS public.geocode_store_location();
-- This function is triggered when an address is inserted or updated, and is
-- designed to be extensible for external geocoding services to populate the
-- latitude, longitude, and location fields.
|
||||
DROP FUNCTION IF EXISTS public.geocode_address();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.geocode_store_location()
|
||||
CREATE OR REPLACE FUNCTION public.geocode_address()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
full_address TEXT;
|
||||
BEGIN
|
||||
-- Only proceed if the address has actually changed.
|
||||
-- Note: We check against the linked address fields via the NEW.address_id in a real scenario,
|
||||
-- but for this trigger to work effectively, it usually requires a direct update on the address table
|
||||
-- or this trigger should be moved to the 'addresses' table.
|
||||
-- However, based on the provided logic, we are keeping the placeholder structure.
|
||||
|
||||
-- Placeholder logic:
|
||||
IF TG_OP = 'INSERT' THEN
|
||||
-- Logic to fetch address string based on NEW.address_id and geocode
|
||||
NULL;
|
||||
-- Only proceed if an address component has actually changed.
|
||||
IF TG_OP = 'INSERT' OR (TG_OP = 'UPDATE' AND (
|
||||
NEW.address_line_1 IS DISTINCT FROM OLD.address_line_1 OR
|
||||
NEW.address_line_2 IS DISTINCT FROM OLD.address_line_2 OR
|
||||
NEW.city IS DISTINCT FROM OLD.city OR
|
||||
NEW.province_state IS DISTINCT FROM OLD.province_state OR
|
||||
NEW.postal_code IS DISTINCT FROM OLD.postal_code OR
|
||||
NEW.country IS DISTINCT FROM OLD.country
|
||||
)) THEN
|
||||
-- Concatenate address parts into a single string for the geocoder.
|
||||
full_address := CONCAT_WS(', ', NEW.address_line_1, NEW.address_line_2, NEW.city, NEW.province_state, NEW.postal_code, NEW.country);
|
||||
|
||||
-- Placeholder for Geocoding API Call.
|
||||
-- In a real application, you would call a service here and update NEW.latitude, NEW.longitude, and NEW.location.
|
||||
-- e.g., NEW.latitude := result.lat; NEW.longitude := result.lon;
|
||||
-- NEW.location := ST_SetSRID(ST_MakePoint(NEW.longitude, NEW.latitude), 4326);
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Trigger to call the geocoding function.
|
||||
DROP TRIGGER IF EXISTS on_store_location_address_change ON public.store_locations;
|
||||
CREATE TRIGGER on_store_location_address_change
|
||||
BEFORE INSERT OR UPDATE ON public.store_locations
|
||||
FOR EACH ROW EXECUTE FUNCTION public.geocode_store_location();
|
||||
-- This trigger calls the geocoding function when an address changes.
|
||||
DROP TRIGGER IF EXISTS on_address_change_geocode ON public.addresses;
|
||||
CREATE TRIGGER on_address_change_geocode
|
||||
BEFORE INSERT OR UPDATE ON public.addresses
|
||||
FOR EACH ROW EXECUTE FUNCTION public.geocode_address();
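The function above deliberately leaves the geocoding call as a placeholder. As a rough sketch of how the application layer could fill the coordinates in once a provider is wired up (assuming a node-postgres pool, an addresses.address_id key, and a geocodeAddress helper along the lines of the GeocodingService exercised later in this changeset; all names here are illustrative, not part of the migration):

// Sketch only. Assumes a node-postgres Pool, an addresses.address_id primary key, and a
// geocoder exposing geocodeAddress(address) -> { lat, lng } | null (names are illustrative).
import { Pool } from 'pg';

type Geocoder = { geocodeAddress(address: string): Promise<{ lat: number; lng: number } | null> };

export async function backfillAddressCoordinates(pool: Pool, geocoder: Geocoder, addressId: number): Promise<void> {
  // Same concatenation the trigger uses to build the geocoder input.
  const { rows } = await pool.query(
    `SELECT CONCAT_WS(', ', address_line_1, address_line_2, city, province_state, postal_code, country) AS full_address
       FROM public.addresses WHERE address_id = $1`,
    [addressId],
  );
  if (rows.length === 0) return;

  const coords = await geocoder.geocodeAddress(rows[0].full_address);
  if (!coords) return; // No provider could resolve the address; leave the row untouched.

  // Mirrors the commented-out trigger logic: store lat/lng plus a PostGIS point (x = lng, y = lat).
  await pool.query(
    `UPDATE public.addresses
        SET latitude = $2, longitude = $3, location = ST_SetSRID(ST_MakePoint($3, $2), 4326)
      WHERE address_id = $1`,
    [addressId, coords.lat, coords.lng],
  );
}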
|
||||
|
||||
-- 11. Trigger function to increment the fork_count on the original recipe.
|
||||
DROP FUNCTION IF EXISTS public.increment_recipe_fork_count();
|
||||
@@ -2783,17 +2809,19 @@ CREATE TRIGGER on_recipe_fork
|
||||
-- It replaces the need to call get_best_sale_prices_for_user for each user individually.
|
||||
-- Returns: TABLE(...) - A set of records including user details and deal information.
|
||||
-- =================================================================
|
||||
DROP FUNCTION IF EXISTS public.get_best_sale_prices_for_all_users();
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.get_best_sale_prices_for_all_users()
|
||||
RETURNS TABLE(
|
||||
user_id uuid,
|
||||
|
||||
email text,
|
||||
full_name text,
|
||||
master_item_id integer,
|
||||
master_item_id bigint,
|
||||
item_name text,
|
||||
best_price_in_cents integer,
|
||||
store_name text,
|
||||
flyer_id integer,
|
||||
flyer_id bigint,
|
||||
valid_to date
|
||||
) AS $$
|
||||
BEGIN
|
||||
@@ -2805,7 +2833,7 @@ BEGIN
|
||||
SELECT
|
||||
fi.master_item_id,
|
||||
fi.price_in_cents,
|
||||
f.store_name,
|
||||
s.name as store_name,
|
||||
f.flyer_id,
|
||||
f.valid_to
|
||||
FROM public.flyer_items fi
|
||||
|
||||
@@ -110,8 +110,8 @@ async function main() {
|
||||
validTo.setDate(today.getDate() + 5);
|
||||
|
||||
const flyerQuery = `
|
||||
INSERT INTO public.flyers (file_name, image_url, checksum, store_id, valid_from, valid_to)
|
||||
VALUES ('safeway-flyer.jpg', 'https://example.com/flyer-images/safeway-flyer.jpg', 'a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0', ${storeMap.get('Safeway')}, $1, $2)
|
||||
INSERT INTO public.flyers (file_name, image_url, icon_url, checksum, store_id, valid_from, valid_to)
|
||||
VALUES ('safeway-flyer.jpg', 'https://example.com/flyer-images/safeway-flyer.jpg', 'https://example.com/flyer-images/icons/safeway-flyer.jpg', 'a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0', ${storeMap.get('Safeway')}, $1, $2)
|
||||
RETURNING flyer_id;
|
||||
`;
|
||||
const flyerRes = await client.query<{ flyer_id: number }>(flyerQuery, [
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// src/middleware/errorHandler.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, afterAll } from 'vitest';
|
||||
import { describe, it, expect, vi, beforeEach, afterAll, afterEach } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import express, { Request, Response, NextFunction } from 'express';
|
||||
import { errorHandler } from './errorHandler'; // This was a duplicate, fixed.
|
||||
@@ -98,12 +98,15 @@ describe('errorHandler Middleware', () => {
|
||||
vi.clearAllMocks();
|
||||
consoleErrorSpy.mockClear(); // Clear spy for console.error
|
||||
// Ensure NODE_ENV is set to 'test' for console.error logging
|
||||
process.env.NODE_ENV = 'test';
|
||||
vi.stubEnv('NODE_ENV', 'test');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs(); // Clean up environment variable stubs after each test
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
consoleErrorSpy.mockRestore(); // Restore console.error after all tests
|
||||
delete process.env.NODE_ENV; // Clean up environment variable
|
||||
});
|
||||
|
||||
it('should return a generic 500 error for a standard Error object', async () => {
|
||||
@@ -293,11 +296,7 @@ describe('errorHandler Middleware', () => {
|
||||
|
||||
describe('when NODE_ENV is "production"', () => {
|
||||
beforeEach(() => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
process.env.NODE_ENV = 'test'; // Reset for other test files
|
||||
vi.stubEnv('NODE_ENV', 'production');
|
||||
});
|
||||
|
||||
it('should return a generic message with an error ID for a 500 error', async () => {
|
||||
|
||||
@@ -109,20 +109,19 @@ describe('Multer Middleware Directory Creation', () => {
|
||||
describe('createUploadMiddleware', () => {
|
||||
const mockFile = { originalname: 'test.png' } as Express.Multer.File;
|
||||
const mockUser = createMockUserProfile({ user: { user_id: 'user-123', email: 'test@user.com' } });
|
||||
let originalNodeEnv: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
originalNodeEnv = process.env.NODE_ENV;
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env.NODE_ENV = originalNodeEnv;
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
describe('Avatar Storage', () => {
|
||||
it('should generate a unique filename for an authenticated user', () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
vi.stubEnv('NODE_ENV', 'production');
|
||||
createUploadMiddleware({ storageType: 'avatar' });
|
||||
const storageOptions = vi.mocked(multer.diskStorage).mock.calls[0][0];
|
||||
const cb = vi.fn();
|
||||
@@ -150,7 +149,7 @@ describe('createUploadMiddleware', () => {
|
||||
});
|
||||
|
||||
it('should use a predictable filename in test environment', () => {
|
||||
process.env.NODE_ENV = 'test';
|
||||
vi.stubEnv('NODE_ENV', 'test');
|
||||
createUploadMiddleware({ storageType: 'avatar' });
|
||||
const storageOptions = vi.mocked(multer.diskStorage).mock.calls[0][0];
|
||||
const cb = vi.fn();
|
||||
@@ -164,7 +163,7 @@ describe('createUploadMiddleware', () => {
|
||||
|
||||
describe('Flyer Storage', () => {
|
||||
it('should generate a unique, sanitized filename in production environment', () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
vi.stubEnv('NODE_ENV', 'production');
|
||||
const mockFlyerFile = {
|
||||
fieldname: 'flyerFile',
|
||||
originalname: 'My Flyer (Special!).pdf',
|
||||
@@ -184,7 +183,7 @@ describe('createUploadMiddleware', () => {
|
||||
|
||||
it('should generate a predictable filename in test environment', () => {
|
||||
// This test covers lines 43-46
|
||||
process.env.NODE_ENV = 'test';
|
||||
vi.stubEnv('NODE_ENV', 'test');
|
||||
const mockFlyerFile = {
|
||||
fieldname: 'flyerFile',
|
||||
originalname: 'test-flyer.jpg',
|
||||
|
||||
@@ -618,21 +618,19 @@ describe('Passport Configuration', () => {
|
||||
|
||||
describe('mockAuth Middleware', () => {
|
||||
const mockNext: NextFunction = vi.fn();
|
||||
let mockRes: Partial<Response>;
|
||||
let originalNodeEnv: string | undefined;
|
||||
const mockRes: Partial<Response> = {
|
||||
status: vi.fn().mockReturnThis(),
|
||||
json: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockRes = { status: vi.fn().mockReturnThis(), json: vi.fn() };
|
||||
originalNodeEnv = process.env.NODE_ENV;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env.NODE_ENV = originalNodeEnv;
|
||||
// Unstub env variables before each test in this block to ensure a clean state.
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
it('should attach a mock admin user to req when NODE_ENV is "test"', () => {
|
||||
// Arrange
|
||||
process.env.NODE_ENV = 'test';
|
||||
vi.stubEnv('NODE_ENV', 'test');
|
||||
const mockReq = {} as Request;
|
||||
|
||||
// Act
|
||||
@@ -646,7 +644,7 @@ describe('Passport Configuration', () => {
|
||||
|
||||
it('should do nothing and call next() when NODE_ENV is not "test"', () => {
|
||||
// Arrange
|
||||
process.env.NODE_ENV = 'production';
|
||||
vi.stubEnv('NODE_ENV', 'production');
|
||||
const mockReq = {} as Request;
|
||||
|
||||
// Act
|
||||
|
||||
211
src/routes/reactions.routes.test.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||
|
||||
// 1. Mock the Service Layer directly.
|
||||
vi.mock('../services/db/index.db', () => ({
|
||||
reactionRepo: {
|
||||
getReactions: vi.fn(),
|
||||
getReactionSummary: vi.fn(),
|
||||
toggleReaction: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock the logger to keep test output clean
|
||||
vi.mock('../services/logger.server', async () => ({
|
||||
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||
}));
|
||||
|
||||
// Mock Passport middleware
|
||||
vi.mock('./passport.routes', () => ({
|
||||
default: {
|
||||
authenticate: vi.fn(
|
||||
() => (req: any, res: any, next: any) => {
|
||||
// If we are testing the unauthenticated state (no user injected), simulate 401.
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
next();
|
||||
},
|
||||
),
|
||||
},
|
||||
}));
|
||||
|
||||
// Import the router and mocked DB AFTER all mocks are defined.
|
||||
import reactionsRouter from './reactions.routes';
|
||||
import { reactionRepo } from '../services/db/index.db';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
|
||||
const expectLogger = expect.objectContaining({
|
||||
info: expect.any(Function),
|
||||
error: expect.any(Function),
|
||||
});
|
||||
|
||||
describe('Reaction Routes (/api/reactions)', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('GET /', () => {
|
||||
const app = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
|
||||
|
||||
it('should return a list of reactions', async () => {
|
||||
const mockReactions = [{ id: 1, reaction_type: 'like', entity_id: '123' }];
|
||||
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions as any);
|
||||
|
||||
const response = await supertest(app).get('/api/reactions');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockReactions);
|
||||
expect(reactionRepo.getReactions).toHaveBeenCalledWith({}, expectLogger);
|
||||
});
|
||||
|
||||
it('should filter by query parameters', async () => {
|
||||
const mockReactions = [{ id: 1, reaction_type: 'like' }];
|
||||
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions as any);
|
||||
|
||||
const validUuid = '123e4567-e89b-12d3-a456-426614174000';
|
||||
const query = { userId: validUuid, entityType: 'recipe', entityId: '1' };
|
||||
|
||||
const response = await supertest(app).get('/api/reactions').query(query);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(reactionRepo.getReactions).toHaveBeenCalledWith(
|
||||
expect.objectContaining(query),
|
||||
expectLogger
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 500 on database error', async () => {
|
||||
const error = new Error('DB Error');
|
||||
vi.mocked(reactionRepo.getReactions).mockRejectedValue(error);
|
||||
|
||||
const response = await supertest(app).get('/api/reactions');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error },
|
||||
'Error fetching user reactions'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /summary', () => {
|
||||
const app = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
|
||||
|
||||
it('should return reaction summary for an entity', async () => {
|
||||
const mockSummary = { like: 10, love: 5 };
|
||||
vi.mocked(reactionRepo.getReactionSummary).mockResolvedValue(mockSummary as any);
|
||||
|
||||
const response = await supertest(app)
|
||||
.get('/api/reactions/summary')
|
||||
.query({ entityType: 'recipe', entityId: '123' });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockSummary);
|
||||
expect(reactionRepo.getReactionSummary).toHaveBeenCalledWith(
|
||||
'recipe',
|
||||
'123',
|
||||
expectLogger
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 400 if required parameters are missing', async () => {
|
||||
const response = await supertest(app).get('/api/reactions/summary');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('required');
|
||||
});
|
||||
|
||||
it('should return 500 on database error', async () => {
|
||||
const error = new Error('DB Error');
|
||||
vi.mocked(reactionRepo.getReactionSummary).mockRejectedValue(error);
|
||||
|
||||
const response = await supertest(app)
|
||||
.get('/api/reactions/summary')
|
||||
.query({ entityType: 'recipe', entityId: '123' });
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error },
|
||||
'Error fetching reaction summary'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /toggle', () => {
|
||||
const mockUser = createMockUserProfile({ user: { user_id: 'user-123' } });
|
||||
const app = createTestApp({
|
||||
router: reactionsRouter,
|
||||
basePath: '/api/reactions',
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
|
||||
const validBody = {
|
||||
entity_type: 'recipe',
|
||||
entity_id: '123',
|
||||
reaction_type: 'like',
|
||||
};
|
||||
|
||||
it('should return 201 when a reaction is added', async () => {
|
||||
const mockResult = { ...validBody, id: 1, user_id: 'user-123' };
|
||||
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(mockResult as any);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body).toEqual({ message: 'Reaction added.', reaction: mockResult });
|
||||
expect(reactionRepo.toggleReaction).toHaveBeenCalledWith(
|
||||
{ user_id: 'user-123', ...validBody },
|
||||
expectLogger
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 200 when a reaction is removed', async () => {
|
||||
// Returning null/false from toggleReaction implies the reaction was removed
|
||||
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(null);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ message: 'Reaction removed.' });
|
||||
});
|
||||
|
||||
it('should return 400 if body is invalid', async () => {
|
||||
const response = await supertest(app)
|
||||
.post('/api/reactions/toggle')
|
||||
.send({ entity_type: 'recipe' }); // Missing other required fields
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
});
|
||||
|
||||
it('should return 401 if not authenticated', async () => {
|
||||
const unauthApp = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
|
||||
const response = await supertest(unauthApp)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
});
|
||||
|
||||
it('should return 500 on database error', async () => {
|
||||
const error = new Error('DB Error');
|
||||
vi.mocked(reactionRepo.toggleReaction).mockRejectedValue(error);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error, body: validBody },
|
||||
'Error toggling user reaction'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,7 +1,7 @@
|
||||
// src/routes/recipe.routes.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import { createMockRecipe, createMockRecipeComment } from '../tests/utils/mockFactories';
|
||||
import { createMockRecipe, createMockRecipeComment, createMockUserProfile } from '../tests/utils/mockFactories';
|
||||
import { NotFoundError } from '../services/db/errors.db';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
|
||||
@@ -16,9 +16,31 @@ vi.mock('../services/db/index.db', () => ({
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock AI Service
|
||||
vi.mock('../services/aiService.server', () => ({
|
||||
aiService: {
|
||||
generateRecipeSuggestion: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock Passport
|
||||
vi.mock('./passport.routes', () => ({
|
||||
default: {
|
||||
authenticate: vi.fn(
|
||||
() => (req: any, res: any, next: any) => {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
next();
|
||||
},
|
||||
),
|
||||
},
|
||||
}));
|
||||
|
||||
// Import the router and mocked DB AFTER all mocks are defined.
|
||||
import recipeRouter from './recipe.routes';
|
||||
import * as db from '../services/db/index.db';
|
||||
import { aiService } from '../services/aiService.server';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
|
||||
// Mock the logger to keep test output clean
|
||||
@@ -229,4 +251,71 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /suggest', () => {
|
||||
const mockUser = createMockUserProfile({ user: { user_id: 'user-123' } });
|
||||
const authApp = createTestApp({
|
||||
router: recipeRouter,
|
||||
basePath: '/api/recipes',
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
|
||||
it('should return a recipe suggestion', async () => {
|
||||
const ingredients = ['chicken', 'rice'];
|
||||
const mockSuggestion = 'Chicken and Rice Casserole...';
|
||||
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue(mockSuggestion);
|
||||
|
||||
const response = await supertest(authApp)
|
||||
.post('/api/recipes/suggest')
|
||||
.send({ ingredients });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ suggestion: mockSuggestion });
|
||||
expect(aiService.generateRecipeSuggestion).toHaveBeenCalledWith(ingredients, expectLogger);
|
||||
});
|
||||
|
||||
it('should return 503 if AI service returns null', async () => {
|
||||
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue(null);
|
||||
|
||||
const response = await supertest(authApp)
|
||||
.post('/api/recipes/suggest')
|
||||
.send({ ingredients: ['water'] });
|
||||
|
||||
expect(response.status).toBe(503);
|
||||
expect(response.body.message).toContain('unavailable');
|
||||
});
|
||||
|
||||
it('should return 400 if ingredients list is empty', async () => {
|
||||
const response = await supertest(authApp)
|
||||
.post('/api/recipes/suggest')
|
||||
.send({ ingredients: [] });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('At least one ingredient is required');
|
||||
});
|
||||
|
||||
it('should return 401 if not authenticated', async () => {
|
||||
const unauthApp = createTestApp({ router: recipeRouter, basePath: '/api/recipes' });
|
||||
const response = await supertest(unauthApp)
|
||||
.post('/api/recipes/suggest')
|
||||
.send({ ingredients: ['chicken'] });
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
});
|
||||
|
||||
it('should return 500 on service error', async () => {
|
||||
const error = new Error('AI Error');
|
||||
vi.mocked(aiService.generateRecipeSuggestion).mockRejectedValue(error);
|
||||
|
||||
const response = await supertest(authApp)
|
||||
.post('/api/recipes/suggest')
|
||||
.send({ ingredients: ['chicken'] });
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error },
|
||||
'Error generating recipe suggestion'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -122,10 +122,10 @@ describe('User Routes (/api/users)', () => {
|
||||
describe('Avatar Upload Directory Creation', () => {
|
||||
it('should log an error if avatar directory creation fails', async () => {
|
||||
// Arrange
|
||||
const mkdirError = new Error('EACCES: permission denied');
|
||||
const mkdirError = new Error('EACCES: permission denied'); // This error is specific to the fs.mkdir mock.
|
||||
// Reset modules to force re-import with a new mock implementation
|
||||
vi.resetModules();
|
||||
// Set up the mock *before* the module is re-imported
|
||||
// Set up the mock *before* the module is re-imported.
|
||||
vi.doMock('node:fs/promises', () => ({
|
||||
default: {
|
||||
// We only need to mock mkdir for this test.
|
||||
@@ -133,6 +133,10 @@ describe('User Routes (/api/users)', () => {
|
||||
},
|
||||
}));
|
||||
const { logger } = await import('../services/logger.server');
|
||||
// Stub NODE_ENV to ensure the relevant code path is executed if it depends on it.
|
||||
// Although the mkdir call itself doesn't depend on NODE_ENV, this is good practice
|
||||
// when re-importing modules that might have conditional logic based on it.
|
||||
vi.stubEnv('NODE_ENV', 'test');
|
||||
|
||||
// Act: Dynamically import the router to trigger the top-level fs.mkdir call
|
||||
await import('./user.routes');
|
||||
@@ -142,6 +146,7 @@ describe('User Routes (/api/users)', () => {
|
||||
{ error: mkdirError },
|
||||
'Failed to create multer storage directories on startup.',
|
||||
);
|
||||
vi.unstubAllEnvs(); // Clean up the stubbed environment variable.
|
||||
vi.doUnmock('node:fs/promises'); // Clean up
|
||||
});
|
||||
});
|
||||
|
||||
@@ -136,45 +136,29 @@ describe('AI Service (Server)', () => {
|
||||
});
|
||||
|
||||
describe('Constructor', () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset process.env before each test in this block
|
||||
vi.unstubAllEnvs();
|
||||
vi.unstubAllEnvs(); // Force-removes all environment mocking
|
||||
vi.resetModules(); // Important to re-evaluate the service file
|
||||
process.env = { ...originalEnv };
|
||||
console.log('CONSTRUCTOR beforeEach: process.env reset.');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment variables
|
||||
vi.unstubAllEnvs();
|
||||
process.env = originalEnv;
|
||||
console.log('CONSTRUCTOR afterEach: process.env restored.');
|
||||
});
|
||||
|
||||
it('should throw an error if GEMINI_API_KEY is not set in a non-test environment', async () => {
|
||||
console.log("TEST START: 'should throw an error if GEMINI_API_KEY is not set...'");
|
||||
console.log(
|
||||
`PRE-TEST ENV: NODE_ENV=${process.env.NODE_ENV}, VITEST_POOL_ID=${process.env.VITEST_POOL_ID}`,
|
||||
);
|
||||
// Simulate a non-test environment
|
||||
process.env.NODE_ENV = 'production';
|
||||
delete process.env.GEMINI_API_KEY;
|
||||
delete process.env.VITEST_POOL_ID;
|
||||
console.log(
|
||||
`POST-MANIPULATION ENV: NODE_ENV=${process.env.NODE_ENV}, VITEST_POOL_ID=${process.env.VITEST_POOL_ID}`,
|
||||
);
|
||||
vi.stubEnv('NODE_ENV', 'production');
|
||||
vi.stubEnv('GEMINI_API_KEY', '');
|
||||
vi.stubEnv('VITEST_POOL_ID', '');
|
||||
|
||||
let error: Error | undefined;
|
||||
// Dynamically import the class to re-evaluate the constructor logic
|
||||
try {
|
||||
console.log('Attempting to import and instantiate AIService which is expected to throw...');
|
||||
const { AIService } = await import('./aiService.server');
|
||||
new AIService(mockLoggerInstance);
|
||||
} catch (e) {
|
||||
console.log('Successfully caught an error during instantiation.');
|
||||
error = e as Error;
|
||||
}
|
||||
expect(error).toBeInstanceOf(Error);
|
||||
@@ -185,8 +169,8 @@ describe('AI Service (Server)', () => {
|
||||
|
||||
it('should use a mock placeholder if API key is missing in a test environment', async () => {
|
||||
// Arrange: Simulate a test environment without an API key
|
||||
process.env.NODE_ENV = 'test';
|
||||
delete process.env.GEMINI_API_KEY;
|
||||
vi.stubEnv('NODE_ENV', 'test');
|
||||
vi.stubEnv('GEMINI_API_KEY', '');
|
||||
|
||||
// Act: Dynamically import and instantiate the service
|
||||
const { AIService } = await import('./aiService.server');
|
||||
@@ -202,7 +186,7 @@ describe('AI Service (Server)', () => {
|
||||
});
|
||||
|
||||
it('should use the adapter to call generateContent when using real GoogleGenAI client', async () => {
|
||||
process.env.GEMINI_API_KEY = 'test-key';
|
||||
vi.stubEnv('GEMINI_API_KEY', 'test-key');
|
||||
// We need to force the constructor to use the real client logic, not the injected mock.
|
||||
// So we instantiate AIService without passing aiClient.
|
||||
|
||||
@@ -224,7 +208,7 @@ describe('AI Service (Server)', () => {
|
||||
});
|
||||
|
||||
it('should throw error if adapter is called without content', async () => {
|
||||
process.env.GEMINI_API_KEY = 'test-key';
|
||||
vi.stubEnv('GEMINI_API_KEY', 'test-key');
|
||||
vi.resetModules();
|
||||
const { AIService } = await import('./aiService.server');
|
||||
const service = new AIService(mockLoggerInstance);
|
||||
@@ -237,17 +221,14 @@ describe('AI Service (Server)', () => {
|
||||
});
|
||||
|
||||
describe('Model Fallback Logic', () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
process.env = { ...originalEnv, GEMINI_API_KEY: 'test-key' };
|
||||
vi.stubEnv('GEMINI_API_KEY', 'test-key');
|
||||
vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
|
||||
mockGenerateContent.mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
@@ -279,8 +260,8 @@ describe('AI Service (Server)', () => {
|
||||
});
|
||||
|
||||
// Check second call
|
||||
expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { // The second model in the list is 'gemini-2.5-flash'
|
||||
model: 'gemini-2.5-flash',
|
||||
expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { // The second model in the list is 'gemini-2.5-pro'
|
||||
model: 'gemini-2.5-pro',
|
||||
...request,
|
||||
});
|
||||
|
||||
|
||||
@@ -94,8 +94,8 @@ export class AIService {
|
||||
// The fallback list is ordered by preference (speed/cost vs. power).
|
||||
// We try the fastest models first, then the more powerful 'pro' model as a high-quality fallback,
|
||||
// and finally the 'lite' model as a last resort.
|
||||
private readonly models = [ 'gemini-3-flash-preview', 'gemini-2.5-flash', 'gemini-2.5-flash-lite', 'gemma-3-27b', 'gemma-3-12b'];
|
||||
private readonly models_lite = ["gemma-3-4b", "gemma-3-2b", "gemma-3-1b"];
|
||||
private readonly models = [ 'gemini-3-flash-preview','gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite','gemini-2.0-flash-001','gemini-2.0-flash','gemini-2.0-flash-exp','gemini-2.0-flash-lite-001','gemini-2.0-flash-lite', 'gemma-3-27b-it', 'gemma-3-12b-it'];
|
||||
private readonly models_lite = ["gemma-3-4b-it", "gemma-3-2b-it", "gemma-3-1b-it"];
|
||||
|
||||
constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
|
||||
this.logger = logger;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
// src/services/authService.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import type { UserProfile } from '../types';
|
||||
import type * as jsonwebtoken from 'jsonwebtoken';
|
||||
|
||||
@@ -33,8 +34,8 @@ describe('AuthService', () => {
|
||||
vi.resetModules();
|
||||
|
||||
// Set environment variables before any modules are imported
|
||||
process.env.JWT_SECRET = 'test-secret';
|
||||
process.env.FRONTEND_URL = 'http://localhost:3000';
|
||||
vi.stubEnv('JWT_SECRET', 'test-secret');
|
||||
vi.stubEnv('FRONTEND_URL', 'http://localhost:3000');
|
||||
|
||||
// Mock all dependencies before dynamically importing the service
|
||||
// Core modules like bcrypt, jsonwebtoken, and crypto are now mocked globally in tests-setup-unit.ts
|
||||
@@ -77,6 +78,10 @@ describe('AuthService', () => {
|
||||
UniqueConstraintError = (await import('./db/errors.db')).UniqueConstraintError;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
describe('registerUser', () => {
|
||||
it('should successfully register a new user', async () => {
|
||||
vi.mocked(bcrypt.hash).mockImplementation(async () => 'hashed-password');
|
||||
|
||||
@@ -51,67 +51,72 @@ describe('Flyer DB Service', () => {
|
||||
|
||||
describe('findOrCreateStore', () => {
|
||||
it('should find an existing store and return its ID', async () => {
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [{ store_id: 1 }] });
|
||||
// 1. INSERT...ON CONFLICT does nothing. 2. SELECT finds the store.
|
||||
mockPoolInstance.query
|
||||
.mockResolvedValueOnce({ rows: [], rowCount: 0 })
|
||||
.mockResolvedValueOnce({ rows: [{ store_id: 1 }] });
|
||||
|
||||
const result = await flyerRepo.findOrCreateStore('Existing Store', mockLogger);
|
||||
expect(result).toBe(1);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledTimes(2);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('SELECT store_id FROM public.stores WHERE name = $1'),
|
||||
'INSERT INTO public.stores (name) VALUES ($1) ON CONFLICT (name) DO NOTHING',
|
||||
['Existing Store'],
|
||||
);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
'SELECT store_id FROM public.stores WHERE name = $1',
|
||||
['Existing Store'],
|
||||
);
|
||||
});
|
||||
|
||||
it('should create a new store if it does not exist', async () => {
|
||||
it('should create a new store if it does not exist and return its ID', async () => {
|
||||
// 1. INSERT...ON CONFLICT creates the store. 2. SELECT finds it.
|
||||
mockPoolInstance.query
|
||||
.mockResolvedValueOnce({ rows: [] }) // First SELECT finds nothing
|
||||
.mockResolvedValueOnce({ rows: [{ store_id: 2 }] })
|
||||
.mockResolvedValueOnce({ rows: [], rowCount: 1 }) // INSERT affects 1 row
|
||||
.mockResolvedValueOnce({ rows: [{ store_id: 2 }] }); // SELECT finds the new store
|
||||
|
||||
const result = await flyerRepo.findOrCreateStore('New Store', mockLogger);
|
||||
expect(result).toBe(2);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledTimes(2);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.stores (name) VALUES ($1) RETURNING store_id'),
|
||||
'INSERT INTO public.stores (name) VALUES ($1) ON CONFLICT (name) DO NOTHING',
|
||||
['New Store'],
|
||||
);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
'SELECT store_id FROM public.stores WHERE name = $1',
|
||||
['New Store'],
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle race condition where store is created between SELECT and INSERT', async () => {
|
||||
const uniqueConstraintError = new Error('duplicate key value violates unique constraint');
|
||||
(uniqueConstraintError as Error & { code: string }).code = '23505';
|
||||
|
||||
mockPoolInstance.query
|
||||
.mockResolvedValueOnce({ rows: [] }) // First SELECT finds nothing
|
||||
.mockRejectedValueOnce(uniqueConstraintError) // INSERT fails due to race condition
|
||||
.mockResolvedValueOnce({ rows: [{ store_id: 3 }] }); // Second SELECT finds the store
|
||||
|
||||
const result = await flyerRepo.findOrCreateStore('Racy Store', mockLogger);
|
||||
expect(result).toBe(3);
|
||||
//expect(mockDb.query).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('should throw an error if the database query fails', async () => {
|
||||
it('should throw an error if the database query fails', async () => {
|
||||
const dbError = new Error('DB Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
// The new implementation uses handleDbError, which will throw a generic Error with the default message.
|
||||
await expect(flyerRepo.findOrCreateStore('Any Store', mockLogger)).rejects.toThrow(
|
||||
'Failed to find or create store in database.',
|
||||
);
|
||||
// handleDbError also logs the error.
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, storeName: 'Any Store' },
|
||||
'Database error in findOrCreateStore',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error if race condition recovery fails', async () => {
|
||||
const uniqueConstraintError = new Error('duplicate key value violates unique constraint');
|
||||
(uniqueConstraintError as Error & { code: string }).code = '23505';
|
||||
|
||||
it('should throw an error if store is not found after upsert (edge case)', async () => {
|
||||
// This simulates a very unlikely scenario where the store is deleted between the
|
||||
// INSERT...ON CONFLICT and the subsequent SELECT.
|
||||
mockPoolInstance.query
|
||||
.mockResolvedValueOnce({ rows: [] }) // First SELECT
|
||||
.mockRejectedValueOnce(uniqueConstraintError) // INSERT fails
|
||||
.mockRejectedValueOnce(new Error('Second select fails')); // Recovery SELECT fails
|
||||
.mockResolvedValueOnce({ rows: [], rowCount: 1 }) // INSERT succeeds
|
||||
.mockResolvedValueOnce({ rows: [] }); // SELECT finds nothing
|
||||
|
||||
await expect(flyerRepo.findOrCreateStore('Racy Store', mockLogger)).rejects.toThrow(
|
||||
await expect(flyerRepo.findOrCreateStore('Weird Store', mockLogger)).rejects.toThrow(
|
||||
'Failed to find or create store in database.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: expect.any(Error), storeName: 'Racy Store' },
|
||||
{
|
||||
err: new Error('Failed to find store immediately after upsert operation.'),
|
||||
storeName: 'Weird Store',
|
||||
},
|
||||
'Database error in findOrCreateStore',
|
||||
);
|
||||
});
|
||||
|
||||
@@ -28,46 +28,32 @@ export class FlyerRepository {
|
||||
* @returns A promise that resolves to the store's ID.
|
||||
*/
|
||||
async findOrCreateStore(storeName: string, logger: Logger): Promise<number> {
|
||||
// Note: This method should be called within a transaction if the caller
|
||||
// needs to ensure atomicity with other operations.
|
||||
try {
|
||||
// First, try to find the store.
|
||||
let result = await this.db.query<{ store_id: number }>(
|
||||
// Atomically insert the store if it doesn't exist. This is safe from race conditions.
|
||||
await this.db.query(
|
||||
'INSERT INTO public.stores (name) VALUES ($1) ON CONFLICT (name) DO NOTHING',
|
||||
[storeName],
|
||||
);
|
||||
|
||||
// Now, the store is guaranteed to exist, so we can safely select its ID.
|
||||
const result = await this.db.query<{ store_id: number }>(
|
||||
'SELECT store_id FROM public.stores WHERE name = $1',
|
||||
[storeName],
|
||||
);
|
||||
|
||||
if (result.rows.length > 0) {
|
||||
return result.rows[0].store_id;
|
||||
} else {
|
||||
// If not found, create it.
|
||||
result = await this.db.query<{ store_id: number }>(
|
||||
'INSERT INTO public.stores (name) VALUES ($1) RETURNING store_id',
|
||||
[storeName],
|
||||
);
|
||||
return result.rows[0].store_id;
|
||||
// This case should be virtually impossible if the INSERT...ON CONFLICT logic is correct,
|
||||
// as it would mean the store was deleted between the two queries. We throw an error to be safe.
|
||||
if (result.rows.length === 0) {
|
||||
throw new Error('Failed to find store immediately after upsert operation.');
|
||||
}
|
||||
|
||||
return result.rows[0].store_id;
|
||||
} catch (error) {
|
||||
// Check for a unique constraint violation on name, which could happen in a race condition
|
||||
// if two processes try to create the same store at the same time.
|
||||
if (error instanceof Error && 'code' in error && error.code === '23505') {
|
||||
try {
|
||||
logger.warn(
|
||||
{ storeName },
|
||||
`Race condition avoided: Store was created by another process. Refetching.`,
|
||||
);
|
||||
const result = await this.db.query<{ store_id: number }>(
|
||||
'SELECT store_id FROM public.stores WHERE name = $1',
|
||||
[storeName],
|
||||
);
|
||||
if (result.rows.length > 0) return result.rows[0].store_id;
|
||||
} catch (recoveryError) {
|
||||
// If recovery fails, log a warning and fall through to the generic error handler
|
||||
logger.warn({ err: recoveryError, storeName }, 'Race condition recovery failed');
|
||||
}
|
||||
}
|
||||
logger.error({ err: error, storeName }, 'Database error in findOrCreateStore');
|
||||
throw new Error('Failed to find or create store in database.');
|
||||
// Use the centralized error handler for any unexpected database errors.
|
||||
handleDbError(error, logger, 'Database error in findOrCreateStore', { storeName }, {
|
||||
// Any error caught here is unexpected, so we use a generic message.
|
||||
defaultMessage: 'Failed to find or create store in database.',
|
||||
});
|
||||
}
|
||||
}
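For reference, the find-or-create pattern above (INSERT ... ON CONFLICT DO NOTHING followed by a SELECT) can also be collapsed into a single round trip. A sketch under the same schema assumptions, not what this changeset actually implements:

// Sketch only: one-statement alternative to the INSERT ... DO NOTHING + SELECT pair.
// The DO UPDATE arm is a no-op write that exists solely so RETURNING yields the
// existing row's id on conflict.
const upsert = await this.db.query<{ store_id: number }>(
  `INSERT INTO public.stores (name) VALUES ($1)
   ON CONFLICT (name) DO UPDATE SET name = EXCLUDED.name
   RETURNING store_id`,
  [storeName],
);
return upsert.rows[0].store_id;

The trade-off is that the DO UPDATE arm performs a write even when the store already exists, which adds dead tuples on hot rows; the two-query form chosen here avoids that.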
|
||||
|
||||
|
||||
@@ -21,6 +21,11 @@ describe('FlyerDataTransformer', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
transformer = new FlyerDataTransformer();
|
||||
// Stub environment variables to ensure consistency and predictability.
|
||||
// Prioritize FRONTEND_URL to match the updated service logic.
|
||||
vi.stubEnv('FRONTEND_URL', 'http://localhost:3000');
|
||||
vi.stubEnv('BASE_URL', ''); // Ensure this is not used to confirm priority logic
|
||||
vi.stubEnv('PORT', ''); // Ensure this is not used
|
||||
|
||||
// Provide a default mock implementation for generateFlyerIcon
|
||||
vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer-page-1.webp');
|
||||
@@ -70,7 +75,8 @@ describe('FlyerDataTransformer', () => {
|
||||
mockLogger,
|
||||
);
|
||||
|
||||
const baseUrl = `http://localhost:${process.env.PORT || 3000}`;
|
||||
// Dynamically construct the expected base URL, mirroring the logic in the transformer.
|
||||
const expectedBaseUrl = `http://localhost:3000`;
|
||||
|
||||
// Assert
|
||||
// 0. Check logging
|
||||
@@ -85,8 +91,8 @@ describe('FlyerDataTransformer', () => {
|
||||
// 1. Check flyer data
|
||||
expect(flyerData).toEqual({
|
||||
file_name: originalFileName,
|
||||
image_url: `${baseUrl}/flyer-images/flyer-page-1.jpg`,
|
||||
icon_url: `${baseUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
|
||||
image_url: `${expectedBaseUrl}/flyer-images/flyer-page-1.jpg`,
|
||||
icon_url: `${expectedBaseUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
|
||||
checksum,
|
||||
store_name: 'Test Store',
|
||||
valid_from: '2024-01-01',
|
||||
@@ -153,7 +159,8 @@ describe('FlyerDataTransformer', () => {
|
||||
mockLogger,
|
||||
);
|
||||
|
||||
const baseUrl = `http://localhost:${process.env.PORT || 3000}`;
|
||||
// Dynamically construct the expected base URL, mirroring the logic in the transformer.
|
||||
const expectedBaseUrl = `http://localhost:3000`;
|
||||
|
||||
// Assert
|
||||
// 0. Check logging
|
||||
@@ -171,8 +178,8 @@ describe('FlyerDataTransformer', () => {
|
||||
expect(itemsForDb).toHaveLength(0);
|
||||
expect(flyerData).toEqual({
|
||||
file_name: originalFileName,
|
||||
image_url: `${baseUrl}/flyer-images/another.png`,
|
||||
icon_url: `${baseUrl}/flyer-images/icons/icon-another.webp`,
|
||||
image_url: `${expectedBaseUrl}/flyer-images/another.png`,
|
||||
icon_url: `${expectedBaseUrl}/flyer-images/icons/icon-another.webp`,
|
||||
checksum,
|
||||
store_name: 'Unknown Store (auto)', // Should use fallback
|
||||
valid_from: null,
|
||||
@@ -180,7 +187,7 @@ describe('FlyerDataTransformer', () => {
|
||||
store_address: null,
|
||||
item_count: 0,
|
||||
status: 'needs_review',
|
||||
uploaded_by: undefined, // Should be undefined
|
||||
uploaded_by: null, // Should be null
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -23,14 +23,14 @@ export class FlyerDataTransformer {
|
||||
): FlyerItemInsert {
|
||||
return {
|
||||
...item,
|
||||
// Use logical OR to default falsy values (null, undefined, '') to a fallback.
|
||||
// The trim is important for cases where the AI returns only whitespace.
|
||||
item: String(item.item || '').trim() || 'Unknown Item',
|
||||
// Use nullish coalescing to default only null/undefined to an empty string.
|
||||
price_display: String(item.price_display ?? ''),
|
||||
quantity: String(item.quantity ?? ''),
|
||||
// Use logical OR to default falsy category names (null, undefined, '') to a fallback.
|
||||
category_name: String(item.category_name || 'Other/Miscellaneous'),
|
||||
// Use nullish coalescing and trim for robustness.
|
||||
// An empty or whitespace-only name falls back to 'Unknown Item'.
|
||||
item: (item.item ?? '').trim() || 'Unknown Item',
|
||||
// Default null/undefined to an empty string and trim.
|
||||
price_display: (item.price_display ?? '').trim(),
|
||||
quantity: (item.quantity ?? '').trim(),
|
||||
// An empty or whitespace-only category falls back to 'Other/Miscellaneous'.
|
||||
category_name: (item.category_name ?? '').trim() || 'Other/Miscellaneous',
|
||||
// Use nullish coalescing to convert null to undefined for the database.
|
||||
master_item_id: item.master_item_id ?? undefined,
|
||||
view_count: 0,
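For clarity, the defaulting above behaves as follows on representative inputs:

// item:          (undefined ?? '').trim() || 'Unknown Item'     -> 'Unknown Item'
// item:          ('  Milk 2% ' ?? '').trim() || 'Unknown Item'  -> 'Milk 2%'
// price_display: (null ?? '').trim()                            -> ''  (kept as an empty string, no fallback)
// category_name: ('' ?? '').trim() || 'Other/Miscellaneous'     -> 'Other/Miscellaneous'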
|
||||
@@ -75,20 +75,38 @@ export class FlyerDataTransformer {
|
||||
logger.warn('AI did not return a store name. Using fallback "Unknown Store (auto)".');
|
||||
}
|
||||
|
||||
// Construct proper URLs including protocol and host to satisfy DB constraints
|
||||
const baseUrl = process.env.BASE_URL || `http://localhost:${process.env.PORT || 3000}`;
|
||||
// Construct proper URLs including protocol and host to satisfy DB constraints.
|
||||
// This logic is made more robust to handle cases where env vars might be present but invalid (e.g., whitespace or missing protocol).
|
||||
let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
|
||||
|
||||
if (!baseUrl || !baseUrl.startsWith('http')) {
|
||||
const port = process.env.PORT || 3000;
|
||||
const fallbackUrl = `http://localhost:${port}`;
|
||||
if (baseUrl) {
|
||||
// It was set but invalid
|
||||
logger.warn(
|
||||
`FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
|
||||
);
|
||||
}
|
||||
baseUrl = fallbackUrl;
|
||||
}
|
||||
|
||||
baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
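Concretely, this fallback resolves the base URL as follows (values are illustrative):

// FRONTEND_URL='https://deals.example.com/'                 -> 'https://deals.example.com' (trailing slash stripped)
// FRONTEND_URL unset, BASE_URL='http://cdn.example.com'     -> 'http://cdn.example.com'
// FRONTEND_URL='deals.example.com' (no protocol), PORT=8080 -> warning logged, falls back to 'http://localhost:8080'
// nothing set                                               -> 'http://localhost:3000'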
|
||||
|
||||
const flyerData: FlyerInsert = {
|
||||
file_name: originalFileName,
|
||||
image_url: new URL(`/flyer-images/${path.basename(firstImage)}`, baseUrl).href,
|
||||
icon_url: new URL(`/flyer-images/icons/${iconFileName}`, baseUrl).href,
|
||||
image_url: `${baseUrl}/flyer-images/${path.basename(firstImage)}`,
|
||||
icon_url: `${baseUrl}/flyer-images/icons/${iconFileName}`,
|
||||
checksum,
|
||||
store_name: storeName,
|
||||
valid_from: extractedData.valid_from,
|
||||
valid_to: extractedData.valid_to,
|
||||
store_address: extractedData.store_address, // The number of items is now calculated directly from the transformed data.
|
||||
item_count: itemsForDb.length,
|
||||
uploaded_by: userId,
|
||||
// Defensively handle the userId. An empty string ('') is not a valid UUID,
|
||||
// but `null` is. This ensures that any falsy value for userId (undefined, null, '')
|
||||
// is converted to `null` for the database, preventing a 22P02 error.
|
||||
uploaded_by: userId || null,
|
||||
status: needsReview ? 'needs_review' : 'processed',
|
||||
};
|
||||
|
||||
|
||||
@@ -35,15 +35,13 @@ vi.mock('./logger.server', () => ({
|
||||
import { logger } from './logger.server';
|
||||
|
||||
describe('Geocoding Service', () => {
|
||||
const originalEnv = process.env;
|
||||
let geocodingService: GeocodingService;
|
||||
let mockGoogleService: GoogleGeocodingService;
|
||||
let mockNominatimService: NominatimGeocodingService;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
// Restore process.env to its original state before each test
|
||||
process.env = { ...originalEnv };
|
||||
vi.unstubAllEnvs();
|
||||
|
||||
// Create a mock instance of the Google service
|
||||
mockGoogleService = { geocode: vi.fn() } as unknown as GoogleGeocodingService;
|
||||
@@ -53,8 +51,7 @@ describe('Geocoding Service', () => {
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore process.env after each test
|
||||
process.env = originalEnv;
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
describe('geocodeAddress', () => {
|
||||
@@ -77,7 +74,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should log an error but continue if Redis GET fails', async () => {
|
||||
// Arrange: Mock Redis 'get' to fail, but Google API to succeed
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-key');
|
||||
mocks.mockRedis.get.mockRejectedValue(new Error('Redis down'));
|
||||
vi.mocked(mockGoogleService.geocode).mockResolvedValue(coordinates);
|
||||
|
||||
@@ -95,7 +92,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should proceed to fetch if cached data is invalid JSON', async () => {
|
||||
// Arrange: Mock Redis to return a malformed JSON string
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-key');
|
||||
mocks.mockRedis.get.mockResolvedValue('{ "lat": 45.0, "lng": -75.0 '); // Missing closing brace
|
||||
vi.mocked(mockGoogleService.geocode).mockResolvedValue(coordinates);
|
||||
|
||||
@@ -115,7 +112,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should fetch from Google, return coordinates, and cache the result on cache miss', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-key');
|
||||
mocks.mockRedis.get.mockResolvedValue(null); // Cache miss
|
||||
vi.mocked(mockGoogleService.geocode).mockResolvedValue(coordinates);
|
||||
|
||||
@@ -136,7 +133,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should fall back to Nominatim if Google API key is missing', async () => {
|
||||
// Arrange
|
||||
delete process.env.GOOGLE_MAPS_API_KEY;
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', '');
|
||||
mocks.mockRedis.get.mockResolvedValue(null);
|
||||
vi.mocked(mockNominatimService.geocode).mockResolvedValue(coordinates);
|
||||
|
||||
@@ -155,7 +152,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should fall back to Nominatim if Google API returns a non-OK status', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-key');
|
||||
mocks.mockRedis.get.mockResolvedValue(null);
|
||||
vi.mocked(mockGoogleService.geocode).mockResolvedValue(null); // Google returns no results
|
||||
vi.mocked(mockNominatimService.geocode).mockResolvedValue(coordinates);
|
||||
@@ -174,7 +171,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should fall back to Nominatim if Google API fetch call fails', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-key');
|
||||
mocks.mockRedis.get.mockResolvedValue(null);
|
||||
vi.mocked(mockGoogleService.geocode).mockRejectedValue(new Error('Network Error'));
|
||||
vi.mocked(mockNominatimService.geocode).mockResolvedValue(coordinates);
|
||||
@@ -193,7 +190,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should return null and log an error if both Google and Nominatim fail', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-key');
|
||||
mocks.mockRedis.get.mockResolvedValue(null);
|
||||
vi.mocked(mockGoogleService.geocode).mockRejectedValue(new Error('Network Error'));
|
||||
vi.mocked(mockNominatimService.geocode).mockResolvedValue(null); // Nominatim also fails
|
||||
@@ -209,7 +206,7 @@ describe('Geocoding Service', () => {
|
||||
|
||||
it('should return coordinates even if Redis SET fails', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-key');
|
||||
mocks.mockRedis.get.mockResolvedValue(null); // Cache miss
|
||||
vi.mocked(mockGoogleService.geocode).mockResolvedValue(coordinates);
|
||||
// Mock Redis 'set' to fail
|
||||
|
||||
@@ -20,25 +20,22 @@ import { logger as mockLogger } from './logger.server';
|
||||
|
||||
describe('Google Geocoding Service', () => {
|
||||
let googleGeocodingService: GoogleGeocodingService;
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
// Mock the global fetch function before each test
|
||||
vi.stubGlobal('fetch', vi.fn());
|
||||
// Restore process.env to a clean state for each test
|
||||
process.env = { ...originalEnv };
|
||||
vi.unstubAllEnvs();
|
||||
googleGeocodingService = new GoogleGeocodingService();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment variables after each test
|
||||
process.env = originalEnv;
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
it('should return coordinates for a valid address when API key is present', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-api-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-api-key');
|
||||
const mockApiResponse = {
|
||||
status: 'OK',
|
||||
results: [
|
||||
@@ -70,7 +67,7 @@ describe('Google Geocoding Service', () => {
|
||||
|
||||
it('should throw an error if GOOGLE_MAPS_API_KEY is not set', async () => {
|
||||
// Arrange
|
||||
delete process.env.GOOGLE_MAPS_API_KEY;
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', '');
|
||||
|
||||
// Act & Assert
|
||||
await expect(googleGeocodingService.geocode('Any Address', mockLogger)).rejects.toThrow(
|
||||
@@ -81,7 +78,7 @@ describe('Google Geocoding Service', () => {
|
||||
|
||||
it('should return null if the API returns a status other than "OK"', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-api-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-api-key');
|
||||
const mockApiResponse = { status: 'ZERO_RESULTS', results: [] };
|
||||
vi.mocked(fetch).mockResolvedValue({
|
||||
ok: true,
|
||||
@@ -101,7 +98,7 @@ describe('Google Geocoding Service', () => {
|
||||
|
||||
it('should throw an error if the fetch response is not ok', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-api-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-api-key');
|
||||
vi.mocked(fetch).mockResolvedValue({
|
||||
ok: false,
|
||||
status: 403,
|
||||
@@ -119,7 +116,7 @@ describe('Google Geocoding Service', () => {
|
||||
|
||||
it('should throw an error if the fetch call itself fails', async () => {
|
||||
// Arrange
|
||||
process.env.GOOGLE_MAPS_API_KEY = 'test-api-key';
|
||||
vi.stubEnv('GOOGLE_MAPS_API_KEY', 'test-api-key');
|
||||
const networkError = new Error('Network request failed');
|
||||
vi.mocked(fetch).mockRejectedValue(networkError);
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
// src/services/logger.server.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';

// Mock pino before importing the logger
const pinoMock = vi.fn(() => ({
@@ -15,16 +15,21 @@ describe('Server Logger', () => {
// Reset modules to ensure process.env changes are applied to new module instances
vi.resetModules();
vi.clearAllMocks();
vi.unstubAllEnvs();
});

afterEach(() => {
vi.unstubAllEnvs();
});

it('should initialize pino with the correct level for production', async () => {
process.env.NODE_ENV = 'production';
vi.stubEnv('NODE_ENV', 'production');
await import('./logger.server');
expect(pinoMock).toHaveBeenCalledWith(expect.objectContaining({ level: 'info' }));
});

it('should initialize pino with pretty-print transport for development', async () => {
process.env.NODE_ENV = 'development';
vi.stubEnv('NODE_ENV', 'development');
await import('./logger.server');
expect(pinoMock).toHaveBeenCalledWith(
expect.objectContaining({ transport: expect.any(Object) }),
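Note: these assertions check that the logger module, at import time, picks level 'info' in production and a pretty-print transport otherwise. A sketch of a logger module shaped to satisfy them; this is an assumption about the file's contents, not a copy of it, and it presumes pino-pretty is installed:

// Sketch of src/services/logger.server.ts (assumed shape only).
import pino from 'pino';

const isProduction = process.env.NODE_ENV === 'production';

export const logger = pino({
  // Matches the test expectation of level 'info' in production.
  level: isProduction ? 'info' : 'debug',
  // A transport object only outside production, matching expect.any(Object).
  ...(isProduction ? {} : { transport: { target: 'pino-pretty' } }),
});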
@@ -92,6 +92,8 @@ export const flyerWorker = new Worker<FlyerJobData>(
{
connection,
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
// Increase lock duration to prevent jobs from being re-processed prematurely.
lockDuration: parseInt(process.env.WORKER_LOCK_DURATION || '30000', 10),
},
);
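Note: lockDuration is how long BullMQ considers a job locked to one worker; if a slow job outlives it, the job can be picked up again and processed twice, which is what the comment guards against. Reading it from WORKER_LOCK_DURATION lets the deployment raise it from the 30s default to the 120s the integration-test comments below assume. A sketch of a worker wired this way; the queue name, connection details, and processor body are placeholders, not the project's actual worker:

import { Worker, type Job } from 'bullmq';

// Placeholder payload type for illustration.
interface FlyerJobData {
  flyerId: number;
}

export const worker = new Worker<FlyerJobData>(
  'flyer-processing', // placeholder queue name
  async (job: Job<FlyerJobData>) => {
    // Long-running work (e.g. AI extraction) would happen here.
    return { flyerId: job.data.flyerId };
  },
  {
    connection: { host: process.env.REDIS_HOST ?? 'localhost', port: 6379 },
    concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
    // With WORKER_LOCK_DURATION=120000 the lock lasts two minutes instead of 30 seconds.
    lockDuration: parseInt(process.env.WORKER_LOCK_DURATION || '30000', 10),
  },
);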
@@ -174,6 +174,11 @@ describe('Authentication E2E Flow', () => {
expect(registerResponse.status).toBe(201);
createdUserIds.push(registerData.userprofile.user.user_id);

// Add a small delay to mitigate potential DB replication lag or race conditions
// in the test environment. Increased from 2s to 5s to improve stability.
// The root cause is likely environmental slowness in the CI database.
await new Promise((resolve) => setTimeout(resolve, 5000));

// Act 1: Request a password reset.
// The test environment returns the token directly in the response for E2E testing.
const forgotResponse = await apiClient.requestPasswordReset(email);
@@ -89,7 +89,7 @@ describe('E2E Flyer Upload and Processing Workflow', () => {

// 5. Poll for job completion
let jobStatus;
const maxRetries = 30; // Poll for up to 90 seconds
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3s

@@ -106,5 +106,5 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
expect(jobStatus.state).toBe('completed');
flyerId = jobStatus.returnValue?.flyerId;
expect(flyerId).toBeTypeOf('number');
}, 120000); // Extended timeout for AI processing
}, 240000); // Extended timeout for AI processing
});
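Note: several suites in this change repeat the same poll-every-3-seconds loop with only the retry count differing; a shared helper would keep the timing math in one place. This is a sketch of such a helper under the assumption of a caller-supplied getStatus callback, not an existing utility in the repo:

// Hypothetical helper; the JobStatus shape and getStatus callback are assumptions.
interface JobStatus {
  state: string;
  returnValue?: { flyerId?: number };
  failedReason?: string;
}

async function pollForJobCompletion(
  getStatus: () => Promise<JobStatus>,
  maxRetries = 60, // 60 * 3s = up to 180 seconds
  intervalMs = 3000,
): Promise<JobStatus> {
  let last: JobStatus | undefined;
  for (let i = 0; i < maxRetries; i++) {
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    last = await getStatus();
    if (last.state === 'completed' || last.state === 'failed') return last;
  }
  throw new Error(`Job did not finish after ${maxRetries} polls: ${JSON.stringify(last)}`);
}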
@@ -163,8 +163,8 @@ describe('Admin API Routes Integration Tests', () => {
// Before each modification test, create a fresh flyer item and a correction for it.
beforeEach(async () => {
const flyerRes = await getPool().query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
VALUES ($1, 'admin-test.jpg', 'https://example.com/flyer-images/asdmin-test.jpg', 1, $2) RETURNING flyer_id`,
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
VALUES ($1, 'admin-test.jpg', 'https://example.com/flyer-images/asdmin-test.jpg', 'https://example.com/flyer-images/icons/admin-test.jpg', 1, $2) RETURNING flyer_id`,
// The checksum must be a unique 64-character string to satisfy the DB constraint.
// We generate a dynamic string and pad it to 64 characters.
[testStoreId, `checksum-${Date.now()}-${Math.random()}`.padEnd(64, '0')],
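Note: the checksum comments describe the trick used by every fixture in this change: build a unique string and pad it to the 64 characters the column constraint expects. Expressed as a hypothetical standalone helper (not part of the repo):

// Hypothetical helper: unique, exactly-64-character test checksum.
function makeTestChecksum(prefix = 'checksum'): string {
  return `${prefix}-${Date.now()}-${Math.random()}`.padEnd(64, '0').slice(0, 64);
}

const checksum = makeTestChecksum(); // e.g. "checksum-1733...000" padded/truncated to 64 chars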
@@ -101,7 +101,9 @@ describe('Flyer Processing Background Job Integration Test', () => {

// Act 2: Poll for the job status until it completes.
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds (60 * 3s)
// Poll for up to 210 seconds (70 * 3s). This should be greater than the worker's
// lockDuration (120s) to patiently wait for long-running jobs.
const maxRetries = 70;
for (let i = 0; i < maxRetries; i++) {
console.log(`Polling attempt ${i + 1}...`);
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3 seconds between polls
@@ -223,7 +225,7 @@ describe('Flyer Processing Background Job Integration Test', () => {

// Poll for job completion
let jobStatus;
const maxRetries = 30; // Poll for up to 90 seconds
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
@@ -240,7 +242,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason);
}
expect(jobStatus?.state).toBe('completed');
const flyerId = jobStatus?.data?.flyerId;
const flyerId = jobStatus?.returnValue?.flyerId;
expect(flyerId).toBeTypeOf('number');
createdFlyerIds.push(flyerId);

@@ -309,7 +311,7 @@ describe('Flyer Processing Background Job Integration Test', () => {

// Poll for job completion
let jobStatus;
const maxRetries = 30;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
@@ -326,7 +328,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
console.error('[DEBUG] PNG metadata test job failed:', jobStatus.failedReason);
}
expect(jobStatus?.state).toBe('completed');
const flyerId = jobStatus?.data?.flyerId;
const flyerId = jobStatus?.returnValue?.flyerId;
expect(flyerId).toBeTypeOf('number');
createdFlyerIds.push(flyerId);
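Note: the data → returnValue change reflects how BullMQ exposes results: job.data holds the payload that was enqueued, while whatever the processor resolves with is stored separately as the job's return value, so the flyerId produced by processing was never on data. A sketch of the distinction, assuming the status route simply mirrors these fields (queue name and payload shape are placeholders):

import { Queue } from 'bullmq';

const queue = new Queue<{ imagePath: string }>('flyer-processing'); // placeholder queue name

export async function describeJob(jobId: string) {
  const job = await queue.getJob(jobId);
  if (!job) return null;
  return {
    state: await job.getState(), // 'completed', 'failed', 'active', ...
    data: job.data, // the enqueued payload, e.g. { imagePath: '...' }
    returnValue: job.returnvalue, // what the processor returned, e.g. { flyerId: 123 }
  };
}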
@@ -24,8 +24,8 @@ describe('Public Flyer API Routes Integration Tests', () => {
const storeId = storeRes.rows[0].store_id;

const flyerRes = await getPool().query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
VALUES ($1, 'integration-test.jpg', 'https://example.com/flyer-images/integration-test.jpg', 1, $2) RETURNING flyer_id`,
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
VALUES ($1, 'integration-test.jpg', 'https://example.com/flyer-images/integration-test.jpg', 'https://example.com/flyer-images/icons/integration-test.jpg', 1, $2) RETURNING flyer_id`,
[storeId, `${Date.now().toString(16)}`.padEnd(64, '0')],
);
createdFlyerId = flyerRes.rows[0].flyer_id;
@@ -101,7 +101,7 @@ describe('Gamification Flow Integration Test', () => {

// --- Act 2: Poll for job completion ---
let jobStatus;
const maxRetries = 30; // Poll for up to 90 seconds
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
@@ -162,6 +162,6 @@ describe('Gamification Flow Integration Test', () => {
firstUploadAchievement!.points_value,
);
},
120000, // Increase timeout to 120 seconds for this long-running test
240000, // Increase timeout to 240s to match other long-running processing tests
);
});
@@ -34,22 +34,22 @@ describe('Price History API Integration Test (/api/price-history)', () => {

// 3. Create two flyers with different dates
const flyerRes1 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-1.jpg', 'https://example.com/flyer-images/price-test-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-1.jpg', 'https://example.com/flyer-images/price-test-1.jpg', 'https://example.com/flyer-images/icons/price-test-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
[storeId, `${Date.now().toString(16)}1`.padEnd(64, '0')],
);
flyerId1 = flyerRes1.rows[0].flyer_id;

const flyerRes2 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-2.jpg', 'https://example.com/flyer-images/price-test-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-2.jpg', 'https://example.com/flyer-images/price-test-2.jpg', 'https://example.com/flyer-images/icons/price-test-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
[storeId, `${Date.now().toString(16)}2`.padEnd(64, '0')],
);
flyerId2 = flyerRes2.rows[0].flyer_id; // This was a duplicate, fixed.

const flyerRes3 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-3.jpg', 'https://example.com/flyer-images/price-test-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-3.jpg', 'https://example.com/flyer-images/price-test-3.jpg', 'https://example.com/flyer-images/icons/price-test-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
[storeId, `${Date.now().toString(16)}3`.padEnd(64, '0')],
);
flyerId3 = flyerRes3.rows[0].flyer_id;
@@ -77,8 +77,8 @@ describe('Public API Routes Integration Tests', () => {
);
testStoreId = storeRes.rows[0].store_id;
const flyerRes = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
VALUES ($1, 'public-routes-test.jpg', 'https://example.com/flyer-images/public-routes-test.jpg', 1, $2) RETURNING *`,
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
VALUES ($1, 'public-routes-test.jpg', 'https://example.com/flyer-images/public-routes-test.jpg', 'https://example.com/flyer-images/icons/public-routes-test.jpg', 1, $2) RETURNING *`,
[testStoreId, `${Date.now().toString(16)}`.padEnd(64, '0')],
);
testFlyer = flyerRes.rows[0];
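Note: each of these suites patches the same INSERT to include the now-required icon_url column; a shared fixture could keep them in sync. A sketch only, assuming a pg Pool and the column set shown in the diffs (the helper itself does not exist in the repo):

import type { Pool } from 'pg';

// Hypothetical fixture: inserts a flyer with every column the updated schema requires,
// including icon_url, and returns the new flyer_id.
async function insertTestFlyer(pool: Pool, storeId: number, baseName: string): Promise<number> {
  const checksum = `${Date.now().toString(16)}-${baseName}`.padEnd(64, '0').slice(0, 64);
  const res = await pool.query(
    `INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
     VALUES ($1, $2, $3, $4, 1, $5) RETURNING flyer_id`,
    [
      storeId,
      `${baseName}.jpg`,
      `https://example.com/flyer-images/${baseName}.jpg`,
      `https://example.com/flyer-images/icons/${baseName}.jpg`,
      checksum,
    ],
  );
  return res.rows[0].flyer_id;
}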
@@ -14,7 +14,7 @@ export interface Flyer {
readonly flyer_id: number;
file_name: string;
image_url: string;
icon_url?: string | null; // URL for the 64x64 icon version of the flyer
icon_url: string; // URL for the 64x64 icon version of the flyer
readonly checksum?: string;
readonly store_id?: number;
valid_from?: string | null;
@@ -72,7 +72,7 @@ export interface FlyerItem {
item: string;
price_display: string;
price_in_cents?: number | null;
quantity?: string;
quantity: string;
quantity_num?: number | null;
master_item_id?: number; // Can be updated by admin correction
master_item_name?: string | null;
@@ -536,7 +536,7 @@ export type ActivityLogAction =
interface ActivityLogItemBase {
readonly activity_log_id: number;
readonly user_id: string | null;
action: string;
action: ActivityLogAction;
display_text: string;
icon?: string | null;
readonly created_at: string;
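Note: narrowing action from string to the ActivityLogAction union lets the compiler reject unknown action names instead of accepting any text. A sketch of how that pays off; the union members shown here are invented examples, not the project's real list:

// Invented example members; the real ActivityLogAction union lives in the types file.
type ActivityLogAction = 'flyer_uploaded' | 'item_corrected' | 'achievement_earned';

interface ActivityLogItemBase {
  readonly activity_log_id: number;
  action: ActivityLogAction;
  display_text: string;
}

const entry: ActivityLogItemBase = {
  activity_log_id: 1,
  action: 'flyer_uploaded', // OK: a member of the union
  display_text: 'Uploaded a flyer',
};

// With `action: string` a typo such as 'flyer_uplaoded' would compile;
// with the union it is a compile-time error.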