Compare commits
2 Commits
| Author | SHA1 | Date |
|---|---|---|
| | cb453aa949 | |
| | 2651bd16ae | |
package-lock.json (generated, 4 lines changed)
@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.16",
+  "version": "0.9.17",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.16",
+      "version": "0.9.17",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.16",
+  "version": "0.9.17",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -59,21 +59,21 @@ describe('FlyerReviewPage', () => {
         file_name: 'flyer1.jpg',
         created_at: '2023-01-01T00:00:00Z',
         store: { name: 'Store A' },
-        icon_url: 'icon1.jpg',
+        icon_url: 'http://example.com/icon1.jpg',
       },
       {
         flyer_id: 2,
         file_name: 'flyer2.jpg',
         created_at: '2023-01-02T00:00:00Z',
         store: { name: 'Store B' },
-        icon_url: 'icon2.jpg',
+        icon_url: 'http://example.com/icon2.jpg',
       },
       {
         flyer_id: 3,
         file_name: 'flyer3.jpg',
         created_at: '2023-01-03T00:00:00Z',
         store: null,
-        icon_url: null,
+        icon_url: 'http://example.com/icon2.jpg',
       },
     ];
@@ -203,12 +203,13 @@ describe('AI Service (Server)', () => {

     // Access the private aiClient (which is now the adapter)
     const adapter = (service as any).aiClient;
+    const models = (service as any).models;

     const request = { contents: [{ parts: [{ text: 'test' }] }] };
     await adapter.generateContent(request);

     expect(mockGenerateContent).toHaveBeenCalledWith({
-      model: 'gemini-3-flash-preview',
+      model: models[0],
       ...request,
     });
   });
@@ -238,11 +239,44 @@ describe('AI Service (Server)', () => {
     vi.unstubAllEnvs();
   });

+  it('should use lite models when useLiteModels is true', async () => {
+    // Arrange
+    const { AIService } = await import('./aiService.server');
+    const { logger } = await import('./logger.server');
+    const serviceWithFallback = new AIService(logger);
+    const models_lite = (serviceWithFallback as any).models_lite;
+    const successResponse = { text: 'Success from lite model', candidates: [] };
+
+    mockGenerateContent.mockResolvedValue(successResponse);
+
+    const request = {
+      contents: [{ parts: [{ text: 'test prompt' }] }],
+      useLiteModels: true,
+    };
+    // The adapter strips `useLiteModels` before calling the underlying client,
+    // so we prepare the expected request shape for our assertions.
+    const { useLiteModels, ...apiReq } = request;
+
+    // Act
+    const result = await (serviceWithFallback as any).aiClient.generateContent(request);
+
+    // Assert
+    expect(result).toEqual(successResponse);
+    expect(mockGenerateContent).toHaveBeenCalledTimes(1);
+
+    // Check that the first model from the lite list was used
+    expect(mockGenerateContent).toHaveBeenCalledWith({
+      model: models_lite[0],
+      ...apiReq,
+    });
+  });
+
   it('should try the next model if the first one fails with a quota error', async () => {
     // Arrange
     const { AIService } = await import('./aiService.server');
     const { logger } = await import('./logger.server');
     const serviceWithFallback = new AIService(logger);
+    const models = (serviceWithFallback as any).models;

     const quotaError = new Error('User rate limit exceeded due to quota');
     const successResponse = { text: 'Success from fallback model', candidates: [] };
@@ -260,22 +294,23 @@ describe('AI Service (Server)', () => {
     expect(mockGenerateContent).toHaveBeenCalledTimes(2);

     // Check first call
-    expect(mockGenerateContent).toHaveBeenNthCalledWith(1, { // The first model in the list is now 'gemini-3-flash-preview'
-      model: 'gemini-3-flash-preview',
+    expect(mockGenerateContent).toHaveBeenNthCalledWith(1, { // The first model in the list
+      model: models[0],
       ...request,
     });

     // Check second call
-    expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { // The second model in the list is 'gemini-2.5-pro'
-      model: 'gemini-2.5-pro',
+    expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { // The second model in the list
+      model: models[1],
       ...request,
     });

     // Check that a warning was logged
     expect(logger.warn).toHaveBeenCalledWith(
-      // The warning should be for the model that failed ('gemini-3-flash-preview'), not the next one.
-      // The warning should be for the model that failed ('gemini-2.5-flash'), not the next one.
+      // The warning should be for the model that failed, not the next one.
       expect.stringContaining(
-        "Model 'gemini-3-flash-preview' failed due to quota/rate limit. Trying next model.",
+        `Model '${models[0]}' failed due to quota/rate limit. Trying next model.`,
       ),
     );
   });
@@ -285,6 +320,7 @@ describe('AI Service (Server)', () => {
     const { AIService } = await import('./aiService.server');
     const { logger } = await import('./logger.server');
     const serviceWithFallback = new AIService(logger);
+    const models = (serviceWithFallback as any).models;

     const nonRetriableError = new Error('Invalid API Key');
     mockGenerateContent.mockRejectedValueOnce(nonRetriableError);
@@ -298,8 +334,10 @@ describe('AI Service (Server)', () => {

     expect(mockGenerateContent).toHaveBeenCalledTimes(1);
     expect(logger.error).toHaveBeenCalledWith(
-      { error: nonRetriableError }, // The first model in the list is now 'gemini-3-flash-preview'
-      `[AIService Adapter] Model 'gemini-3-flash-preview' failed with a non-retriable error.`,
-      { error: nonRetriableError }, // The first model in the list is now 'gemini-2.5-flash'
-      `[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
+      { error: nonRetriableError }, // The first model in the list
+      `[AIService Adapter] Model '${models[0]}' failed with a non-retriable error.`,
     );
   });
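Note: the tests above exercise a model-fallback adapter inside `AIService` whose implementation is not part of this diff. The following is only a minimal sketch of the behaviour the assertions imply (a `models` list, a `models_lite` list, a `useLiteModels` flag that gets stripped, quota errors triggering a retry with the next model). Type names such as `GenerateFn` and the `isQuotaError` check are assumptions for illustration, not taken from the codebase.

```typescript
// Hypothetical sketch of the fallback adapter the tests above appear to target.
// `models`, `models_lite`, and the `useLiteModels` flag come from the diff;
// everything else (type names, the quota check, console.warn) is an assumption.
type GenerateFn = (req: { model: string; [key: string]: unknown }) => Promise<unknown>;

function isQuotaError(err: unknown): boolean {
  // Assumption: quota/rate-limit failures are detected from the error message.
  return err instanceof Error && /quota|rate limit/i.test(err.message);
}

function makeFallbackAdapter(generate: GenerateFn, models: string[], models_lite: string[]) {
  return {
    async generateContent(request: { useLiteModels?: boolean; [key: string]: unknown }) {
      // Strip the routing flag before calling the underlying client.
      const { useLiteModels, ...apiReq } = request;
      const candidates = useLiteModels ? models_lite : models;

      for (let i = 0; i < candidates.length; i++) {
        try {
          return await generate({ model: candidates[i], ...apiReq });
        } catch (err) {
          if (isQuotaError(err) && i < candidates.length - 1) {
            // Matches the warning string asserted in the test above.
            console.warn(
              `Model '${candidates[i]}' failed due to quota/rate limit. Trying next model.`,
            );
            continue;
          }
          throw err; // Non-retriable errors surface immediately.
        }
      }
      throw new Error('All models exhausted');
    },
  };
}
```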
@@ -839,6 +877,23 @@ describe('AI Service (Server)', () => {
     });
   });

+  describe('generateRecipeSuggestion', () => {
+    it('should call generateContent with useLiteModels set to true', async () => {
+      const ingredients = ['carrots', 'onions'];
+      const expectedPrompt = `Suggest a simple recipe using these ingredients: ${ingredients.join(
+        ', ',
+      )}. Keep it brief.`;
+      mockAiClient.generateContent.mockResolvedValue({ text: 'Some recipe', candidates: [] });
+
+      await aiServiceInstance.generateRecipeSuggestion(ingredients, mockLoggerInstance);
+
+      expect(mockAiClient.generateContent).toHaveBeenCalledWith({
+        contents: [{ parts: [{ text: expectedPrompt }] }],
+        useLiteModels: true,
+      });
+    });
+  });
+
   describe('planTripWithMaps', () => {
     const mockUserLocation: GeolocationCoordinates = {
       latitude: 45,
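The new `generateRecipeSuggestion` test pins down only the call shape, not the implementation. A plausible reading of the method it targets is sketched below; in the real service it is presumably a method on `AIService` that uses the internal `aiClient` and takes a logger, but it is written here as a free function so the sketch stands alone. Only the prompt text and the `useLiteModels: true` flag are taken from the diff.

```typescript
// Hypothetical sketch inferred from the test assertions only.
async function generateRecipeSuggestion(
  aiClient: { generateContent: (req: object) => Promise<{ text: string }> },
  ingredients: string[],
): Promise<string> {
  const prompt = `Suggest a simple recipe using these ingredients: ${ingredients.join(
    ', ',
  )}. Keep it brief.`;

  // Lite models are routed to for this short, low-stakes prompt.
  const response = await aiClient.generateContent({
    contents: [{ parts: [{ text: prompt }] }],
    useLiteModels: true,
  });
  return response.text; // Assumption: the suggestion text is returned as-is.
}
```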
@@ -949,6 +1004,7 @@ describe('AI Service (Server)', () => {
       userId: 'user123',
       submitterIp: '127.0.0.1',
       userProfileAddress: '123 St, City, Country', // Partial address match based on filter(Boolean)
+      baseUrl: 'http://localhost:3000',
     });
     expect(result.id).toBe('job123');
   });
@@ -970,6 +1026,7 @@ describe('AI Service (Server)', () => {
       expect.objectContaining({
         userId: undefined,
         userProfileAddress: undefined,
+        baseUrl: 'http://localhost:3000',
       }),
     );
   });
@@ -23,6 +23,7 @@ import * as db from './db/index.db';
 import { flyerQueue } from './queueService.server';
 import type { Job } from 'bullmq';
 import { createFlyerAndItems } from './db/flyer.db';
+import { getBaseUrl } from '../utils/serverUtils';
 import { generateFlyerIcon } from '../utils/imageProcessor';
 import path from 'path';
 import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError
@@ -824,6 +825,8 @@ async enqueueFlyerProcessing(
         .join(', ');
     }

+    const baseUrl = getBaseUrl(logger);
+
     // 3. Add job to the queue
     const job = await flyerQueue.add('process-flyer', {
       filePath: file.path,
@@ -832,6 +835,7 @@ async enqueueFlyerProcessing(
       userId: userProfile?.user.user_id,
       submitterIp: submitterIp,
       userProfileAddress: userProfileAddress,
+      baseUrl: baseUrl,
     });

     logger.info(
@@ -926,20 +930,7 @@ async enqueueFlyerProcessing(
     const iconsDir = path.join(path.dirname(file.path), 'icons');
     const iconFileName = await generateFlyerIcon(file.path, iconsDir, logger);

-    // Construct proper URLs including protocol and host to satisfy DB constraints.
-    let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
-    if (!baseUrl || !baseUrl.startsWith('http')) {
-      const port = process.env.PORT || 3000;
-      const fallbackUrl = `http://localhost:${port}`;
-      if (baseUrl) {
-        logger.warn(
-          `FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
-        );
-      }
-      baseUrl = fallbackUrl;
-    }
-    baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
-
+    const baseUrl = getBaseUrl(logger);
     const iconUrl = `${baseUrl}/flyer-images/icons/${iconFileName}`;
     const imageUrl = `${baseUrl}/flyer-images/${file.filename}`;
@@ -21,6 +21,7 @@ const createMockJobData = (data: Partial<FlyerJobData>): FlyerJobData => ({
   filePath: '/tmp/flyer.jpg',
   originalFileName: 'flyer.jpg',
   checksum: 'checksum-123',
+  baseUrl: 'http://localhost:3000',
   ...data,
 });
@@ -64,6 +64,7 @@ describe('FlyerDataTransformer', () => {
     const originalFileName = 'my-flyer.pdf';
     const checksum = 'checksum-abc-123';
     const userId = 'user-xyz-456';
+    const baseUrl = 'http://test.host';

     // Act
     const { flyerData, itemsForDb } = await transformer.transform(
@@ -73,11 +74,9 @@ describe('FlyerDataTransformer', () => {
       checksum,
       userId,
       mockLogger,
+      baseUrl,
     );

-    // Dynamically construct the expected base URL, mirroring the logic in the transformer.
-    const expectedBaseUrl = `http://localhost:3000`;
-
     // Assert
     // 0. Check logging
     expect(mockLogger.info).toHaveBeenCalledWith(
@@ -91,8 +90,8 @@ describe('FlyerDataTransformer', () => {
     // 1. Check flyer data
     expect(flyerData).toEqual({
       file_name: originalFileName,
-      image_url: `${expectedBaseUrl}/flyer-images/flyer-page-1.jpg`,
-      icon_url: `${expectedBaseUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
+      image_url: `${baseUrl}/flyer-images/flyer-page-1.jpg`,
+      icon_url: `${baseUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
       checksum,
       store_name: 'Test Store',
       valid_from: '2024-01-01',
@@ -157,11 +156,9 @@ describe('FlyerDataTransformer', () => {
       checksum,
       undefined,
       mockLogger,
+      'http://another.host',
     );

-    // Dynamically construct the expected base URL, mirroring the logic in the transformer.
-    const expectedBaseUrl = `http://localhost:3000`;
-
     // Assert
     // 0. Check logging
     expect(mockLogger.info).toHaveBeenCalledWith(
@@ -178,8 +175,8 @@ describe('FlyerDataTransformer', () => {
     expect(itemsForDb).toHaveLength(0);
     expect(flyerData).toEqual({
       file_name: originalFileName,
-      image_url: `${expectedBaseUrl}/flyer-images/another.png`,
-      icon_url: `${expectedBaseUrl}/flyer-images/icons/icon-another.webp`,
+      image_url: `http://another.host/flyer-images/another.png`,
+      icon_url: `http://another.host/flyer-images/icons/icon-another.webp`,
       checksum,
       store_name: 'Unknown Store (auto)', // Should use fallback
       valid_from: null,
@@ -232,6 +229,7 @@ describe('FlyerDataTransformer', () => {
       'checksum',
       'user-1',
       mockLogger,
+      'http://normalize.host',
     );

     // Assert
@@ -251,4 +249,47 @@ describe('FlyerDataTransformer', () => {
       }),
     );
   });
+
+  it('should use fallback baseUrl if none is provided and log a warning', async () => {
+    // Arrange
+    const aiResult: AiProcessorResult = {
+      data: {
+        store_name: 'Test Store',
+        valid_from: '2024-01-01',
+        valid_to: '2024-01-07',
+        store_address: '123 Test St',
+        items: [],
+      },
+      needsReview: false,
+    };
+    const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+    const baseUrl = undefined; // Explicitly pass undefined for this test
+
+    // The fallback logic uses process.env.PORT || 3000.
+    // The beforeEach sets PORT to '', so it should fall back to 3000.
+    const expectedFallbackUrl = 'http://localhost:3000';
+
+    // Act
+    const { flyerData } = await transformer.transform(
+      aiResult,
+      imagePaths,
+      'my-flyer.pdf',
+      'checksum-abc-123',
+      'user-xyz-456',
+      mockLogger,
+      baseUrl, // Pass undefined here
+    );
+
+    // Assert
+    // 1. Check that a warning was logged
+    expect(mockLogger.warn).toHaveBeenCalledWith(
+      `Base URL not provided in job data. Falling back to default local URL: ${expectedFallbackUrl}`,
+    );
+
+    // 2. Check that the URLs were constructed with the fallback
+    expect(flyerData.image_url).toBe(`${expectedFallbackUrl}/flyer-images/flyer-page-1.jpg`);
+    expect(flyerData.icon_url).toBe(
+      `${expectedFallbackUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
+    );
+  });
 });
@@ -55,6 +55,7 @@ export class FlyerDataTransformer {
     checksum: string,
     userId: string | undefined,
     logger: Logger,
+    baseUrl?: string,
   ): Promise<{ flyerData: FlyerInsert; itemsForDb: FlyerItemInsert[] }> {
     logger.info('Starting data transformation from AI output to database format.');
@@ -75,37 +76,29 @@ export class FlyerDataTransformer {
       logger.warn('AI did not return a store name. Using fallback "Unknown Store (auto)".');
     }

-    // Construct proper URLs including protocol and host to satisfy DB constraints.
-    // This logic is made more robust to handle cases where env vars might be present but invalid (e.g., whitespace or missing protocol).
-    let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
-
-    if (!baseUrl || !baseUrl.startsWith('http')) {
+    // The baseUrl is passed from the job payload to ensure the worker has the correct environment context.
+    // If it's missing for any reason, we fall back to a sensible default for local development.
+    let finalBaseUrl = baseUrl;
+    if (!finalBaseUrl) {
       const port = process.env.PORT || 3000;
-      const fallbackUrl = `http://localhost:${port}`;
-      if (baseUrl) {
-        // It was set but invalid
-        logger.warn(
-          `FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
-        );
-      }
-      baseUrl = fallbackUrl;
+      finalBaseUrl = `http://localhost:${port}`;
+      logger.warn(
+        `Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`,
+      );
     }

-    baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
+    finalBaseUrl = finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;

     const flyerData: FlyerInsert = {
       file_name: originalFileName,
-      image_url: `${baseUrl}/flyer-images/${path.basename(firstImage)}`,
-      icon_url: `${baseUrl}/flyer-images/icons/${iconFileName}`,
+      image_url: `${finalBaseUrl}/flyer-images/${path.basename(firstImage)}`,
+      icon_url: `${finalBaseUrl}/flyer-images/icons/${iconFileName}`,
       checksum,
       store_name: storeName,
       valid_from: extractedData.valid_from,
       valid_to: extractedData.valid_to,
-      store_address: extractedData.store_address, // The number of items is now calculated directly from the transformed data.
+      store_address: extractedData.store_address,
       item_count: itemsForDb.length,
       // Defensively handle the userId. An empty string ('') is not a valid UUID,
       // but `null` is. This ensures that any falsy value for userId (undefined, null, '')
       // is converted to `null` for the database, preventing a 22P02 error.
       uploaded_by: userId ? userId : null,
       status: needsReview ? 'needs_review' : 'processed',
     };
@@ -166,6 +166,7 @@ describe('FlyerProcessingService', () => {
       filePath: '/tmp/flyer.jpg',
       originalFileName: 'flyer.jpg',
       checksum: 'checksum-123',
+      baseUrl: 'http://localhost:3000',
       ...data,
     },
     updateProgress: vi.fn(),
@@ -99,35 +99,11 @@ export class FlyerProcessingService {
       job.data.checksum,
       job.data.userId,
       logger,
+      job.data.baseUrl,
     );
     stages[2].status = 'completed';
     await job.updateProgress({ stages });

-    // Sanitize URLs before database insertion to prevent constraint violations,
-    // especially in test environments where a base URL might not be configured.
-    const sanitizeUrl = (url: string): string => {
-      if (url.startsWith('http')) {
-        return url;
-      }
-      // If it's a relative path, build an absolute URL.
-      let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
-      if (!baseUrl || !baseUrl.startsWith('http')) {
-        const port = process.env.PORT || 3000;
-        const fallbackUrl = `http://localhost:${port}`;
-        if (baseUrl) {
-          logger.warn(
-            `URL Sanitization: FRONTEND_URL/BASE_URL is invalid ('${baseUrl}'). Falling back to ${fallbackUrl}.`,
-          );
-        }
-        baseUrl = fallbackUrl;
-      }
-      baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
-      return `${baseUrl}${url.startsWith('/') ? url : `/${url}`}`;
-    };
-
-    flyerData.image_url = sanitizeUrl(flyerData.image_url);
-    flyerData.icon_url = sanitizeUrl(flyerData.icon_url);
-
     // Stage 4: Save to Database
     stages[3].status = 'in-progress';
     await job.updateProgress({ stages });
@@ -1,8 +1,8 @@
 // src/types/job-data.ts

 /**
- * Defines the data structure for a flyer processing job.
- * This is the information passed to the worker when a new flyer is uploaded.
+ * Defines the shape of the data payload for a flyer processing job.
+ * This is the data that gets passed to the BullMQ worker.
  */
 export interface FlyerJobData {
   filePath: string;
@@ -11,44 +11,13 @@ export interface FlyerJobData {
   userId?: string;
   submitterIp?: string;
   userProfileAddress?: string;
+  baseUrl: string;
 }

-/**
- * Defines the data structure for an email sending job.
- */
-export interface EmailJobData {
-  to: string;
-  subject: string;
-  text: string;
-  html: string;
-}
-
-/**
- * Defines the data structure for a daily analytics reporting job.
- */
-export interface AnalyticsJobData {
-  reportDate: string; // e.g., '2024-10-26'
-}
-
-/**
- * Defines the data structure for a weekly analytics reporting job.
- */
-export interface WeeklyAnalyticsJobData {
-  reportYear: number;
-  reportWeek: number; // ISO week number (1-53)
-}
-
 /**
- * Defines the data structure for a file cleanup job, which runs after a flyer is successfully processed.
+ * Defines the shape of the data payload for a file cleanup job.
  */
 export interface CleanupJobData {
   flyerId: number;
   paths?: string[];
 }
-
-/**
- * Defines the data structure for the job that cleans up expired password reset tokens.
- */
-export interface TokenCleanupJobData {
-  timestamp: string;
-  paths: string[];
-}
src/utils/serverUtils.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
+// src/utils/serverUtils.ts
+import type { Logger } from 'pino';
+
+/**
+ * Constructs a fully qualified base URL for generating absolute URLs.
+ * It prioritizes `FRONTEND_URL`, then `BASE_URL`, and falls back to a localhost URL
+ * based on the `PORT` environment variable. It also logs a warning if the provided
+ * URL is invalid or missing.
+ *
+ * @param logger - The logger instance to use for warnings.
+ * @returns A validated, fully qualified base URL without a trailing slash.
+ */
+export function getBaseUrl(logger: Logger): string {
+  let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
+  if (!baseUrl || !baseUrl.startsWith('http')) {
+    const port = process.env.PORT || 3000;
+    const fallbackUrl = `http://localhost:${port}`;
+    if (baseUrl) {
+      logger.warn(
+        `[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
+      );
+    }
+    baseUrl = fallbackUrl;
+  }
+  return baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
+}
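Taken together, these hunks move base-URL resolution out of the worker: the web process resolves it once via `getBaseUrl` when enqueueing, ships it in the job payload (`FlyerJobData.baseUrl`), and the worker-side transformer only falls back to localhost if the payload lacks it. Below is a condensed, self-contained sketch of that hand-off; the payload field names mirror the diff, while the logger object and the standalone helper functions are stand-ins, not the project's real pino logger or BullMQ wiring.

```typescript
// Self-contained sketch of the baseUrl hand-off introduced by this diff.
type Logger = { warn: (msg: string) => void };
const logger: Logger = { warn: (msg) => console.warn(msg) };

// Producer (enqueueFlyerProcessing): resolve the base URL once from the web
// process's environment, then ship it inside the job payload.
function getBaseUrlSketch(log: Logger): string {
  let base = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
  if (!base || !base.startsWith('http')) {
    base = `http://localhost:${process.env.PORT || 3000}`;
    log.warn(`Falling back to default local URL: ${base}`);
  }
  return base.endsWith('/') ? base.slice(0, -1) : base;
}

const jobData = {
  filePath: '/tmp/flyer.jpg',
  originalFileName: 'flyer.jpg',
  checksum: 'checksum-123',
  baseUrl: getBaseUrlSketch(logger), // new field on FlyerJobData
};

// Consumer (FlyerDataTransformer.transform): trust the payload value and only
// fall back to localhost if it is missing, logging the warning the tests assert.
function resolveWorkerBaseUrl(payloadBaseUrl: string | undefined, log: Logger): string {
  let finalBaseUrl = payloadBaseUrl;
  if (!finalBaseUrl) {
    finalBaseUrl = `http://localhost:${process.env.PORT || 3000}`;
    log.warn(`Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`);
  }
  return finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;
}

console.log(`${resolveWorkerBaseUrl(jobData.baseUrl, logger)}/flyer-images/${jobData.originalFileName}`);
```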