Compare commits
396 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
71710c8316 | ||
| 1480a73ab0 | |||
|
|
b3efa3c756 | ||
| fb8fd57bb6 | |||
|
|
0a90d9d590 | ||
| 6ab473f5f0 | |||
|
|
c46efe1474 | ||
| 25d6b76f6d | |||
|
|
9ffcc9d65d | ||
| 1285702210 | |||
|
|
d38b751b40 | ||
| e122d55ced | |||
|
|
af9992f773 | ||
| 3912139273 | |||
| b5f7f5e4d1 | |||
|
|
5173059621 | ||
| ebceb0e2e3 | |||
| e75054b1ab | |||
|
|
639313485a | ||
| 4a04e478c4 | |||
|
|
1814469eb4 | ||
| b777430ff7 | |||
|
|
23830c0d4e | ||
| ef42fee982 | |||
|
|
65cb54500c | ||
| 664ad291be | |||
|
|
ff912b9055 | ||
| ec32027bd4 | |||
|
|
59f773639b | ||
| dd2be5eecf | |||
|
|
a94bfbd3e9 | ||
| 338bbc9440 | |||
|
|
60aad04642 | ||
| 7f2aff9a24 | |||
|
|
689320e7d2 | ||
| e457bbf046 | |||
| 68cdbb6066 | |||
|
|
cea6be7145 | ||
| 74a5ca6331 | |||
|
|
62470e7661 | ||
| 2b517683fd | |||
|
|
5d06d1ba09 | ||
| 46c1e56b14 | |||
|
|
78a9b80010 | ||
| d356d9dfb6 | |||
|
|
ab63f83f50 | ||
| b546a55eaf | |||
|
|
dfa53a93dd | ||
| f30464cd0e | |||
|
|
2d2fa3c2c8 | ||
| 58cb391f4b | |||
|
|
0ebe2f0806 | ||
| 7867abc5bc | |||
|
|
cc4c8e2839 | ||
| 33ee2eeac9 | |||
|
|
e0b13f26fb | ||
| eee7f36756 | |||
|
|
622c919733 | ||
| c7f6b6369a | |||
|
|
879d956003 | ||
| 27eaac7ea8 | |||
|
|
93618c57e5 | ||
| 7f043ef704 | |||
|
|
62e35deddc | ||
| 59f6f43d03 | |||
|
|
e675c1a73c | ||
| 3c19084a0a | |||
|
|
e2049c6b9f | ||
| a3839c2f0d | |||
|
|
c1df3d7b1b | ||
| 94782f030d | |||
|
|
1c25b79251 | ||
| 0b0fa8294d | |||
|
|
f49f3a75fb | ||
| 8f14044ae6 | |||
|
|
55e1e425f4 | ||
| 68b16ad2e8 | |||
|
|
6a28934692 | ||
| 78c4a5fee6 | |||
|
|
1ce5f481a8 | ||
|
|
e0120d38fd | ||
| 6b2079ef2c | |||
|
|
0478e176d5 | ||
| 47f7f97cd9 | |||
|
|
b0719d1e39 | ||
| 0039ac3752 | |||
|
|
3c8316f4f7 | ||
| 2564df1c64 | |||
|
|
696c547238 | ||
| 38165bdb9a | |||
|
|
6139dca072 | ||
| 68bfaa50e6 | |||
|
|
9c42621f74 | ||
| 1b98282202 | |||
|
|
b6731b220c | ||
| 3507d455e8 | |||
|
|
92b2adf8e8 | ||
| d6c7452256 | |||
|
|
d812b681dd | ||
| b4306a6092 | |||
|
|
57fdd159d5 | ||
| 4a747ca042 | |||
|
|
e0bf96824c | ||
| e86e09703e | |||
|
|
275741c79e | ||
| 3a40249ddb | |||
|
|
4c70905950 | ||
| 0b4884ff2a | |||
|
|
e4acab77c8 | ||
| 4e20b1b430 | |||
|
|
15747ac942 | ||
| e5fa89ef17 | |||
|
|
2c65da31e9 | ||
| eeec6af905 | |||
|
|
e7d03951b9 | ||
| af8816e0af | |||
|
|
64f6427e1a | ||
| c9b7a75429 | |||
|
|
0490f6922e | ||
| 057c4c9174 | |||
|
|
a9e56bc707 | ||
| e5d09c73b7 | |||
|
|
6e1298b825 | ||
| fc8e43437a | |||
|
|
cb453aa949 | ||
| 2651bd16ae | |||
|
|
91e0f0c46f | ||
| e6986d512b | |||
|
|
8f9c21675c | ||
| 7fb22cdd20 | |||
|
|
780291303d | ||
| 4f607f7d2f | |||
|
|
208227b3ed | ||
| bf1c7d4adf | |||
|
|
a7a30cf983 | ||
| 0bc0676b33 | |||
|
|
73484d3eb4 | ||
| b3253d5bbc | |||
|
|
54f3769e90 | ||
| bad6f74ee6 | |||
|
|
bcf16168b6 | ||
| 498fbd9e0e | |||
|
|
007ff8e538 | ||
| 1fc70e3915 | |||
|
|
d891e47e02 | ||
| 08c39afde4 | |||
|
|
c579543b8a | ||
| 0d84137786 | |||
|
|
20ee30c4b4 | ||
| 93612137e3 | |||
|
|
6e70f08e3c | ||
| 459f5f7976 | |||
|
|
a2e6331ddd | ||
| 13cd30bec9 | |||
|
|
baeb9488c6 | ||
| 0cba0f987e | |||
|
|
958a79997d | ||
| 8fb1c96f93 | |||
| 6e6fe80c7f | |||
|
|
d1554050bd | ||
|
|
b1fae270bb | ||
|
|
c852483e18 | ||
| 2e01ad5bc9 | |||
|
|
26763c7183 | ||
| f0c5c2c45b | |||
|
|
034bb60fd5 | ||
| d4b389cb79 | |||
|
|
a71fb81468 | ||
| 9bee0a013b | |||
|
|
8bcb4311b3 | ||
| 9fd15f3a50 | |||
|
|
e3c876c7be | ||
| 32dcf3b89e | |||
| 7066b937f6 | |||
|
|
8553ea8811 | ||
| 19885a50f7 | |||
|
|
ce82034b9d | ||
| 4528da2934 | |||
|
|
146d4c1351 | ||
| 88625706f4 | |||
|
|
e395faed30 | ||
| e8f8399896 | |||
|
|
ac0115af2b | ||
| f24b15f19b | |||
|
|
e64426bd84 | ||
| 0ec4cd68d2 | |||
|
|
840516d2a3 | ||
| 59355c3eef | |||
| d024935fe9 | |||
|
|
5a5470634e | ||
| 392231ad63 | |||
|
|
4b1c896621 | ||
| 720920a51c | |||
|
|
460adb9506 | ||
| 7aa1f756a9 | |||
|
|
c484a8ca9b | ||
| 28d2c9f4ec | |||
|
|
ee253e9449 | ||
| b6c15e53d0 | |||
|
|
722162c2c3 | ||
| 02a76fe996 | |||
|
|
0ebb03a7ab | ||
| 748ac9e049 | |||
|
|
495edd621c | ||
| 4ffca19db6 | |||
|
|
717427c5d7 | ||
| cc438a0e36 | |||
|
|
a32a0b62fc | ||
| 342f72b713 | |||
|
|
91254d18f3 | ||
| 40580dbf15 | |||
| 7f1d74c047 | |||
|
|
ecec686347 | ||
| 86de680080 | |||
|
|
0371947065 | ||
| 296698758c | |||
|
|
18c1161587 | ||
| 0010396780 | |||
|
|
d4557e13fb | ||
| 3e41130c69 | |||
|
|
d9034563d6 | ||
| 5836a75157 | |||
|
|
790008ae0d | ||
|
|
b5b91eb968 | ||
| 38eb810e7a | |||
|
|
458588a6e7 | ||
| 0b4113417f | |||
|
|
b59d2a9533 | ||
| 6740b35f8a | |||
|
|
92ad82a012 | ||
| 672e4ca597 | |||
|
|
e4d70a9b37 | ||
| c30f1c4162 | |||
|
|
44062a9f5b | ||
| 17fac8cf86 | |||
|
|
9fa8553486 | ||
|
|
f5b0b3b543 | ||
| e3ed5c7e63 | |||
|
|
ae0040e092 | ||
| 1f3f99d430 | |||
|
|
7be72f1758 | ||
| 0967c7a33d | |||
| 1f1c0fa6f3 | |||
|
|
728b1a20d3 | ||
| f248f7cbd0 | |||
|
|
0ad9bb16c2 | ||
| 510787bc5b | |||
|
|
9f696e7676 | ||
|
|
a77105316f | ||
| cadacb63f5 | |||
|
|
62592f707e | ||
| 023e48d99a | |||
|
|
99efca0371 | ||
| 1448950b81 | |||
|
|
a811fdac63 | ||
| 1201fe4d3c | |||
|
|
ba9228c9cb | ||
| b392b82c25 | |||
|
|
87825d13d6 | ||
| 21a6a796cf | |||
|
|
ecd0a73bc8 | ||
|
|
39d61dc7ad | ||
|
|
43491359d9 | ||
| 5ed2cea7e9 | |||
|
|
cbb16a8d52 | ||
| 70e94a6ce0 | |||
|
|
b61a00003a | ||
| 52dba6f890 | |||
| 4242678aab | |||
|
|
b2e086d5ba | ||
| 07a9787570 | |||
|
|
4bf5dc3d58 | ||
| be3d269928 | |||
|
|
80a53fae94 | ||
| e15d2b6c2f | |||
|
|
7a52bf499e | ||
| 2489ec8d2d | |||
|
|
4a4f349805 | ||
| 517a268307 | |||
|
|
a94b2a97b1 | ||
| 542cdfbb82 | |||
|
|
262062f468 | ||
| 0a14193371 | |||
|
|
7f665f5117 | ||
| 2782a8fb3b | |||
|
|
c182ef6d30 | ||
| fdb3b76cbd | |||
|
|
01e7c843cb | ||
| a0dbefbfa0 | |||
|
|
ab3fc318a0 | ||
| e658b35e43 | |||
|
|
67e106162a | ||
| b7f3182fd6 | |||
|
|
ac60072d88 | ||
| 9390f38bf6 | |||
|
|
236d5518c9 | ||
| fd52a79a72 | |||
|
|
f72819e343 | ||
| 1af8be3f15 | |||
|
|
28d03f4e21 | ||
| 2e72ee81dd | |||
|
|
ba67ace190 | ||
|
|
50782c30e5 | ||
| 4a2ff8afc5 | |||
|
|
7a1c14ce89 | ||
| 6fafc3d089 | |||
|
|
4316866bce | ||
| 356c1a1894 | |||
|
|
2a310648ca | ||
| 8592633c22 | |||
|
|
0a9cdb8709 | ||
| 0d21e098f8 | |||
| b6799ed167 | |||
|
|
be5bda169e | ||
| 4ede403356 | |||
| 5d31605b80 | |||
| ddd4ad024e | |||
|
|
4e927f48bd | ||
| af5644d17a | |||
|
|
016c0a883a | ||
| c6a5f889b4 | |||
|
|
c895ecdb28 | ||
| 05e3f8a61c | |||
|
|
f79a2abc65 | ||
| a726c270bb | |||
|
|
8a4965c45b | ||
| 93497bf7c7 | |||
|
|
20584af729 | ||
| be9f452656 | |||
| ef4b8e58fe | |||
|
|
a42f7d7007 | ||
| 768d02b9ed | |||
|
|
c4742959e4 | ||
| 97c54c0c5c | |||
| 7cc50907d1 | |||
|
|
b4199f7c48 | ||
| dda36f7bc5 | |||
| 27810bbb36 | |||
|
|
7a1421d5c2 | ||
| 1b52478f97 | |||
| fe8b000737 | |||
|
|
d2babbe3b0 | ||
|
|
684d81db2a | ||
| 59ffa65562 | |||
| 0c0dd852ac | |||
|
|
cde766872e | ||
| 604b543c12 | |||
| fd67fe2941 | |||
|
|
582035b60e | ||
| 44e7670a89 | |||
| 2abfb3ed6e | |||
|
|
219de4a25c | ||
| 1540d5051f | |||
| 9c978c26fa | |||
|
|
adb109d8e9 | ||
| c668c8785f | |||
|
|
695bbb61b9 | ||
| 877c971833 | |||
| ed3af07aab | |||
|
|
dd4b34edfa | ||
| 91fa2f0516 | |||
|
|
aefd57e57b | ||
| 2ca4eb47ac | |||
| a4fe30da22 | |||
|
|
abab7fd25e | ||
| 53dd26d2d9 | |||
| ab3da0336c | |||
|
|
ed6d6349a2 | ||
| d4db2a709a | |||
| 508583809b | |||
|
|
6b1f7e7590 | ||
| 07bb31f4fb | |||
| a42fb76da8 | |||
|
|
08c320423c | ||
| d2498065ed | |||
| 56dc96f418 | |||
|
|
4e9aa0efc3 | ||
| e5e4b1316c | |||
| e8d511b4de | |||
|
|
c4bbf5c251 | ||
| 32a9e6732b | |||
| e7c076e2ed | |||
|
|
dbe8e72efe | ||
| 38bd193042 | |||
|
|
57215e2778 | ||
| 2c1de24e9a | |||
| c8baff7aac | |||
| de3f21a7ec | |||
|
|
c6adbf79e7 | ||
| 7399a27600 | |||
|
|
68aadcaa4e | ||
| 971d2c3fa7 | |||
|
|
daaacfde5e | ||
| 7ac8fe1d29 | |||
| a2462dfb6b | |||
|
|
a911224fb4 |
85
.claude/settings.local.json
Normal file
85
.claude/settings.local.json
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
{
|
||||||
|
"permissions": {
|
||||||
|
"allow": [
|
||||||
|
"Bash(npm test:*)",
|
||||||
|
"Bash(podman --version:*)",
|
||||||
|
"Bash(podman ps:*)",
|
||||||
|
"Bash(podman machine start:*)",
|
||||||
|
"Bash(podman compose:*)",
|
||||||
|
"Bash(podman pull:*)",
|
||||||
|
"Bash(podman images:*)",
|
||||||
|
"Bash(podman stop:*)",
|
||||||
|
"Bash(echo:*)",
|
||||||
|
"Bash(podman rm:*)",
|
||||||
|
"Bash(podman run:*)",
|
||||||
|
"Bash(podman start:*)",
|
||||||
|
"Bash(podman exec:*)",
|
||||||
|
"Bash(cat:*)",
|
||||||
|
"Bash(PGPASSWORD=postgres psql:*)",
|
||||||
|
"Bash(npm search:*)",
|
||||||
|
"Bash(npx:*)",
|
||||||
|
"Bash(curl -s -H \"Authorization: token c72bc0f14f623fec233d3c94b3a16397fe3649ef\" https://gitea.projectium.com/api/v1/user)",
|
||||||
|
"Bash(curl:*)",
|
||||||
|
"Bash(powershell:*)",
|
||||||
|
"Bash(cmd.exe:*)",
|
||||||
|
"Bash(export NODE_ENV=test DB_HOST=localhost DB_USER=postgres DB_PASSWORD=postgres DB_NAME=flyer_crawler_dev REDIS_URL=redis://localhost:6379 FRONTEND_URL=http://localhost:5173 JWT_SECRET=test-jwt-secret:*)",
|
||||||
|
"Bash(npm run test:integration:*)",
|
||||||
|
"Bash(grep:*)",
|
||||||
|
"Bash(done)",
|
||||||
|
"Bash(podman info:*)",
|
||||||
|
"Bash(podman machine:*)",
|
||||||
|
"Bash(podman system connection:*)",
|
||||||
|
"Bash(podman inspect:*)",
|
||||||
|
"Bash(python -m json.tool:*)",
|
||||||
|
"Bash(claude mcp status)",
|
||||||
|
"Bash(powershell.exe -Command \"claude mcp status\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude mcp\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude mcp list\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude --version\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude config\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude mcp get gitea-projectium\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude mcp add --help\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude mcp add -t stdio -s user filesystem -- D:\\\\nodejs\\\\npx.cmd -y @modelcontextprotocol/server-filesystem D:\\\\gitea\\\\flyer-crawler.projectium.com\\\\flyer-crawler.projectium.com\")",
|
||||||
|
"Bash(powershell.exe -Command \"claude mcp add -t stdio -s user fetch -- D:\\\\nodejs\\\\npx.cmd -y @modelcontextprotocol/server-fetch\")",
|
||||||
|
"Bash(powershell.exe -Command \"echo ''List files in src/hooks using filesystem MCP'' | claude --print\")",
|
||||||
|
"Bash(powershell.exe -Command \"echo ''List all podman containers'' | claude --print\")",
|
||||||
|
"Bash(powershell.exe -Command \"echo ''List my repositories on gitea.projectium.com using gitea-projectium MCP'' | claude --print\")",
|
||||||
|
"Bash(powershell.exe -Command \"echo ''List my repositories on gitea.projectium.com using gitea-projectium MCP'' | claude --print --allowedTools ''mcp__gitea-projectium__*''\")",
|
||||||
|
"Bash(powershell.exe -Command \"echo ''Fetch the homepage of https://gitea.projectium.com and summarize it'' | claude --print --allowedTools ''mcp__fetch__*''\")",
|
||||||
|
"Bash(dir \"C:\\\\Users\\\\games3\\\\.claude\")",
|
||||||
|
"Bash(dir:*)",
|
||||||
|
"Bash(D:nodejsnpx.cmd -y @modelcontextprotocol/server-fetch --help)",
|
||||||
|
"Bash(cmd /c \"dir /o-d C:\\\\Users\\\\games3\\\\.claude\\\\debug 2>nul | head -10\")",
|
||||||
|
"mcp__memory__read_graph",
|
||||||
|
"mcp__memory__create_entities",
|
||||||
|
"mcp__memory__search_nodes",
|
||||||
|
"mcp__memory__delete_entities",
|
||||||
|
"mcp__sequential-thinking__sequentialthinking",
|
||||||
|
"mcp__filesystem__list_directory",
|
||||||
|
"mcp__filesystem__read_multiple_files",
|
||||||
|
"mcp__filesystem__directory_tree",
|
||||||
|
"mcp__filesystem__read_text_file",
|
||||||
|
"Bash(wc:*)",
|
||||||
|
"Bash(npm install:*)",
|
||||||
|
"Bash(git grep:*)",
|
||||||
|
"Bash(findstr:*)",
|
||||||
|
"Bash(git add:*)",
|
||||||
|
"mcp__filesystem__write_file",
|
||||||
|
"mcp__podman__container_list",
|
||||||
|
"Bash(podman cp:*)",
|
||||||
|
"mcp__podman__container_inspect",
|
||||||
|
"mcp__podman__network_list",
|
||||||
|
"Bash(podman network connect:*)",
|
||||||
|
"Bash(npm run build:*)",
|
||||||
|
"Bash(set NODE_ENV=test)",
|
||||||
|
"Bash(podman-compose:*)",
|
||||||
|
"Bash(timeout 60 podman machine start:*)",
|
||||||
|
"Bash(podman build:*)",
|
||||||
|
"Bash(podman network rm:*)",
|
||||||
|
"Bash(npm run lint)",
|
||||||
|
"Bash(npm run typecheck:*)",
|
||||||
|
"Bash(npm run type-check:*)",
|
||||||
|
"Bash(npm run test:unit:*)"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,18 +1,96 @@
|
|||||||
{
|
{
|
||||||
|
// ============================================================================
|
||||||
|
// VS CODE DEV CONTAINER CONFIGURATION
|
||||||
|
// ============================================================================
|
||||||
|
// This file configures VS Code's Dev Containers extension to provide a
|
||||||
|
// consistent, fully-configured development environment.
|
||||||
|
//
|
||||||
|
// Features:
|
||||||
|
// - Automatic PostgreSQL + Redis startup with healthchecks
|
||||||
|
// - Automatic npm install
|
||||||
|
// - Automatic database schema initialization and seeding
|
||||||
|
// - Pre-configured VS Code extensions (ESLint, Prettier)
|
||||||
|
// - Podman support for Windows users
|
||||||
|
//
|
||||||
|
// Usage:
|
||||||
|
// 1. Install the "Dev Containers" extension in VS Code
|
||||||
|
// 2. Open this project folder
|
||||||
|
// 3. Click "Reopen in Container" when prompted (or use Command Palette)
|
||||||
|
// 4. Wait for container build and initialization
|
||||||
|
// 5. Development server starts automatically
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
"name": "Flyer Crawler Dev (Ubuntu 22.04)",
|
"name": "Flyer Crawler Dev (Ubuntu 22.04)",
|
||||||
|
|
||||||
|
// Use Docker Compose for multi-container setup
|
||||||
"dockerComposeFile": ["../compose.dev.yml"],
|
"dockerComposeFile": ["../compose.dev.yml"],
|
||||||
"service": "app",
|
"service": "app",
|
||||||
"workspaceFolder": "/app",
|
"workspaceFolder": "/app",
|
||||||
|
|
||||||
|
// VS Code customizations
|
||||||
"customizations": {
|
"customizations": {
|
||||||
"vscode": {
|
"vscode": {
|
||||||
"extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
|
"extensions": [
|
||||||
|
// Code quality
|
||||||
|
"dbaeumer.vscode-eslint",
|
||||||
|
"esbenp.prettier-vscode",
|
||||||
|
// TypeScript
|
||||||
|
"ms-vscode.vscode-typescript-next",
|
||||||
|
// Database
|
||||||
|
"mtxr.sqltools",
|
||||||
|
"mtxr.sqltools-driver-pg",
|
||||||
|
// Utilities
|
||||||
|
"eamodio.gitlens",
|
||||||
|
"streetsidesoftware.code-spell-checker"
|
||||||
|
],
|
||||||
|
"settings": {
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||||
|
"typescript.preferences.importModuleSpecifier": "relative"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
|
// Run as root (required for npm global installs)
|
||||||
"remoteUser": "root",
|
"remoteUser": "root",
|
||||||
// Automatically install dependencies when the container is created.
|
|
||||||
// This runs inside the container, populating the isolated node_modules volume.
|
// ============================================================================
|
||||||
"postCreateCommand": "npm install",
|
// Lifecycle Commands
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// initializeCommand: Runs on the HOST before the container is created.
|
||||||
|
// Starts Podman machine on Windows (no-op if already running or using Docker).
|
||||||
|
"initializeCommand": "powershell -Command \"podman machine start; exit 0\"",
|
||||||
|
|
||||||
|
// postCreateCommand: Runs ONCE when the container is first created.
|
||||||
|
// This is where we do full initialization: npm install + database setup.
|
||||||
|
"postCreateCommand": "chmod +x scripts/docker-init.sh && ./scripts/docker-init.sh",
|
||||||
|
|
||||||
|
// postAttachCommand: Runs EVERY TIME VS Code attaches to the container.
|
||||||
|
// Starts the development server automatically.
|
||||||
"postAttachCommand": "npm run dev:container",
|
"postAttachCommand": "npm run dev:container",
|
||||||
// Try to start podman machine, but exit with success (0) even if it's already running
|
|
||||||
"initializeCommand": "powershell -Command \"podman machine start; exit 0\""
|
// ============================================================================
|
||||||
|
// Port Forwarding
|
||||||
|
// ============================================================================
|
||||||
|
// Automatically forward these ports from the container to the host
|
||||||
|
"forwardPorts": [3000, 3001],
|
||||||
|
|
||||||
|
// Labels for forwarded ports in VS Code's Ports panel
|
||||||
|
"portsAttributes": {
|
||||||
|
"3000": {
|
||||||
|
"label": "Frontend (Vite)",
|
||||||
|
"onAutoForward": "notify"
|
||||||
|
},
|
||||||
|
"3001": {
|
||||||
|
"label": "Backend API",
|
||||||
|
"onAutoForward": "notify"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Features
|
||||||
|
// ============================================================================
|
||||||
|
// Additional dev container features (optional)
|
||||||
|
"features": {}
|
||||||
}
|
}
|
||||||
|
|||||||
77
.env.example
Normal file
77
.env.example
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
# .env.example
|
||||||
|
# ============================================================================
|
||||||
|
# ENVIRONMENT VARIABLES TEMPLATE
|
||||||
|
# ============================================================================
|
||||||
|
# Copy this file to .env and fill in your values.
|
||||||
|
# For local development with Docker/Podman, these defaults should work out of the box.
|
||||||
|
#
|
||||||
|
# IMPORTANT: Never commit .env files with real credentials to version control!
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Database Configuration
|
||||||
|
# ===================
|
||||||
|
# PostgreSQL connection settings
|
||||||
|
# For container development, use the service name "postgres"
|
||||||
|
DB_HOST=postgres
|
||||||
|
DB_PORT=5432
|
||||||
|
DB_USER=postgres
|
||||||
|
DB_PASSWORD=postgres
|
||||||
|
DB_NAME=flyer_crawler_dev
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Redis Configuration
|
||||||
|
# ===================
|
||||||
|
# Redis URL for caching and job queues
|
||||||
|
# For container development, use the service name "redis"
|
||||||
|
REDIS_URL=redis://redis:6379
|
||||||
|
# Optional: Redis password (leave empty if not required)
|
||||||
|
REDIS_PASSWORD=
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Application Settings
|
||||||
|
# ===================
|
||||||
|
NODE_ENV=development
|
||||||
|
# Frontend URL for CORS and email links
|
||||||
|
FRONTEND_URL=http://localhost:3000
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Authentication
|
||||||
|
# ===================
|
||||||
|
# REQUIRED: Secret key for signing JWT tokens (generate a random 64+ character string)
|
||||||
|
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# AI/ML Services
|
||||||
|
# ===================
|
||||||
|
# REQUIRED: Google Gemini API key for flyer OCR processing
|
||||||
|
GEMINI_API_KEY=your-gemini-api-key
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# External APIs
|
||||||
|
# ===================
|
||||||
|
# Optional: Google Maps API key for geocoding store addresses
|
||||||
|
GOOGLE_MAPS_API_KEY=
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Email Configuration (Optional)
|
||||||
|
# ===================
|
||||||
|
# SMTP settings for sending emails (deal notifications, password reset)
|
||||||
|
SMTP_HOST=
|
||||||
|
SMTP_PORT=587
|
||||||
|
SMTP_SECURE=false
|
||||||
|
SMTP_USER=
|
||||||
|
SMTP_PASS=
|
||||||
|
SMTP_FROM_EMAIL=noreply@example.com
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Worker Configuration (Optional)
|
||||||
|
# ===================
|
||||||
|
# Concurrency settings for background job workers
|
||||||
|
WORKER_CONCURRENCY=1
|
||||||
|
EMAIL_WORKER_CONCURRENCY=10
|
||||||
|
ANALYTICS_WORKER_CONCURRENCY=1
|
||||||
|
CLEANUP_WORKER_CONCURRENCY=10
|
||||||
|
|
||||||
|
# Worker lock duration in milliseconds (default: 2 minutes)
|
||||||
|
WORKER_LOCK_DURATION=120000
|
||||||
6
.env.test
Normal file
6
.env.test
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
DB_HOST=10.89.0.4
|
||||||
|
DB_USER=flyer
|
||||||
|
DB_PASSWORD=flyer
|
||||||
|
DB_NAME=flyer_crawler_test
|
||||||
|
REDIS_URL=redis://redis:6379
|
||||||
|
NODE_ENV=test
|
||||||
66
.gemini/settings.json
Normal file
66
.gemini/settings.json
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"gitea-projectium": {
|
||||||
|
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
|
||||||
|
"args": ["run", "-t", "stdio"],
|
||||||
|
"env": {
|
||||||
|
"GITEA_HOST": "https://gitea.projectium.com",
|
||||||
|
"GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"gitea-torbonium": {
|
||||||
|
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
|
||||||
|
"args": ["run", "-t", "stdio"],
|
||||||
|
"env": {
|
||||||
|
"GITEA_HOST": "https://gitea.torbonium.com",
|
||||||
|
"GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"gitea-lan": {
|
||||||
|
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
|
||||||
|
"args": ["run", "-t", "stdio"],
|
||||||
|
"env": {
|
||||||
|
"GITEA_HOST": "https://gitea.torbolan.com",
|
||||||
|
"GITEA_ACCESS_TOKEN": "YOUR_LAN_TOKEN_HERE"
|
||||||
|
},
|
||||||
|
"disabled": true
|
||||||
|
},
|
||||||
|
"podman": {
|
||||||
|
"command": "D:\\nodejs\\npx.cmd",
|
||||||
|
"args": ["-y", "podman-mcp-server@latest"],
|
||||||
|
"env": {
|
||||||
|
"DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"filesystem": {
|
||||||
|
"command": "d:\\nodejs\\node.exe",
|
||||||
|
"args": [
|
||||||
|
"c:\\Users\\games3\\AppData\\Roaming\\npm\\node_modules\\@modelcontextprotocol\\server-filesystem\\dist\\index.js",
|
||||||
|
"d:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"fetch": {
|
||||||
|
"command": "D:\\nodejs\\npx.cmd",
|
||||||
|
"args": ["-y", "@modelcontextprotocol/server-fetch"]
|
||||||
|
},
|
||||||
|
"io.github.ChromeDevTools/chrome-devtools-mcp": {
|
||||||
|
"type": "stdio",
|
||||||
|
"command": "npx",
|
||||||
|
"args": ["chrome-devtools-mcp@0.12.1"],
|
||||||
|
"gallery": "https://api.mcp.github.com",
|
||||||
|
"version": "0.12.1"
|
||||||
|
},
|
||||||
|
"markitdown": {
|
||||||
|
"command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
|
||||||
|
"args": ["markitdown-mcp"]
|
||||||
|
},
|
||||||
|
"sequential-thinking": {
|
||||||
|
"command": "D:\\nodejs\\npx.cmd",
|
||||||
|
"args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
|
||||||
|
},
|
||||||
|
"memory": {
|
||||||
|
"command": "D:\\nodejs\\npx.cmd",
|
||||||
|
"args": ["-y", "@modelcontextprotocol/server-memory"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -93,8 +93,9 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
GITEA_SERVER_URL="https://gitea.projectium.com"
|
GITEA_SERVER_URL="https://gitea.projectium.com"
|
||||||
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
|
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
|
||||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
|
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||||
|
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||||
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||||
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||||
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
||||||
@@ -116,7 +117,8 @@ jobs:
|
|||||||
DB_USER: ${{ secrets.DB_USER }}
|
DB_USER: ${{ secrets.DB_USER }}
|
||||||
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||||
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
||||||
REDIS_URL: 'redis://localhost:6379'
|
# Explicitly use database 0 for production (test uses database 1)
|
||||||
|
REDIS_URL: 'redis://localhost:6379/0'
|
||||||
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
||||||
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
|
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
|
||||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||||
@@ -137,6 +139,10 @@ jobs:
|
|||||||
cd /var/www/flyer-crawler.projectium.com
|
cd /var/www/flyer-crawler.projectium.com
|
||||||
npm install --omit=dev
|
npm install --omit=dev
|
||||||
|
|
||||||
|
# --- Cleanup Errored Processes ---
|
||||||
|
echo "Cleaning up errored or stopped PM2 processes..."
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
|
||||||
|
|
||||||
# --- Version Check Logic ---
|
# --- Version Check Logic ---
|
||||||
# Get the version from the newly deployed package.json
|
# Get the version from the newly deployed package.json
|
||||||
NEW_VERSION=$(node -p "require('./package.json').version")
|
NEW_VERSION=$(node -p "require('./package.json').version")
|
||||||
@@ -153,7 +159,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
||||||
fi
|
fi
|
||||||
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
|
||||||
echo "Production backend server reloaded successfully."
|
echo "Production backend server reloaded successfully."
|
||||||
else
|
else
|
||||||
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
||||||
@@ -162,7 +168,12 @@ jobs:
|
|||||||
echo "Updating schema hash in production database..."
|
echo "Updating schema hash in production database..."
|
||||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
||||||
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
|
"CREATE TABLE IF NOT EXISTS public.schema_info (
|
||||||
|
environment VARCHAR(50) PRIMARY KEY,
|
||||||
|
schema_hash VARCHAR(64) NOT NULL,
|
||||||
|
deployed_at TIMESTAMP DEFAULT NOW()
|
||||||
|
);
|
||||||
|
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
|
||||||
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
||||||
|
|
||||||
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
||||||
@@ -175,7 +186,17 @@ jobs:
|
|||||||
- name: Show PM2 Environment for Production
|
- name: Show PM2 Environment for Production
|
||||||
run: |
|
run: |
|
||||||
echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
|
echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
|
||||||
sleep 5
|
sleep 5 # Wait a few seconds for the app to start and log its output.
|
||||||
pm2 describe flyer-crawler-api || echo "Could not find production pm2 process."
|
|
||||||
pm2 logs flyer-crawler-api --lines 20 --nostream || echo "Could not find production pm2 process."
|
# Resolve the PM2 ID dynamically to ensure we target the correct process
|
||||||
pm2 env flyer-crawler-api || echo "Could not find production pm2 process."
|
PM2_ID=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.pm_id : ''); } catch(e) { console.log(''); }")
|
||||||
|
|
||||||
|
if [ -n "$PM2_ID" ]; then
|
||||||
|
echo "Found process ID: $PM2_ID"
|
||||||
|
pm2 describe "$PM2_ID" || echo "Failed to describe process $PM2_ID"
|
||||||
|
pm2 logs "$PM2_ID" --lines 20 --nostream || echo "Failed to get logs for $PM2_ID"
|
||||||
|
pm2 env "$PM2_ID" || echo "Failed to get env for $PM2_ID"
|
||||||
|
else
|
||||||
|
echo "Could not find process 'flyer-crawler-api' in pm2 list."
|
||||||
|
pm2 list # Fallback to listing everything to help debug
|
||||||
|
fi
|
||||||
|
|||||||
@@ -90,10 +90,29 @@ jobs:
|
|||||||
# integration test suite can launch its own, fresh server instance.
|
# integration test suite can launch its own, fresh server instance.
|
||||||
# '|| true' ensures the workflow doesn't fail if the process isn't running.
|
# '|| true' ensures the workflow doesn't fail if the process isn't running.
|
||||||
run: |
|
run: |
|
||||||
pm2 stop flyer-crawler-api-test || true
|
echo "--- Stopping and deleting all test processes ---"
|
||||||
pm2 stop flyer-crawler-worker-test || true
|
# Use a script to parse pm2's JSON output and delete any process whose name ends with '-test'.
|
||||||
pm2 delete flyer-crawler-api-test || true
|
# This is safer than 'pm2 delete all' and more robust than naming each process individually.
|
||||||
pm2 delete flyer-crawler-worker-test || true
|
# It prevents the accumulation of duplicate processes from previous test runs.
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.name && p.name.endsWith('-test')) { console.log('Deleting test process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id, e.message); } } }); console.log('✅ Test process cleanup complete.'); } catch (e) { if (e.stdout.toString().includes('No process found')) { console.log('No PM2 processes running, cleanup not needed.'); } else { console.error('Error cleaning up test processes:', e.message); } }" || true
|
||||||
|
|
||||||
|
- name: Flush Redis Test Database Before Tests
|
||||||
|
# CRITICAL: Clear Redis database 1 (test database) to remove stale BullMQ jobs.
|
||||||
|
# This prevents old jobs with outdated error messages from polluting test results.
|
||||||
|
# NOTE: We use database 1 for tests to isolate from production (database 0).
|
||||||
|
env:
|
||||||
|
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
|
||||||
|
run: |
|
||||||
|
echo "--- Flushing Redis database 1 (test database) to remove stale jobs ---"
|
||||||
|
if [ -z "$REDIS_PASSWORD" ]; then
|
||||||
|
echo "⚠️ REDIS_PASSWORD_TEST not set, attempting flush without password..."
|
||||||
|
redis-cli -n 1 FLUSHDB || echo "Redis flush failed (no password)"
|
||||||
|
else
|
||||||
|
redis-cli -a "$REDIS_PASSWORD" -n 1 FLUSHDB 2>/dev/null && echo "✅ Redis database 1 (test) flushed successfully." || echo "⚠️ Redis flush failed"
|
||||||
|
fi
|
||||||
|
# Verify the flush worked by checking key count on database 1
|
||||||
|
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 1 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
|
||||||
|
echo "Redis database 1 key count after flush: $KEY_COUNT"
|
||||||
|
|
||||||
- name: Run All Tests and Generate Merged Coverage Report
|
- name: Run All Tests and Generate Merged Coverage Report
|
||||||
# This single step runs both unit and integration tests, then merges their
|
# This single step runs both unit and integration tests, then merges their
|
||||||
@@ -108,14 +127,23 @@ jobs:
|
|||||||
DB_NAME: 'flyer-crawler-test' # Explicitly set for tests
|
DB_NAME: 'flyer-crawler-test' # Explicitly set for tests
|
||||||
|
|
||||||
# --- Redis credentials for the test suite ---
|
# --- Redis credentials for the test suite ---
|
||||||
REDIS_URL: 'redis://localhost:6379'
|
# CRITICAL: Use Redis database 1 to isolate tests from production (which uses db 0).
|
||||||
|
# This prevents the production worker from picking up test jobs.
|
||||||
|
REDIS_URL: 'redis://localhost:6379/1'
|
||||||
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
|
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
|
||||||
|
|
||||||
# --- Integration test specific variables ---
|
# --- Integration test specific variables ---
|
||||||
FRONTEND_URL: 'http://localhost:3000'
|
FRONTEND_URL: 'https://example.com'
|
||||||
VITE_API_BASE_URL: 'http://localhost:3001/api'
|
VITE_API_BASE_URL: 'http://localhost:3001/api'
|
||||||
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
|
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
|
||||||
|
|
||||||
|
# --- Storage path for flyer images ---
|
||||||
|
# CRITICAL: Use an absolute path in the test runner's working directory for file storage.
|
||||||
|
# This ensures tests can read processed files to verify their contents (e.g., EXIF stripping).
|
||||||
|
# Without this, multer and flyerProcessingService default to /var/www/.../flyer-images.
|
||||||
|
# NOTE: We use ${{ github.workspace }} which resolves to the checkout directory.
|
||||||
|
STORAGE_PATH: '${{ github.workspace }}/flyer-images'
|
||||||
|
|
||||||
# --- JWT Secret for Passport authentication in tests ---
|
# --- JWT Secret for Passport authentication in tests ---
|
||||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||||
|
|
||||||
@@ -126,7 +154,7 @@ jobs:
|
|||||||
|
|
||||||
# --- Increase Node.js memory limit to prevent heap out of memory errors ---
|
# --- Increase Node.js memory limit to prevent heap out of memory errors ---
|
||||||
# This is crucial for memory-intensive tasks like running tests and coverage.
|
# This is crucial for memory-intensive tasks like running tests and coverage.
|
||||||
NODE_OPTIONS: '--max-old-space-size=8192'
|
NODE_OPTIONS: '--max-old-space-size=8192 --trace-warnings --unhandled-rejections=strict'
|
||||||
|
|
||||||
run: |
|
run: |
|
||||||
# Fail-fast check to ensure secrets are configured in Gitea for testing.
|
# Fail-fast check to ensure secrets are configured in Gitea for testing.
|
||||||
@@ -142,15 +170,48 @@ jobs:
|
|||||||
# The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
|
# The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
|
||||||
echo "--- Running Unit Tests ---"
|
echo "--- Running Unit Tests ---"
|
||||||
# npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
# npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||||
npm run test:unit -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
|
npm run test:unit -- --coverage \
|
||||||
|
--coverage.exclude='**/*.test.ts' \
|
||||||
|
--coverage.exclude='**/tests/**' \
|
||||||
|
--coverage.exclude='**/mocks/**' \
|
||||||
|
--coverage.exclude='src/components/icons/**' \
|
||||||
|
--coverage.exclude='src/db/**' \
|
||||||
|
--coverage.exclude='src/lib/**' \
|
||||||
|
--coverage.exclude='src/types/**' \
|
||||||
|
--coverage.exclude='**/index.tsx' \
|
||||||
|
--coverage.exclude='**/vite-env.d.ts' \
|
||||||
|
--coverage.exclude='**/vitest.setup.ts' \
|
||||||
|
--reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
|
||||||
|
|
||||||
echo "--- Running Integration Tests ---"
|
echo "--- Running Integration Tests ---"
|
||||||
npm run test:integration -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
npm run test:integration -- --coverage \
|
||||||
|
--coverage.exclude='**/*.test.ts' \
|
||||||
|
--coverage.exclude='**/tests/**' \
|
||||||
|
--coverage.exclude='**/mocks/**' \
|
||||||
|
--coverage.exclude='src/components/icons/**' \
|
||||||
|
--coverage.exclude='src/db/**' \
|
||||||
|
--coverage.exclude='src/lib/**' \
|
||||||
|
--coverage.exclude='src/types/**' \
|
||||||
|
--coverage.exclude='**/index.tsx' \
|
||||||
|
--coverage.exclude='**/vite-env.d.ts' \
|
||||||
|
--coverage.exclude='**/vitest.setup.ts' \
|
||||||
|
--reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||||
|
|
||||||
echo "--- Running E2E Tests ---"
|
echo "--- Running E2E Tests ---"
|
||||||
# Run E2E tests using the dedicated E2E config which inherits from integration config.
|
# Run E2E tests using the dedicated E2E config which inherits from integration config.
|
||||||
# We still pass --coverage to enable it, but directory and timeout are now in the config.
|
# We still pass --coverage to enable it, but directory and timeout are now in the config.
|
||||||
npx vitest run --config vitest.config.e2e.ts --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --no-file-parallelism || true
|
npx vitest run --config vitest.config.e2e.ts --coverage \
|
||||||
|
--coverage.exclude='**/*.test.ts' \
|
||||||
|
--coverage.exclude='**/tests/**' \
|
||||||
|
--coverage.exclude='**/mocks/**' \
|
||||||
|
--coverage.exclude='src/components/icons/**' \
|
||||||
|
--coverage.exclude='src/db/**' \
|
||||||
|
--coverage.exclude='src/lib/**' \
|
||||||
|
--coverage.exclude='src/types/**' \
|
||||||
|
--coverage.exclude='**/index.tsx' \
|
||||||
|
--coverage.exclude='**/vite-env.d.ts' \
|
||||||
|
--coverage.exclude='**/vitest.setup.ts' \
|
||||||
|
--reporter=verbose --no-file-parallelism || true
|
||||||
|
|
||||||
# Re-enable secret masking for subsequent steps.
|
# Re-enable secret masking for subsequent steps.
|
||||||
echo "::secret-masking::"
|
echo "::secret-masking::"
|
||||||
@@ -221,7 +282,10 @@ jobs:
|
|||||||
--temp-dir "$NYC_SOURCE_DIR" \
|
--temp-dir "$NYC_SOURCE_DIR" \
|
||||||
--exclude "**/*.test.ts" \
|
--exclude "**/*.test.ts" \
|
||||||
--exclude "**/tests/**" \
|
--exclude "**/tests/**" \
|
||||||
--exclude "**/mocks/**"
|
--exclude "**/mocks/**" \
|
||||||
|
--exclude "**/index.tsx" \
|
||||||
|
--exclude "**/vite-env.d.ts" \
|
||||||
|
--exclude "**/vitest.setup.ts"
|
||||||
|
|
||||||
# Re-enable secret masking for subsequent steps.
|
# Re-enable secret masking for subsequent steps.
|
||||||
echo "::secret-masking::"
|
echo "::secret-masking::"
|
||||||
@@ -234,16 +298,6 @@ jobs:
|
|||||||
if: always() # This step runs even if the previous test or coverage steps failed.
|
if: always() # This step runs even if the previous test or coverage steps failed.
|
||||||
run: echo "Skipping test artifact cleanup on runner; this is handled on the server."
|
run: echo "Skipping test artifact cleanup on runner; this is handled on the server."
|
||||||
|
|
||||||
- name: Deploy Coverage Report to Public URL
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
TARGET_DIR="/var/www/flyer-crawler-test.projectium.com/coverage"
|
|
||||||
echo "Deploying HTML coverage report to $TARGET_DIR..."
|
|
||||||
mkdir -p "$TARGET_DIR"
|
|
||||||
rm -rf "$TARGET_DIR"/*
|
|
||||||
cp -r .coverage/* "$TARGET_DIR/"
|
|
||||||
echo "✅ Coverage report deployed to https://flyer-crawler-test.projectium.com/coverage"
|
|
||||||
|
|
||||||
- name: Archive Code Coverage Report
|
- name: Archive Code Coverage Report
|
||||||
# This action saves the generated HTML coverage report as a downloadable artifact.
|
# This action saves the generated HTML coverage report as a downloadable artifact.
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v3
|
||||||
@@ -282,6 +336,9 @@ jobs:
|
|||||||
if [ -z "$DEPLOYED_HASH" ]; then
|
if [ -z "$DEPLOYED_HASH" ]; then
|
||||||
echo "WARNING: No schema hash found in the test database."
|
echo "WARNING: No schema hash found in the test database."
|
||||||
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
|
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
|
||||||
|
echo "--- Debug: Dumping schema_info table ---"
|
||||||
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -P pager=off -c "SELECT * FROM public.schema_info;" || true
|
||||||
|
echo "----------------------------------------"
|
||||||
# We allow the deployment to continue, but a manual schema update is required.
|
# We allow the deployment to continue, but a manual schema update is required.
|
||||||
# You could choose to fail here by adding `exit 1`.
|
# You could choose to fail here by adding `exit 1`.
|
||||||
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
||||||
@@ -305,8 +362,10 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
|
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
|
||||||
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
|
# Sanitize commit message to prevent shell injection or build breaks (removes quotes, backticks, backslashes, $)
|
||||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
|
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s | tr -d '"`\\$')
|
||||||
|
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||||
|
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||||
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||||
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||||
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
|
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
|
||||||
@@ -329,6 +388,17 @@ jobs:
|
|||||||
rsync -avz dist/ "$APP_PATH"
|
rsync -avz dist/ "$APP_PATH"
|
||||||
echo "Application deployment complete."
|
echo "Application deployment complete."
|
||||||
|
|
||||||
|
- name: Deploy Coverage Report to Public URL
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
TARGET_DIR="/var/www/flyer-crawler-test.projectium.com/coverage"
|
||||||
|
echo "Deploying HTML coverage report to $TARGET_DIR..."
|
||||||
|
mkdir -p "$TARGET_DIR"
|
||||||
|
rm -rf "$TARGET_DIR"/*
|
||||||
|
# The merged nyc report is generated in the .coverage directory. We copy its contents.
|
||||||
|
cp -r .coverage/* "$TARGET_DIR/"
|
||||||
|
echo "✅ Coverage report deployed to https://flyer-crawler-test.projectium.com/coverage"
|
||||||
|
|
||||||
- name: Install Backend Dependencies and Restart Test Server
|
- name: Install Backend Dependencies and Restart Test Server
|
||||||
env:
|
env:
|
||||||
# --- Test Secrets Injection ---
|
# --- Test Secrets Injection ---
|
||||||
@@ -341,13 +411,13 @@ jobs:
|
|||||||
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||||
DB_NAME: ${{ secrets.DB_DATABASE_TEST }}
|
DB_NAME: ${{ secrets.DB_DATABASE_TEST }}
|
||||||
|
|
||||||
# Redis Credentials
|
# Redis Credentials (use database 1 to isolate from production)
|
||||||
REDIS_URL: 'redis://localhost:6379'
|
REDIS_URL: 'redis://localhost:6379/1'
|
||||||
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
|
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
|
||||||
|
|
||||||
# Application Secrets
|
# Application Secrets
|
||||||
FRONTEND_URL: 'https://flyer-crawler-test.projectium.com'
|
FRONTEND_URL: 'https://example.com'
|
||||||
JWT_SECRET: ${{ secrets.JWT_SECRET_TEST }}
|
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||||
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }}
|
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }}
|
||||||
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
||||||
|
|
||||||
@@ -361,18 +431,30 @@ jobs:
|
|||||||
|
|
||||||
run: |
|
run: |
|
||||||
# Fail-fast check to ensure secrets are configured in Gitea.
|
# Fail-fast check to ensure secrets are configured in Gitea.
|
||||||
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
MISSING_SECRETS=""
|
||||||
echo "ERROR: One or more test database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_TEST) are not set in Gitea repository settings."
|
if [ -z "$DB_HOST" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_HOST"; fi
|
||||||
|
if [ -z "$DB_USER" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_USER"; fi
|
||||||
|
if [ -z "$DB_PASSWORD" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_PASSWORD"; fi
|
||||||
|
if [ -z "$DB_NAME" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_NAME"; fi
|
||||||
|
if [ -z "$JWT_SECRET" ]; then MISSING_SECRETS="${MISSING_SECRETS} JWT_SECRET"; fi
|
||||||
|
|
||||||
|
if [ ! -z "$MISSING_SECRETS" ]; then
|
||||||
|
echo "ERROR: The following required secrets are missing in Gitea:${MISSING_SECRETS}"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "Installing production dependencies and restarting test server..."
|
echo "Installing production dependencies and restarting test server..."
|
||||||
cd /var/www/flyer-crawler-test.projectium.com
|
cd /var/www/flyer-crawler-test.projectium.com
|
||||||
npm install --omit=dev
|
npm install --omit=dev
|
||||||
|
|
||||||
|
# --- Cleanup Errored Processes ---
|
||||||
|
echo "Cleaning up errored or stopped PM2 processes..."
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
|
||||||
|
|
||||||
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
|
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
|
||||||
# It will START the process if it's not running, or RELOAD it if it is.
|
# It will START the process if it's not running, or RELOAD it if it is.
|
||||||
# We also add `&& pm2 save` to persist the process list across server reboots.
|
# We also add `&& pm2 save` to persist the process list across server reboots.
|
||||||
pm2 startOrReload ecosystem.config.cjs --env test && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env test --update-env && pm2 save
|
||||||
echo "Test backend server reloaded successfully."
|
echo "Test backend server reloaded successfully."
|
||||||
|
|
||||||
# After a successful deployment, update the schema hash in the database.
|
# After a successful deployment, update the schema hash in the database.
|
||||||
@@ -380,7 +462,12 @@ jobs:
|
|||||||
echo "Updating schema hash in test database..."
|
echo "Updating schema hash in test database..."
|
||||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
||||||
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
|
"CREATE TABLE IF NOT EXISTS public.schema_info (
|
||||||
|
environment VARCHAR(50) PRIMARY KEY,
|
||||||
|
schema_hash VARCHAR(64) NOT NULL,
|
||||||
|
deployed_at TIMESTAMP DEFAULT NOW()
|
||||||
|
);
|
||||||
|
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
|
||||||
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
||||||
|
|
||||||
# Verify the hash was updated
|
# Verify the hash was updated
|
||||||
@@ -402,7 +489,17 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "--- Displaying recent PM2 logs for flyer-crawler-api-test ---"
|
echo "--- Displaying recent PM2 logs for flyer-crawler-api-test ---"
|
||||||
# After a reload, the server restarts. We'll show the last 20 lines of the log to see the startup messages.
|
# After a reload, the server restarts. We'll show the last 20 lines of the log to see the startup messages.
|
||||||
sleep 5 # Wait a few seconds for the app to start and log its output.
|
sleep 5
|
||||||
pm2 describe flyer-crawler-api-test || echo "Could not find test pm2 process."
|
|
||||||
pm2 logs flyer-crawler-api-test --lines 20 --nostream || echo "Could not find test pm2 process."
|
# Resolve the PM2 ID dynamically to ensure we target the correct process
|
||||||
pm2 env flyer-crawler-api-test || echo "Could not find test pm2 process."
|
PM2_ID=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api-test'); console.log(app ? app.pm2_env.pm_id : ''); } catch(e) { console.log(''); }")
|
||||||
|
|
||||||
|
if [ -n "$PM2_ID" ]; then
|
||||||
|
echo "Found process ID: $PM2_ID"
|
||||||
|
pm2 describe "$PM2_ID" || echo "Failed to describe process $PM2_ID"
|
||||||
|
pm2 logs "$PM2_ID" --lines 20 --nostream || echo "Failed to get logs for $PM2_ID"
|
||||||
|
pm2 env "$PM2_ID" || echo "Failed to get env for $PM2_ID"
|
||||||
|
else
|
||||||
|
echo "Could not find process 'flyer-crawler-api-test' in pm2 list."
|
||||||
|
pm2 list # Fallback to listing everything to help debug
|
||||||
|
fi
|
||||||
|
|||||||
@@ -92,8 +92,9 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
GITEA_SERVER_URL="https://gitea.projectium.com"
|
GITEA_SERVER_URL="https://gitea.projectium.com"
|
||||||
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
|
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
|
||||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
|
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||||
|
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||||
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||||
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||||
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
||||||
@@ -115,7 +116,8 @@ jobs:
|
|||||||
DB_USER: ${{ secrets.DB_USER }}
|
DB_USER: ${{ secrets.DB_USER }}
|
||||||
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||||
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
||||||
REDIS_URL: 'redis://localhost:6379'
|
# Explicitly use database 0 for production (test uses database 1)
|
||||||
|
REDIS_URL: 'redis://localhost:6379/0'
|
||||||
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
||||||
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
|
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
|
||||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||||
@@ -136,6 +138,10 @@ jobs:
|
|||||||
cd /var/www/flyer-crawler.projectium.com
|
cd /var/www/flyer-crawler.projectium.com
|
||||||
npm install --omit=dev
|
npm install --omit=dev
|
||||||
|
|
||||||
|
# --- Cleanup Errored Processes ---
|
||||||
|
echo "Cleaning up errored or stopped PM2 processes..."
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
|
||||||
|
|
||||||
# --- Version Check Logic ---
|
# --- Version Check Logic ---
|
||||||
# Get the version from the newly deployed package.json
|
# Get the version from the newly deployed package.json
|
||||||
NEW_VERSION=$(node -p "require('./package.json').version")
|
NEW_VERSION=$(node -p "require('./package.json').version")
|
||||||
@@ -152,7 +158,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
||||||
fi
|
fi
|
||||||
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
|
||||||
echo "Production backend server reloaded successfully."
|
echo "Production backend server reloaded successfully."
|
||||||
else
|
else
|
||||||
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
||||||
|
|||||||
167
.gitea/workflows/manual-redis-flush-prod.yml
Normal file
167
.gitea/workflows/manual-redis-flush-prod.yml
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
# .gitea/workflows/manual-redis-flush-prod.yml
|
||||||
|
#
|
||||||
|
# DANGER: This workflow is DESTRUCTIVE and intended for manual execution only.
|
||||||
|
# It will completely FLUSH the PRODUCTION Redis database (db 0).
|
||||||
|
# This will clear all BullMQ queues, sessions, caches, and any other Redis data.
|
||||||
|
#
|
||||||
|
name: Manual - Flush Production Redis
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
confirmation:
|
||||||
|
description: 'DANGER: This will FLUSH production Redis. Type "flush-production-redis" to confirm.'
|
||||||
|
required: true
|
||||||
|
default: 'do-not-run'
|
||||||
|
flush_type:
|
||||||
|
description: 'What to flush?'
|
||||||
|
required: true
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- 'queues-only'
|
||||||
|
- 'entire-database'
|
||||||
|
default: 'queues-only'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
flush-redis:
|
||||||
|
runs-on: projectium.com # This job runs on your self-hosted Gitea runner.
|
||||||
|
|
||||||
|
env:
|
||||||
|
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout Code
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: '**/package-lock.json'
|
||||||
|
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Validate Secrets
|
||||||
|
run: |
|
||||||
|
if [ -z "$REDIS_PASSWORD" ]; then
|
||||||
|
echo "ERROR: REDIS_PASSWORD_PROD secret is not set in Gitea repository settings."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✅ Redis password secret is present."
|
||||||
|
|
||||||
|
- name: Verify Confirmation Phrase
|
||||||
|
run: |
|
||||||
|
if [ "${{ gitea.event.inputs.confirmation }}" != "flush-production-redis" ]; then
|
||||||
|
echo "ERROR: Confirmation phrase did not match. Aborting Redis flush."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✅ Confirmation accepted. Proceeding with Redis flush."
|
||||||
|
|
||||||
|
- name: Show Current Redis State
|
||||||
|
run: |
|
||||||
|
echo "--- Current Redis Database 0 (Production) State ---"
|
||||||
|
redis-cli -a "$REDIS_PASSWORD" -n 0 INFO keyspace 2>/dev/null || echo "Could not get keyspace info"
|
||||||
|
echo ""
|
||||||
|
echo "--- Key Count ---"
|
||||||
|
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
|
||||||
|
echo "Production Redis (db 0) key count: $KEY_COUNT"
|
||||||
|
echo ""
|
||||||
|
echo "--- BullMQ Queue Keys ---"
|
||||||
|
redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | head -20 || echo "No BullMQ keys found"
|
||||||
|
|
||||||
|
- name: 🚨 FINAL WARNING & PAUSE 🚨
|
||||||
|
run: |
|
||||||
|
echo "*********************************************************************"
|
||||||
|
echo "WARNING: YOU ARE ABOUT TO FLUSH PRODUCTION REDIS DATA."
|
||||||
|
echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
|
||||||
|
echo ""
|
||||||
|
if [ "${{ gitea.event.inputs.flush_type }}" = "entire-database" ]; then
|
||||||
|
echo "This will DELETE ALL Redis data including sessions, caches, and queues!"
|
||||||
|
else
|
||||||
|
echo "This will DELETE ALL BullMQ queue data (pending jobs, failed jobs, etc.)"
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
echo "This action is IRREVERSIBLE. Press Ctrl+C in the runner terminal NOW to cancel."
|
||||||
|
echo "Sleeping for 10 seconds..."
|
||||||
|
echo "*********************************************************************"
|
||||||
|
sleep 10
|
||||||
|
|
||||||
|
- name: Flush BullMQ Queues Only
|
||||||
|
if: ${{ gitea.event.inputs.flush_type == 'queues-only' }}
|
||||||
|
env:
|
||||||
|
REDIS_URL: 'redis://localhost:6379/0'
|
||||||
|
run: |
|
||||||
|
echo "--- Obliterating BullMQ queues using Node.js ---"
|
||||||
|
node -e "
|
||||||
|
const { Queue } = require('bullmq');
|
||||||
|
const IORedis = require('ioredis');
|
||||||
|
|
||||||
|
const connection = new IORedis(process.env.REDIS_URL, {
|
||||||
|
maxRetriesPerRequest: null,
|
||||||
|
password: process.env.REDIS_PASSWORD,
|
||||||
|
});
|
||||||
|
|
||||||
|
const queueNames = [
|
||||||
|
'flyer-processing',
|
||||||
|
'email-sending',
|
||||||
|
'analytics-reporting',
|
||||||
|
'weekly-analytics-reporting',
|
||||||
|
'file-cleanup',
|
||||||
|
'token-cleanup'
|
||||||
|
];
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
for (const name of queueNames) {
|
||||||
|
try {
|
||||||
|
const queue = new Queue(name, { connection });
|
||||||
|
const counts = await queue.getJobCounts();
|
||||||
|
console.log('Queue \"' + name + '\" before obliterate:', JSON.stringify(counts));
|
||||||
|
await queue.obliterate({ force: true });
|
||||||
|
console.log('✅ Obliterated queue: ' + name);
|
||||||
|
await queue.close();
|
||||||
|
} catch (err) {
|
||||||
|
console.error('⚠️ Failed to obliterate queue ' + name + ':', err.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await connection.quit();
|
||||||
|
console.log('✅ All BullMQ queues obliterated.');
|
||||||
|
})();
|
||||||
|
"
|
||||||
|
|
||||||
|
- name: Flush Entire Redis Database
|
||||||
|
if: ${{ gitea.event.inputs.flush_type == 'entire-database' }}
|
||||||
|
run: |
|
||||||
|
echo "--- Flushing entire Redis database 0 (production) ---"
|
||||||
|
redis-cli -a "$REDIS_PASSWORD" -n 0 FLUSHDB 2>/dev/null && echo "✅ Redis database 0 flushed successfully." || echo "❌ Redis flush failed"
|
||||||
|
|
||||||
|
- name: Verify Flush Results
|
||||||
|
run: |
|
||||||
|
echo "--- Redis Database 0 (Production) State After Flush ---"
|
||||||
|
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
|
||||||
|
echo "Production Redis (db 0) key count after flush: $KEY_COUNT"
|
||||||
|
echo ""
|
||||||
|
echo "--- Remaining BullMQ Queue Keys ---"
|
||||||
|
BULL_KEYS=$(redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | wc -l || echo "0")
|
||||||
|
echo "BullMQ key count: $BULL_KEYS"
|
||||||
|
|
||||||
|
if [ "${{ gitea.event.inputs.flush_type }}" = "queues-only" ] && [ "$BULL_KEYS" -gt 0 ]; then
|
||||||
|
echo "⚠️ Warning: Some BullMQ keys may still exist. This can happen if new jobs were added during the flush."
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Summary
|
||||||
|
run: |
|
||||||
|
echo ""
|
||||||
|
echo "=========================================="
|
||||||
|
echo "PRODUCTION REDIS FLUSH COMPLETE"
|
||||||
|
echo "=========================================="
|
||||||
|
echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
|
||||||
|
echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
|
||||||
|
echo ""
|
||||||
|
echo "NOTE: If you flushed queues, any pending jobs (flyer processing,"
|
||||||
|
echo "emails, analytics, etc.) have been permanently deleted."
|
||||||
|
echo ""
|
||||||
|
echo "The production workers will automatically start processing"
|
||||||
|
echo "new jobs as they are added to the queues."
|
||||||
|
echo "=========================================="
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -12,6 +12,9 @@ dist
|
|||||||
dist-ssr
|
dist-ssr
|
||||||
*.local
|
*.local
|
||||||
|
|
||||||
|
# Test coverage
|
||||||
|
coverage
|
||||||
|
|
||||||
# Editor directories and files
|
# Editor directories and files
|
||||||
.vscode/*
|
.vscode/*
|
||||||
!.vscode/extensions.json
|
!.vscode/extensions.json
|
||||||
|
|||||||
1
.husky/pre-commit
Normal file
1
.husky/pre-commit
Normal file
@@ -0,0 +1 @@
|
|||||||
|
npx lint-staged
|
||||||
4
.lintstagedrc.json
Normal file
4
.lintstagedrc.json
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"*.{js,jsx,ts,tsx}": ["eslint --fix", "prettier --write"],
|
||||||
|
"*.{json,md,css,html,yml,yaml}": ["prettier --write"]
|
||||||
|
}
|
||||||
41
.prettierignore
Normal file
41
.prettierignore
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Dependencies
|
||||||
|
node_modules/
|
||||||
|
|
||||||
|
# Build output
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
.cache/
|
||||||
|
|
||||||
|
# Coverage reports
|
||||||
|
coverage/
|
||||||
|
.coverage/
|
||||||
|
|
||||||
|
# IDE and editor configs
|
||||||
|
.idea/
|
||||||
|
.vscode/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
*.log
|
||||||
|
logs/
|
||||||
|
|
||||||
|
# Environment files (may contain secrets)
|
||||||
|
.env*
|
||||||
|
!.env.example
|
||||||
|
|
||||||
|
# Lock files (managed by package managers)
|
||||||
|
package-lock.json
|
||||||
|
pnpm-lock.yaml
|
||||||
|
yarn.lock
|
||||||
|
|
||||||
|
# Generated files
|
||||||
|
*.min.js
|
||||||
|
*.min.css
|
||||||
|
|
||||||
|
# Git directory
|
||||||
|
.git/
|
||||||
|
.gitea/
|
||||||
|
|
||||||
|
# Test artifacts
|
||||||
|
__snapshots__/
|
||||||
@@ -1,31 +1,60 @@
|
|||||||
# Use Ubuntu 22.04 (LTS) as the base image to match production
|
# Dockerfile.dev
|
||||||
|
# ============================================================================
|
||||||
|
# DEVELOPMENT DOCKERFILE
|
||||||
|
# ============================================================================
|
||||||
|
# This Dockerfile creates a development environment that matches production
|
||||||
|
# as closely as possible while providing the tools needed for development.
|
||||||
|
#
|
||||||
|
# Base: Ubuntu 22.04 (LTS) - matches production server
|
||||||
|
# Node: v20.x (LTS) - matches production
|
||||||
|
# Includes: PostgreSQL client, Redis CLI, build tools
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
FROM ubuntu:22.04
|
FROM ubuntu:22.04
|
||||||
|
|
||||||
# Set environment variables to non-interactive to avoid prompts during installation
|
# Set environment variables to non-interactive to avoid prompts during installation
|
||||||
ENV DEBIAN_FRONTEND=noninteractive
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
# Update package lists and install essential tools
|
# ============================================================================
|
||||||
# - curl: for downloading Node.js setup script
|
# Install System Dependencies
|
||||||
|
# ============================================================================
|
||||||
|
# - curl: for downloading Node.js setup script and health checks
|
||||||
# - git: for version control operations
|
# - git: for version control operations
|
||||||
# - build-essential: for compiling native Node.js modules (node-gyp)
|
# - build-essential: for compiling native Node.js modules (node-gyp)
|
||||||
# - python3: required by some Node.js build tools
|
# - python3: required by some Node.js build tools
|
||||||
|
# - postgresql-client: for psql CLI (database initialization)
|
||||||
|
# - redis-tools: for redis-cli (health checks)
|
||||||
RUN apt-get update && apt-get install -y \
|
RUN apt-get update && apt-get install -y \
|
||||||
curl \
|
curl \
|
||||||
git \
|
git \
|
||||||
build-essential \
|
build-essential \
|
||||||
python3 \
|
python3 \
|
||||||
|
postgresql-client \
|
||||||
|
redis-tools \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Install Node.js 20.x (LTS) from NodeSource
|
# ============================================================================
|
||||||
|
# Install Node.js 20.x (LTS)
|
||||||
|
# ============================================================================
|
||||||
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
||||||
&& apt-get install -y nodejs
|
&& apt-get install -y nodejs
|
||||||
|
|
||||||
# Set the working directory inside the container
|
# ============================================================================
|
||||||
|
# Set Working Directory
|
||||||
|
# ============================================================================
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Set default environment variables for development
|
# ============================================================================
|
||||||
|
# Environment Configuration
|
||||||
|
# ============================================================================
|
||||||
|
# Default environment variables for development
|
||||||
ENV NODE_ENV=development
|
ENV NODE_ENV=development
|
||||||
|
# Increase Node.js memory limit for large builds
|
||||||
ENV NODE_OPTIONS='--max-old-space-size=8192'
|
ENV NODE_OPTIONS='--max-old-space-size=8192'
|
||||||
|
|
||||||
# Default command keeps the container running so you can attach to it
|
# ============================================================================
|
||||||
CMD ["bash"]
|
# Default Command
|
||||||
|
# ============================================================================
|
||||||
|
# Keep container running so VS Code can attach.
|
||||||
|
# Actual commands (npm run dev, etc.) are run via devcontainer.json.
|
||||||
|
CMD ["bash"]
|
||||||
|
|||||||
630
README.vscode.md
Normal file
630
README.vscode.md
Normal file
File diff suppressed because it is too large
Load Diff
303
READMEv2.md
Normal file
303
READMEv2.md
Normal file
@@ -0,0 +1,303 @@
|
|||||||
|
# Flyer Crawler - Development Environment Setup
|
||||||
|
|
||||||
|
Quick start guide for getting the development environment running with Podman containers.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- **Windows with WSL 2**: Install WSL 2 by running `wsl --install` in an administrator PowerShell
|
||||||
|
- **Podman Desktop**: Download and install [Podman Desktop for Windows](https://podman-desktop.io/)
|
||||||
|
- **Node.js 20+**: Required for running the application
|
||||||
|
|
||||||
|
## Quick Start - Container Environment
|
||||||
|
|
||||||
|
### 1. Initialize Podman
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Start Podman machine (do this once after installing Podman Desktop)
|
||||||
|
podman machine init
|
||||||
|
podman machine start
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Start Required Services
|
||||||
|
|
||||||
|
Start PostgreSQL (with PostGIS) and Redis containers:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Navigate to project directory
|
||||||
|
cd D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com
|
||||||
|
|
||||||
|
# Start PostgreSQL with PostGIS
|
||||||
|
podman run -d \
|
||||||
|
--name flyer-crawler-postgres \
|
||||||
|
-e POSTGRES_USER=postgres \
|
||||||
|
-e POSTGRES_PASSWORD=postgres \
|
||||||
|
-e POSTGRES_DB=flyer_crawler_dev \
|
||||||
|
-p 5432:5432 \
|
||||||
|
docker.io/postgis/postgis:15-3.3
|
||||||
|
|
||||||
|
# Start Redis
|
||||||
|
podman run -d \
|
||||||
|
--name flyer-crawler-redis \
|
||||||
|
-e REDIS_PASSWORD="" \
|
||||||
|
-p 6379:6379 \
|
||||||
|
docker.io/library/redis:alpine
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Wait for PostgreSQL to Initialize
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Wait a few seconds, then check if PostgreSQL is ready
|
||||||
|
podman exec flyer-crawler-postgres pg_isready -U postgres
|
||||||
|
# Should output: /var/run/postgresql:5432 - accepting connections
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Install Required PostgreSQL Extensions
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "CREATE EXTENSION IF NOT EXISTS postgis; CREATE EXTENSION IF NOT EXISTS pg_trgm; CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";"
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Apply Database Schema
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Apply the complete schema with URL constraints enabled
|
||||||
|
podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev < sql/master_schema_rollup.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6. Verify URL Constraints Are Enabled
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "\d public.flyers" | grep -E "(image_url|icon_url|Check)"
|
||||||
|
```
|
||||||
|
|
||||||
|
You should see:
|
||||||
|
```
|
||||||
|
image_url | text | | not null |
|
||||||
|
icon_url | text | | not null |
|
||||||
|
Check constraints:
|
||||||
|
"flyers_icon_url_check" CHECK (icon_url ~* '^https?://.*'::text)
|
||||||
|
"flyers_image_url_check" CHECK (image_url ~* '^https?://.*'::text)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 7. Set Environment Variables and Start Application
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Set required environment variables
|
||||||
|
$env:NODE_ENV="development"
|
||||||
|
$env:DB_HOST="localhost"
|
||||||
|
$env:DB_USER="postgres"
|
||||||
|
$env:DB_PASSWORD="postgres"
|
||||||
|
$env:DB_NAME="flyer_crawler_dev"
|
||||||
|
$env:REDIS_URL="redis://localhost:6379"
|
||||||
|
$env:PORT="3001"
|
||||||
|
$env:FRONTEND_URL="http://localhost:5173"
|
||||||
|
|
||||||
|
# Install dependencies (first time only)
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Start the development server (runs both backend and frontend)
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
The application will be available at:
|
||||||
|
- **Frontend**: http://localhost:5173
|
||||||
|
- **Backend API**: http://localhost:3001
|
||||||
|
|
||||||
|
## Managing Containers
|
||||||
|
|
||||||
|
### View Running Containers
|
||||||
|
```powershell
|
||||||
|
podman ps
|
||||||
|
```
|
||||||
|
|
||||||
|
### Stop Containers
|
||||||
|
```powershell
|
||||||
|
podman stop flyer-crawler-postgres flyer-crawler-redis
|
||||||
|
```
|
||||||
|
|
||||||
|
### Start Containers (After They've Been Created)
|
||||||
|
```powershell
|
||||||
|
podman start flyer-crawler-postgres flyer-crawler-redis
|
||||||
|
```
|
||||||
|
|
||||||
|
### Remove Containers (Clean Slate)
|
||||||
|
```powershell
|
||||||
|
podman stop flyer-crawler-postgres flyer-crawler-redis
|
||||||
|
podman rm flyer-crawler-postgres flyer-crawler-redis
|
||||||
|
```
|
||||||
|
|
||||||
|
### View Container Logs
|
||||||
|
```powershell
|
||||||
|
podman logs flyer-crawler-postgres
|
||||||
|
podman logs flyer-crawler-redis
|
||||||
|
```
|
||||||
|
|
||||||
|
## Database Management
|
||||||
|
|
||||||
|
### Connect to PostgreSQL
|
||||||
|
```powershell
|
||||||
|
podman exec -it flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev
|
||||||
|
```
|
||||||
|
|
||||||
|
### Reset Database Schema
|
||||||
|
```powershell
|
||||||
|
# Drop all tables
|
||||||
|
podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev < sql/drop_tables.sql
|
||||||
|
|
||||||
|
# Reapply schema
|
||||||
|
podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev < sql/master_schema_rollup.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Seed Development Data
|
||||||
|
```powershell
|
||||||
|
npm run db:reset:dev
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running Tests
|
||||||
|
|
||||||
|
### Unit Tests
|
||||||
|
```powershell
|
||||||
|
npm run test:unit
|
||||||
|
```
|
||||||
|
|
||||||
|
### Integration Tests
|
||||||
|
|
||||||
|
**IMPORTANT**: Integration tests require the PostgreSQL and Redis containers to be running.
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Make sure containers are running
|
||||||
|
podman ps
|
||||||
|
|
||||||
|
# Run integration tests
|
||||||
|
npm run test:integration
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Podman Machine Issues
|
||||||
|
If you get "unable to connect to Podman socket" errors:
|
||||||
|
```powershell
|
||||||
|
podman machine start
|
||||||
|
```
|
||||||
|
|
||||||
|
### PostgreSQL Connection Refused
|
||||||
|
Make sure PostgreSQL is ready:
|
||||||
|
```powershell
|
||||||
|
podman exec flyer-crawler-postgres pg_isready -U postgres
|
||||||
|
```
|
||||||
|
|
||||||
|
### Port Already in Use
|
||||||
|
If ports 5432 or 6379 are already in use, you can either:
|
||||||
|
1. Stop the conflicting service
|
||||||
|
2. Change the port mapping when creating containers (e.g., `-p 5433:5432`)
|
||||||
|
|
||||||
|
### URL Validation Errors
|
||||||
|
The database now enforces URL constraints. All `image_url` and `icon_url` fields must:
|
||||||
|
- Start with `http://` or `https://`
|
||||||
|
- Match the regex pattern: `^https?://.*`
|
||||||
|
|
||||||
|
Make sure the `FRONTEND_URL` environment variable is set correctly to avoid URL validation errors.
|
||||||
|
|
||||||
|
## ADR Implementation Status
|
||||||
|
|
||||||
|
This development environment implements:
|
||||||
|
|
||||||
|
- **ADR-0002**: Transaction Management ✅
|
||||||
|
- All database operations use the `withTransaction` pattern
|
||||||
|
- Automatic rollback on errors
|
||||||
|
- No connection pool leaks
|
||||||
|
|
||||||
|
- **ADR-0003**: Input Validation ✅
|
||||||
|
- Zod schemas for URL validation
|
||||||
|
- Database constraints enabled
|
||||||
|
- Validation at API boundaries
|
||||||
|
|
||||||
|
## Development Workflow
|
||||||
|
|
||||||
|
1. **Start Containers** (once per development session)
|
||||||
|
```powershell
|
||||||
|
podman start flyer-crawler-postgres flyer-crawler-redis
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Start Application**
|
||||||
|
```powershell
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Make Changes** to code (auto-reloads via `tsx watch`)
|
||||||
|
|
||||||
|
4. **Run Tests** before committing
|
||||||
|
```powershell
|
||||||
|
npm run test:unit
|
||||||
|
npm run test:integration
|
||||||
|
```
|
||||||
|
|
||||||
|
5. **Stop Application** (Ctrl+C)
|
||||||
|
|
||||||
|
6. **Stop Containers** (optional, or leave running)
|
||||||
|
```powershell
|
||||||
|
podman stop flyer-crawler-postgres flyer-crawler-redis
|
||||||
|
```
|
||||||
|
|
||||||
|
## PM2 Worker Setup (Production-like)
|
||||||
|
|
||||||
|
To test with PM2 workers locally:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Install PM2 globally (once)
|
||||||
|
npm install -g pm2
|
||||||
|
|
||||||
|
# Start the worker
|
||||||
|
pm2 start npm --name "flyer-crawler-worker" -- run worker:prod
|
||||||
|
|
||||||
|
# View logs
|
||||||
|
pm2 logs flyer-crawler-worker
|
||||||
|
|
||||||
|
# Stop worker
|
||||||
|
pm2 stop flyer-crawler-worker
|
||||||
|
pm2 delete flyer-crawler-worker
|
||||||
|
```
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
After getting the environment running:
|
||||||
|
|
||||||
|
1. Review [docs/adr/](docs/adr/) for architectural decisions
|
||||||
|
2. Check [sql/master_schema_rollup.sql](sql/master_schema_rollup.sql) for database schema
|
||||||
|
3. Explore [src/routes/](src/routes/) for API endpoints
|
||||||
|
4. Review [src/types.ts](src/types.ts) for TypeScript type definitions
|
||||||
|
|
||||||
|
## Common Environment Variables
|
||||||
|
|
||||||
|
Create these environment variables for development:
|
||||||
|
|
||||||
|
```powershell
|
||||||
|
# Database
|
||||||
|
$env:DB_HOST="localhost"
|
||||||
|
$env:DB_USER="postgres"
|
||||||
|
$env:DB_PASSWORD="postgres"
|
||||||
|
$env:DB_NAME="flyer_crawler_dev"
|
||||||
|
$env:DB_PORT="5432"
|
||||||
|
|
||||||
|
# Redis
|
||||||
|
$env:REDIS_URL="redis://localhost:6379"
|
||||||
|
|
||||||
|
# Application
|
||||||
|
$env:NODE_ENV="development"
|
||||||
|
$env:PORT="3001"
|
||||||
|
$env:FRONTEND_URL="http://localhost:5173"
|
||||||
|
|
||||||
|
# Authentication (generate your own secrets)
|
||||||
|
$env:JWT_SECRET="your-dev-jwt-secret-change-this"
|
||||||
|
$env:SESSION_SECRET="your-dev-session-secret-change-this"
|
||||||
|
|
||||||
|
# AI Services (get your own API keys)
|
||||||
|
$env:VITE_GOOGLE_GENAI_API_KEY="your-google-genai-api-key"
|
||||||
|
$env:GOOGLE_MAPS_API_KEY="your-google-maps-api-key"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Resources
|
||||||
|
|
||||||
|
- [Podman Desktop Documentation](https://podman-desktop.io/docs)
|
||||||
|
- [PostGIS Documentation](https://postgis.net/documentation/)
|
||||||
|
- [Original README.md](README.md) for production setup
|
||||||
@@ -1,8 +1,36 @@
|
|||||||
|
# compose.dev.yml
|
||||||
|
# ============================================================================
|
||||||
|
# DEVELOPMENT DOCKER COMPOSE CONFIGURATION
|
||||||
|
# ============================================================================
|
||||||
|
# This file defines the local development environment using Docker/Podman.
|
||||||
|
#
|
||||||
|
# Services:
|
||||||
|
# - app: Node.js application (API + Frontend)
|
||||||
|
# - postgres: PostgreSQL 15 with PostGIS extension
|
||||||
|
# - redis: Redis for caching and job queues
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# Start all services: podman-compose -f compose.dev.yml up -d
|
||||||
|
# Stop all services: podman-compose -f compose.dev.yml down
|
||||||
|
# View logs: podman-compose -f compose.dev.yml logs -f
|
||||||
|
# Reset everything: podman-compose -f compose.dev.yml down -v
|
||||||
|
#
|
||||||
|
# VS Code Dev Containers:
|
||||||
|
# This file is referenced by .devcontainer/devcontainer.json for seamless
|
||||||
|
# VS Code integration. Open the project in VS Code and use "Reopen in Container".
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
version: '3.8'
|
version: '3.8'
|
||||||
|
|
||||||
services:
|
services:
|
||||||
|
# ===================
|
||||||
|
# Application Service
|
||||||
|
# ===================
|
||||||
app:
|
app:
|
||||||
container_name: flyer-crawler-dev
|
container_name: flyer-crawler-dev
|
||||||
|
# Use pre-built image if available, otherwise build from Dockerfile.dev
|
||||||
|
# To build: podman build -f Dockerfile.dev -t flyer-crawler-dev:latest .
|
||||||
|
image: localhost/flyer-crawler-dev:latest
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: Dockerfile.dev
|
dockerfile: Dockerfile.dev
|
||||||
@@ -16,21 +44,44 @@ services:
|
|||||||
- '3000:3000' # Frontend (Vite default)
|
- '3000:3000' # Frontend (Vite default)
|
||||||
- '3001:3001' # Backend API
|
- '3001:3001' # Backend API
|
||||||
environment:
|
environment:
|
||||||
|
# Core settings
|
||||||
- NODE_ENV=development
|
- NODE_ENV=development
|
||||||
|
# Database - use service name for Docker networking
|
||||||
- DB_HOST=postgres
|
- DB_HOST=postgres
|
||||||
|
- DB_PORT=5432
|
||||||
- DB_USER=postgres
|
- DB_USER=postgres
|
||||||
- DB_PASSWORD=postgres
|
- DB_PASSWORD=postgres
|
||||||
- DB_NAME=flyer_crawler_dev
|
- DB_NAME=flyer_crawler_dev
|
||||||
|
# Redis - use service name for Docker networking
|
||||||
- REDIS_URL=redis://redis:6379
|
- REDIS_URL=redis://redis:6379
|
||||||
# Add other secrets here or use a .env file
|
- REDIS_HOST=redis
|
||||||
|
- REDIS_PORT=6379
|
||||||
|
# Frontend URL for CORS
|
||||||
|
- FRONTEND_URL=http://localhost:3000
|
||||||
|
# Default JWT secret for development (override in production!)
|
||||||
|
- JWT_SECRET=dev-jwt-secret-change-in-production
|
||||||
|
# Worker settings
|
||||||
|
- WORKER_LOCK_DURATION=120000
|
||||||
depends_on:
|
depends_on:
|
||||||
- postgres
|
postgres:
|
||||||
- redis
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
# Keep container running so VS Code can attach
|
# Keep container running so VS Code can attach
|
||||||
command: tail -f /dev/null
|
command: tail -f /dev/null
|
||||||
|
# Healthcheck for the app (once it's running)
|
||||||
|
healthcheck:
|
||||||
|
test: ['CMD', 'curl', '-f', 'http://localhost:3001/api/health', '||', 'exit', '0']
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 60s
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# PostgreSQL Database
|
||||||
|
# ===================
|
||||||
postgres:
|
postgres:
|
||||||
image: docker.io/library/postgis/postgis:15-3.4
|
image: docker.io/postgis/postgis:15-3.4
|
||||||
container_name: flyer-crawler-postgres
|
container_name: flyer-crawler-postgres
|
||||||
ports:
|
ports:
|
||||||
- '5432:5432'
|
- '5432:5432'
|
||||||
@@ -38,15 +89,54 @@ services:
|
|||||||
POSTGRES_USER: postgres
|
POSTGRES_USER: postgres
|
||||||
POSTGRES_PASSWORD: postgres
|
POSTGRES_PASSWORD: postgres
|
||||||
POSTGRES_DB: flyer_crawler_dev
|
POSTGRES_DB: flyer_crawler_dev
|
||||||
|
# Optimize for development
|
||||||
|
POSTGRES_INITDB_ARGS: '--encoding=UTF8 --locale=C'
|
||||||
volumes:
|
volumes:
|
||||||
- postgres_data:/var/lib/postgresql/data
|
- postgres_data:/var/lib/postgresql/data
|
||||||
|
# Mount the extensions init script to run on first database creation
|
||||||
|
# The 00- prefix ensures it runs before any other init scripts
|
||||||
|
- ./sql/00-init-extensions.sql:/docker-entrypoint-initdb.d/00-init-extensions.sql:ro
|
||||||
|
# Healthcheck ensures postgres is ready before app starts
|
||||||
|
healthcheck:
|
||||||
|
test: ['CMD-SHELL', 'pg_isready -U postgres -d flyer_crawler_dev']
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 10
|
||||||
|
start_period: 10s
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Redis Cache/Queue
|
||||||
|
# ===================
|
||||||
redis:
|
redis:
|
||||||
image: docker.io/library/redis:alpine
|
image: docker.io/library/redis:alpine
|
||||||
container_name: flyer-crawler-redis
|
container_name: flyer-crawler-redis
|
||||||
ports:
|
ports:
|
||||||
- '6379:6379'
|
- '6379:6379'
|
||||||
|
volumes:
|
||||||
|
- redis_data:/data
|
||||||
|
# Healthcheck ensures redis is ready before app starts
|
||||||
|
healthcheck:
|
||||||
|
test: ['CMD', 'redis-cli', 'ping']
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 10
|
||||||
|
start_period: 5s
|
||||||
|
# Enable persistence for development data
|
||||||
|
command: redis-server --appendonly yes
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Named Volumes
|
||||||
|
# ===================
|
||||||
volumes:
|
volumes:
|
||||||
postgres_data:
|
postgres_data:
|
||||||
|
name: flyer-crawler-postgres-data
|
||||||
|
redis_data:
|
||||||
|
name: flyer-crawler-redis-data
|
||||||
node_modules_data:
|
node_modules_data:
|
||||||
|
name: flyer-crawler-node-modules
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Network Configuration
|
||||||
|
# ===================
|
||||||
|
# All services are on the default bridge network.
|
||||||
|
# Use service names (postgres, redis) as hostnames.
|
||||||
|
|||||||
@@ -4,6 +4,8 @@
|
|||||||
|
|
||||||
**Status**: Accepted
|
**Status**: Accepted
|
||||||
|
|
||||||
|
**Implemented**: 2026-01-07
|
||||||
|
|
||||||
## Context
|
## Context
|
||||||
|
|
||||||
Our application has experienced a recurring pattern of bugs and brittle tests related to error handling, specifically for "resource not found" scenarios. The root causes identified are:
|
Our application has experienced a recurring pattern of bugs and brittle tests related to error handling, specifically for "resource not found" scenarios. The root causes identified are:
|
||||||
@@ -41,3 +43,86 @@ We will adopt a strict, consistent error-handling contract for the service and r
|
|||||||
|
|
||||||
**Initial Refactoring**: Requires a one-time effort to audit and refactor all existing repository methods to conform to this new standard.
|
**Initial Refactoring**: Requires a one-time effort to audit and refactor all existing repository methods to conform to this new standard.
|
||||||
**Convention Adherence**: Developers must be aware of and adhere to this convention. This ADR serves as the primary documentation for this pattern.
|
**Convention Adherence**: Developers must be aware of and adhere to this convention. This ADR serves as the primary documentation for this pattern.
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### Custom Error Types
|
||||||
|
|
||||||
|
All custom errors are defined in `src/services/db/errors.db.ts`:
|
||||||
|
|
||||||
|
| Error Class | HTTP Status | PostgreSQL Code | Use Case |
|
||||||
|
| -------------------------------- | ----------- | --------------- | ------------------------------- |
|
||||||
|
| `NotFoundError` | 404 | - | Resource not found |
|
||||||
|
| `UniqueConstraintError` | 409 | 23505 | Duplicate key violation |
|
||||||
|
| `ForeignKeyConstraintError` | 400 | 23503 | Referenced record doesn't exist |
|
||||||
|
| `NotNullConstraintError` | 400 | 23502 | Required field is null |
|
||||||
|
| `CheckConstraintError` | 400 | 23514 | Check constraint violated |
|
||||||
|
| `InvalidTextRepresentationError` | 400 | 22P02 | Invalid data type format |
|
||||||
|
| `NumericValueOutOfRangeError` | 400 | 22003 | Numeric overflow |
|
||||||
|
| `ValidationError` | 400 | - | Request validation failed |
|
||||||
|
| `ForbiddenError` | 403 | - | Access denied |
|
||||||
|
|
||||||
|
### Error Handler Middleware
|
||||||
|
|
||||||
|
The centralized error handler in `src/middleware/errorHandler.ts`:
|
||||||
|
|
||||||
|
1. Catches all errors from route handlers
|
||||||
|
2. Maps custom error types to HTTP status codes
|
||||||
|
3. Logs errors with appropriate severity (warn for 4xx, error for 5xx)
|
||||||
|
4. Returns consistent JSON error responses
|
||||||
|
5. Includes error ID for server errors (for support correlation)
|
||||||
|
|
||||||
|
### Usage Pattern
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In repository (throws NotFoundError)
|
||||||
|
async function getUserById(id: number): Promise<User> {
|
||||||
|
const result = await pool.query('SELECT * FROM users WHERE id = $1', [id]);
|
||||||
|
if (result.rows.length === 0) {
|
||||||
|
throw new NotFoundError(`User with ID ${id} not found.`);
|
||||||
|
}
|
||||||
|
return result.rows[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
// In route handler (simple try/catch)
|
||||||
|
router.get('/:id', async (req, res, next) => {
|
||||||
|
try {
|
||||||
|
const user = await getUserById(req.params.id);
|
||||||
|
res.json(user);
|
||||||
|
} catch (error) {
|
||||||
|
next(error); // errorHandler maps NotFoundError to 404
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Centralized Error Handler Helper
|
||||||
|
|
||||||
|
The `handleDbError` function in `src/services/db/errors.db.ts` provides centralized PostgreSQL error handling:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { handleDbError } from './errors.db';
|
||||||
|
|
||||||
|
try {
|
||||||
|
await pool.query('INSERT INTO users (email) VALUES ($1)', [email]);
|
||||||
|
} catch (error) {
|
||||||
|
handleDbError(
|
||||||
|
error,
|
||||||
|
logger,
|
||||||
|
'Failed to create user',
|
||||||
|
{ email },
|
||||||
|
{
|
||||||
|
uniqueMessage: 'A user with this email already exists.',
|
||||||
|
defaultMessage: 'Failed to create user.',
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Files
|
||||||
|
|
||||||
|
- `src/services/db/errors.db.ts` - Custom error classes and `handleDbError` utility
|
||||||
|
- `src/middleware/errorHandler.ts` - Centralized Express error handling middleware
|
||||||
|
|
||||||
|
## Related ADRs
|
||||||
|
|
||||||
|
- [ADR-034](./0034-repository-pattern-standards.md) - Repository Pattern Standards (extends this ADR)
|
||||||
|
|||||||
@@ -2,7 +2,9 @@
|
|||||||
|
|
||||||
**Date**: 2025-12-12
|
**Date**: 2025-12-12
|
||||||
|
|
||||||
**Status**: Proposed
|
**Status**: Accepted
|
||||||
|
|
||||||
|
**Implemented**: 2026-01-07
|
||||||
|
|
||||||
## Context
|
## Context
|
||||||
|
|
||||||
@@ -58,3 +60,109 @@ async function registerUserAndCreateDefaultList(userData) {
|
|||||||
|
|
||||||
**Learning Curve**: Developers will need to learn and adopt the `withTransaction` pattern for all transactional database work.
|
**Learning Curve**: Developers will need to learn and adopt the `withTransaction` pattern for all transactional database work.
|
||||||
**Refactoring Effort**: Existing methods that manually manage transactions (`createUser`, `createBudget`, etc.) will need to be refactored to use the new pattern.
|
**Refactoring Effort**: Existing methods that manually manage transactions (`createUser`, `createBudget`, etc.) will need to be refactored to use the new pattern.
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### The `withTransaction` Helper
|
||||||
|
|
||||||
|
Located in `src/services/db/connection.db.ts`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export async function withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T> {
|
||||||
|
const client = await getPool().connect();
|
||||||
|
try {
|
||||||
|
await client.query('BEGIN');
|
||||||
|
const result = await callback(client);
|
||||||
|
await client.query('COMMIT');
|
||||||
|
return result;
|
||||||
|
} catch (error) {
|
||||||
|
await client.query('ROLLBACK');
|
||||||
|
logger.error({ err: error }, 'Transaction failed, rolling back.');
|
||||||
|
throw error;
|
||||||
|
} finally {
|
||||||
|
client.release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Repository Pattern for Transaction Support
|
||||||
|
|
||||||
|
Repository methods accept an optional `PoolClient` parameter:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Function-based approach
|
||||||
|
export async function createUser(userData: CreateUserInput, client?: PoolClient): Promise<User> {
|
||||||
|
const queryable = client || getPool();
|
||||||
|
const result = await queryable.query<User>(
|
||||||
|
'INSERT INTO users (email, password_hash) VALUES ($1, $2) RETURNING *',
|
||||||
|
[userData.email, userData.passwordHash],
|
||||||
|
);
|
||||||
|
return result.rows[0];
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Transactional Service Example
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// src/services/authService.ts
|
||||||
|
import { withTransaction } from './db/connection.db';
|
||||||
|
import { createUser, createProfile } from './db';
|
||||||
|
|
||||||
|
export async function registerUserWithProfile(
|
||||||
|
email: string,
|
||||||
|
password: string,
|
||||||
|
profileData: ProfileInput,
|
||||||
|
): Promise<UserWithProfile> {
|
||||||
|
return withTransaction(async (client) => {
|
||||||
|
// All operations use the same transactional client
|
||||||
|
const user = await createUser({ email, password }, client);
|
||||||
|
const profile = await createProfile(
|
||||||
|
{
|
||||||
|
userId: user.user_id,
|
||||||
|
...profileData,
|
||||||
|
},
|
||||||
|
client,
|
||||||
|
);
|
||||||
|
|
||||||
|
return { user, profile };
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Services Using `withTransaction`
|
||||||
|
|
||||||
|
| Service | Function | Operations |
|
||||||
|
| ------------------------- | ----------------------- | ----------------------------------- |
|
||||||
|
| `authService` | `registerAndLoginUser` | Create user + profile + preferences |
|
||||||
|
| `userService` | `updateUserWithProfile` | Update user + profile atomically |
|
||||||
|
| `flyerPersistenceService` | `saveFlyer` | Create flyer + items + metadata |
|
||||||
|
| `shoppingService` | `createListWithItems` | Create list + initial items |
|
||||||
|
| `gamificationService` | `awardAchievement` | Create achievement + update points |
|
||||||
|
|
||||||
|
### Connection Pool Configuration
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const poolConfig: PoolConfig = {
|
||||||
|
max: 20, // Max clients in pool
|
||||||
|
idleTimeoutMillis: 30000, // Close idle clients after 30s
|
||||||
|
connectionTimeoutMillis: 2000, // Fail connection attempts after 2s
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pool Status Monitoring
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { getPoolStatus } from './db/connection.db';
|
||||||
|
|
||||||
|
const status = getPoolStatus();
|
||||||
|
// { totalCount: 20, idleCount: 15, waitingCount: 0 }
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Files
|
||||||
|
|
||||||
|
- `src/services/db/connection.db.ts` - `getPool()`, `withTransaction()`, `getPoolStatus()`
|
||||||
|
|
||||||
|
## Related ADRs
|
||||||
|
|
||||||
|
- [ADR-001](./0001-standardized-error-handling.md) - Error handling within transactions
|
||||||
|
- [ADR-034](./0034-repository-pattern-standards.md) - Repository patterns for transaction participation
|
||||||
|
|||||||
@@ -2,7 +2,9 @@
|
|||||||
|
|
||||||
**Date**: 2025-12-12
|
**Date**: 2025-12-12
|
||||||
|
|
||||||
**Status**: Proposed
|
**Status**: Accepted
|
||||||
|
|
||||||
|
**Implemented**: 2026-01-07
|
||||||
|
|
||||||
## Context
|
## Context
|
||||||
|
|
||||||
@@ -77,3 +79,140 @@ router.get('/:id', validateRequest(getFlyerSchema), async (req, res, next) => {
|
|||||||
**New Dependency**: Introduces `zod` as a new project dependency.
|
**New Dependency**: Introduces `zod` as a new project dependency.
|
||||||
**Learning Curve**: Developers need to learn the `zod` schema definition syntax.
|
**Learning Curve**: Developers need to learn the `zod` schema definition syntax.
|
||||||
**Refactoring Effort**: Requires a one-time effort to create schemas and refactor all existing routes to use the `validateRequest` middleware.
|
**Refactoring Effort**: Requires a one-time effort to create schemas and refactor all existing routes to use the `validateRequest` middleware.
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### The `validateRequest` Middleware
|
||||||
|
|
||||||
|
Located in `src/middleware/validation.middleware.ts`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export const validateRequest =
|
||||||
|
(schema: ZodObject<z.ZodRawShape>) => async (req: Request, res: Response, next: NextFunction) => {
|
||||||
|
try {
|
||||||
|
const { params, query, body } = await schema.parseAsync({
|
||||||
|
params: req.params,
|
||||||
|
query: req.query,
|
||||||
|
body: req.body,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Merge parsed data back into request
|
||||||
|
Object.keys(req.params).forEach((key) => delete req.params[key]);
|
||||||
|
Object.assign(req.params, params);
|
||||||
|
Object.keys(req.query).forEach((key) => delete req.query[key]);
|
||||||
|
Object.assign(req.query, query);
|
||||||
|
req.body = body;
|
||||||
|
|
||||||
|
return next();
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof ZodError) {
|
||||||
|
const validationIssues = error.issues.map((issue) => ({
|
||||||
|
...issue,
|
||||||
|
path: issue.path.map((p) => String(p)),
|
||||||
|
}));
|
||||||
|
return next(new ValidationError(validationIssues));
|
||||||
|
}
|
||||||
|
return next(error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Common Zod Patterns
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { z } from 'zod';
|
||||||
|
import { requiredString } from '../utils/zodUtils';
|
||||||
|
|
||||||
|
// String that coerces to positive integer (for ID params)
|
||||||
|
const idParam = z.string().pipe(z.coerce.number().int().positive());
|
||||||
|
|
||||||
|
// Pagination query params with defaults
|
||||||
|
const paginationQuery = z.object({
|
||||||
|
limit: z.coerce.number().int().positive().max(100).default(20),
|
||||||
|
offset: z.coerce.number().int().nonnegative().default(0),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Email with sanitization
|
||||||
|
const emailSchema = z.string().trim().toLowerCase().email('A valid email is required.');
|
||||||
|
|
||||||
|
// Password with strength validation
|
||||||
|
const passwordSchema = z
|
||||||
|
.string()
|
||||||
|
.trim()
|
||||||
|
.min(8, 'Password must be at least 8 characters long.')
|
||||||
|
.superRefine((password, ctx) => {
|
||||||
|
const strength = validatePasswordStrength(password);
|
||||||
|
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Optional string that converts empty string to undefined
|
||||||
|
const optionalString = z.preprocess(
|
||||||
|
(val) => (val === '' ? undefined : val),
|
||||||
|
z.string().trim().optional(),
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Routes Using `validateRequest`
|
||||||
|
|
||||||
|
All API routes use the validation middleware:
|
||||||
|
|
||||||
|
| Router | Schemas Defined | Validated Endpoints |
|
||||||
|
| ------------------------ | --------------- | -------------------------------------------------------------------------------- |
|
||||||
|
| `auth.routes.ts` | 5 | `/register`, `/login`, `/forgot-password`, `/reset-password`, `/change-password` |
|
||||||
|
| `user.routes.ts` | 4 | `/profile`, `/address`, `/preferences`, `/notifications` |
|
||||||
|
| `flyer.routes.ts` | 6 | `GET /:id`, `GET /`, `GET /:id/items`, `DELETE /:id` |
|
||||||
|
| `budget.routes.ts` | 5 | `/`, `/:id`, `/batch`, `/categories` |
|
||||||
|
| `recipe.routes.ts` | 4 | `GET /`, `GET /:id`, `POST /`, `PATCH /:id` |
|
||||||
|
| `admin.routes.ts` | 8 | Various admin endpoints |
|
||||||
|
| `ai.routes.ts` | 3 | `/upload-and-process`, `/analyze`, `/jobs/:jobId/status` |
|
||||||
|
| `gamification.routes.ts` | 3 | `/achievements`, `/leaderboard`, `/points` |
|
||||||
|
|
||||||
|
### Validation Error Response Format
|
||||||
|
|
||||||
|
When validation fails, the `errorHandler` returns:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"message": "The request data is invalid.",
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"path": ["body", "email"],
|
||||||
|
"message": "A valid email is required."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["body", "password"],
|
||||||
|
"message": "Password must be at least 8 characters long."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
HTTP Status: `400 Bad Request`
|
||||||
|
|
||||||
|
### Zod Utility Functions
|
||||||
|
|
||||||
|
Located in `src/utils/zodUtils.ts`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// String that rejects empty strings
|
||||||
|
export const requiredString = (message?: string) =>
|
||||||
|
z.string().min(1, message || 'This field is required.');
|
||||||
|
|
||||||
|
// Number from string with validation
|
||||||
|
export const numericString = z.string().pipe(z.coerce.number());
|
||||||
|
|
||||||
|
// Boolean from string ('true'/'false')
|
||||||
|
export const booleanString = z.enum(['true', 'false']).transform((v) => v === 'true');
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Files
|
||||||
|
|
||||||
|
- `src/middleware/validation.middleware.ts` - The `validateRequest` middleware
|
||||||
|
- `src/services/db/errors.db.ts` - `ValidationError` class definition
|
||||||
|
- `src/middleware/errorHandler.ts` - Error formatting for validation errors
|
||||||
|
- `src/utils/zodUtils.ts` - Reusable Zod schema utilities
|
||||||
|
|
||||||
|
## Related ADRs
|
||||||
|
|
||||||
|
- [ADR-001](./0001-standardized-error-handling.md) - Error handling for validation errors
|
||||||
|
- [ADR-032](./0032-rate-limiting-strategy.md) - Rate limiting applied alongside validation
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff. Show More
Reference in New Issue
Block a user