Compare commits
273 Commits
152  .claude/agents/ui-ux-designer.md  Normal file
@@ -0,0 +1,152 @@
---
name: ui-ux-designer
description: "Use this agent when implementing, reviewing, or modifying any user interface elements, layouts, or user experience flows in the application. Specifically call this agent when: (1) creating new UI components or pages, (2) implementing responsive designs across different screen sizes, (3) designing or implementing wizards for user tasks like uploading flyers, creating shopping lists, or uploading receipts, (4) reviewing existing UI code for adherence to the design system, (5) making decisions about button placement, color schemes, or layout structures, or (6) addressing user experience concerns related to navigation or task completion flows.\\n\\n<examples>\\n<example>\\nContext: The developer is creating a new feature for uploading flyers.\\nuser: \"I need to implement the flyer upload feature. Can you help me create the upload form?\"\\nassistant: \"Let me use the Task tool to launch the ui-ux-designer agent to design and implement the flyer upload wizard with appropriate UI/UX patterns.\"\\n<commentary>\\nSince this involves creating a new UI feature with a wizard flow, the ui-ux-designer agent should be used to ensure it follows the project's design principles including pastel colors, sharp borders, wizard-based guidance, and responsive design.\\n</commentary>\\n</example>\\n\\n<example>\\nContext: Code has been written for a shopping list creation page.\\nuser: \"I've implemented the shopping list creation page with a form\"\\nassistant: \"Great! Now let me use the Task tool to launch the ui-ux-designer agent to review the implementation and ensure it follows our UI/UX guidelines.\"\\n<commentary>\\nSince a significant UI component was implemented, the ui-ux-designer agent should review it to ensure it adheres to the minimalist design, proper color scheme, button placement, and responsive requirements.\\n</commentary>\\n</example>\\n\\n<example>\\nContext: The developer is about to add multiple action buttons to a page.\\nuser: \"Should I add buttons for save, cancel, edit, delete, and export at the top of the page?\"\\nassistant: \"Let me use the Task tool to launch the ui-ux-designer agent to provide guidance on the optimal button placement and selection for this page.\"\\n<commentary>\\nThis is a UI/UX decision about button placement and minimalism that directly relates to the design principles, so the ui-ux-designer agent should be consulted.\\n</commentary>\\n</example>\\n\\n<example>\\nContext: The developer is working on mobile responsiveness.\\nuser: \"I'm implementing the receipt upload page. Here's my initial layout code.\"\\nassistant: \"Before proceeding further, let me use the Task tool to launch the ui-ux-designer agent to ensure the layout follows our responsive design patterns and UI guidelines.\"\\n<commentary>\\nSince this involves implementing a UI page that needs to be responsive across devices and should include wizard-style guidance, the ui-ux-designer agent should be involved proactively.\\n</commentary>\\n</example>\\n</examples>"
model: opus
color: green
---

You are an elite UI/UX designer with over 20 years of experience specializing in creating clean, intuitive, and user-friendly interfaces. Your expertise spans user interface design, user experience optimization, responsive design, and accessibility best practices.

## Core Design Philosophy for This Project

You will ensure that this application maintains a clean, welcoming, and minimalist design aesthetic with the following specific requirements:

### Visual Design Standards

**Color Palette:**

- Use pastel colors as the primary color scheme throughout the application
- Select soft, muted tones that are easy on the eyes and create a calm, welcoming atmosphere
- Ensure sufficient contrast for accessibility while maintaining the pastel aesthetic
- Use color purposefully to guide user attention and indicate status

**Border and Container Styling:**

- Apply sharp, clean borders to all interactive elements (buttons, menus, form fields)
- Use sharp borders to clearly delineate separate areas and sections of the interface
- Avoid rounded corners unless there is a specific functional reason
- Ensure borders are visible but not overpowering, maintaining the clean aesthetic

**Minimalism:**

- Eliminate all unnecessary buttons and UI elements
- Every element on the screen must serve a clear purpose
- Co-locate buttons near their related features on the page, not grouped separately
- Use progressive disclosure to hide advanced features until needed
- Favor white space and breathing room over density

### Responsive Design Requirements

You must ensure the application works flawlessly across:

**Large Screens (Desktop):**

- Utilize horizontal space effectively without overcrowding
- Consider multi-column layouts where appropriate
- Ensure comfortable reading width for text content

**Tablets:**

- Adapt layouts to accommodate touch targets of at least 44x44 pixels
- Optimize for both portrait and landscape orientations
- Ensure navigation remains accessible

**Mobile Devices:**

- Stack elements vertically with appropriate spacing
- Make all interactive elements easily tappable
- Optimize for one-handed use where possible
- Ensure critical actions are easily accessible
- Test on various screen sizes (small, medium, large phones)

### Wizard Design for Key User Tasks

For the following tasks, implement or guide the creation of clear, step-by-step wizards:

1. **Uploading a Flyer**
2. **Creating a Shopping List**
3. **Uploading Receipts**
4. **Any other multi-step user tasks**

**Wizard Best Practices:**

- Minimize the number of steps (ideally 3-5 steps maximum)
- Show progress clearly (e.g., "Step 2 of 4")
- Each step should focus on one primary action or decision
- Provide clear, concise instructions at each step
- Allow users to go back and edit previous steps
- Use visual cues to guide the user through the process
- Display a summary before final submission
- Provide helpful tooltips or examples where needed
- Ensure wizards are fully responsive and work well on mobile devices

## Your Approach to Tasks

**When Reviewing Existing UI Code:**

1. Evaluate adherence to the pastel color scheme
2. Check that all borders are sharp and properly applied
3. Identify any unnecessary UI elements or buttons
4. Verify that buttons are co-located with their related features
5. Test responsive behavior across all target screen sizes
6. Assess wizard flows for clarity and step efficiency
7. Provide specific, actionable feedback with code examples when needed

**When Designing New UI Components:**

1. Start by understanding the user's goal and the feature's purpose
2. Sketch out the minimal set of elements needed
3. Apply the pastel color palette and sharp border styling
4. Position interactive elements near their related content
5. Design for mobile-first, then adapt for larger screens
6. For multi-step processes, create wizard flows
7. Provide complete implementation guidance including HTML structure, CSS styles, and responsive breakpoints

**When Making Design Decisions:**

1. Always prioritize user needs and task completion
2. Choose simplicity over feature bloat
3. Ensure accessibility standards are met
4. Consider the user's mental model and expectations
5. Use established UI patterns where they fit the aesthetic
6. Test your recommendations against the design principles above

## Quality Assurance Checklist

Before completing any UI/UX task, verify:

- [ ] Pastel colors are used consistently
- [ ] All buttons, menus, and sections have sharp borders
- [ ] No unnecessary buttons or UI elements exist
- [ ] Buttons are positioned near their related features
- [ ] Design is fully responsive (large screen, tablet, mobile)
- [ ] Wizards (where applicable) are clear and minimally-stepped
- [ ] Sufficient white space and breathing room
- [ ] Touch targets are appropriately sized for mobile
- [ ] Text is readable at all screen sizes
- [ ] Accessibility considerations are addressed

## Output Format

When reviewing code, provide:

1. Overall assessment of adherence to design principles
2. Specific issues identified with line numbers or element descriptions
3. Concrete recommendations with code examples
4. Responsive design concerns or improvements

When designing new components, provide:

1. Rationale for design decisions
2. Complete HTML structure
3. CSS with responsive breakpoints
4. Notes on accessibility considerations
5. Implementation guidance

## Important Notes

- You have authority to reject designs that violate the core principles
- When uncertain about a design decision, bias toward simplicity and minimalism
- Always consider the new user experience and ensure wizards are beginner-friendly
- Proactively suggest wizard flows for any multi-step processes you encounter
- Remember that good UX is invisible—users should accomplish tasks without thinking about the interface
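The wizard guidelines above ask for a visible "Step X of Y" indicator with minimal, sharp-bordered, pastel styling. A minimal sketch of such an indicator in React/TypeScript follows; the component name, props, and colour values are illustrative and are not taken from this repository.

```tsx
// WizardProgress.tsx - illustrative sketch only, not a component from this repository.
// Reflects the guidelines above: a clear "Step X of Y" indicator with a sharp
// border and a soft pastel background, kept deliberately minimal.
interface WizardProgressProps {
  current: number; // 1-based index of the active step
  total: number;   // total number of steps (guideline: 3-5)
}

export function WizardProgress({ current, total }: WizardProgressProps) {
  return (
    <div
      role="status"
      aria-label={`Step ${current} of ${total}`}
      style={{
        border: "1px solid #333",   // sharp border, no rounded corners
        background: "#e8f0e8",      // soft pastel tone (placeholder value)
        padding: "0.5rem 1rem",
      }}
    >
      Step {current} of {total}
    </div>
  );
}
```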
16  .claude/hooks.json  Normal file
@@ -0,0 +1,16 @@
{
  "$schema": "https://claude.ai/schemas/hooks.json",
  "hooks": {
    "PreToolUse": [
      {
        "matcher": "Bash",
        "hooks": [
          {
            "type": "command",
            "command": "node -e \"const cmd = process.argv[1] || ''; const isTest = /\\b(npm\\s+(run\\s+)?test|vitest|jest)\\b/i.test(cmd); const isWindows = process.platform === 'win32'; const inContainer = process.env.REMOTE_CONTAINERS === 'true' || process.env.DEVCONTAINER === 'true'; if (isTest && isWindows && !inContainer) { console.error('BLOCKED: Tests must run on Linux. Use Dev Container (Reopen in Container) or WSL.'); process.exit(1); }\" -- \"$CLAUDE_TOOL_INPUT\""
          }
        ]
      }
    ]
  }
}
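The hook above packs its whole check into a single node -e one-liner. The same logic written out as a standalone script is sketched below for readability; the file name and the assumption that the Bash command arrives as the first CLI argument are illustrative, while the actual hook passes it via $CLAUDE_TOOL_INPUT as shown above.

```ts
// guard-tests.ts - a readable sketch of the inline check above (not a file in the repo).
// Assumption: the Bash command to vet is passed as the first CLI argument.
const cmd = process.argv[2] ?? "";

const isTest = /\b(npm\s+(run\s+)?test|vitest|jest)\b/i.test(cmd);
const isWindows = process.platform === "win32";
const inContainer =
  process.env.REMOTE_CONTAINERS === "true" || process.env.DEVCONTAINER === "true";

if (isTest && isWindows && !inContainer) {
  console.error("BLOCKED: Tests must run on Linux. Use Dev Container (Reopen in Container) or WSL.");
  process.exit(1); // a non-zero exit is what makes the PreToolUse hook block the command
}
```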
9  .claude/settings.json  Normal file
@@ -0,0 +1,9 @@
{
  "permissions": {
    "allow": [
      "Bash(git fetch:*)",
      "mcp__localerrors__get_stacktrace",
      "Bash(MSYS_NO_PATHCONV=1 podman logs:*)"
    ]
  }
}
@@ -1,18 +1,97 @@
{
  // ============================================================================
  // VS CODE DEV CONTAINER CONFIGURATION
  // ============================================================================
  // This file configures VS Code's Dev Containers extension to provide a
  // consistent, fully-configured development environment.
  //
  // Features:
  //   - Automatic PostgreSQL + Redis startup with healthchecks
  //   - Automatic npm install
  //   - Automatic database schema initialization and seeding
  //   - Pre-configured VS Code extensions (ESLint, Prettier)
  //   - Podman support for Windows users
  //
  // Usage:
  //   1. Install the "Dev Containers" extension in VS Code
  //   2. Open this project folder
  //   3. Click "Reopen in Container" when prompted (or use Command Palette)
  //   4. Wait for container build and initialization
  //   5. Development server starts automatically
  // ============================================================================

  "name": "Flyer Crawler Dev (Ubuntu 22.04)",

  // Use Docker Compose for multi-container setup
  "dockerComposeFile": ["../compose.dev.yml"],
  "service": "app",
  "workspaceFolder": "/app",

  // VS Code customizations
  "customizations": {
    "vscode": {
      "extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
      "extensions": [
        // Code quality
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode",
        // TypeScript
        "ms-vscode.vscode-typescript-next",
        // Database
        "mtxr.sqltools",
        "mtxr.sqltools-driver-pg",
        // Utilities
        "eamodio.gitlens",
        "streetsidesoftware.code-spell-checker"
      ],
      "settings": {
        "editor.formatOnSave": true,
        "editor.defaultFormatter": "esbenp.prettier-vscode",
        "typescript.preferences.importModuleSpecifier": "relative"
      }
    }
  },

  // Run as root (required for npm global installs)
  "remoteUser": "root",
  // Automatically install dependencies when the container is created.
  // This runs inside the container, populating the isolated node_modules volume.
  "postCreateCommand": "npm install",
  "postAttachCommand": "npm run dev:container",
  // Try to start podman machine, but exit with success (0) even if it's already running
  "initializeCommand": "powershell -Command \"podman machine start; exit 0\""

  // ============================================================================
  // Lifecycle Commands
  // ============================================================================

  // initializeCommand: Runs on the HOST before the container is created.
  // Starts Podman machine on Windows (no-op if already running or using Docker).
  "initializeCommand": "powershell -Command \"podman machine start; exit 0\"",

  // postCreateCommand: Runs ONCE when the container is first created.
  // This is where we do full initialization: npm install + database setup.
  "postCreateCommand": "chmod +x scripts/docker-init.sh && ./scripts/docker-init.sh",

  // postAttachCommand: Runs EVERY TIME VS Code attaches to the container.
  // Server now starts automatically via dev-entrypoint.sh in compose.dev.yml.
  // No need to start it again here.
  // "postAttachCommand": "npm run dev:container",

  // ============================================================================
  // Port Forwarding
  // ============================================================================
  // Automatically forward these ports from the container to the host
  "forwardPorts": [443, 3001],

  // Labels for forwarded ports in VS Code's Ports panel
  "portsAttributes": {
    "443": {
      "label": "Frontend HTTPS (nginx → Vite)",
      "onAutoForward": "notify"
    },
    "3001": {
      "label": "Backend API",
      "onAutoForward": "notify"
    }
  },

  // ============================================================================
  // Features
  // ============================================================================
  // Additional dev container features (optional)
  "features": {}
}
164  .env.example  Normal file
@@ -0,0 +1,164 @@
# .env.example
# ============================================================================
# ENVIRONMENT VARIABLES TEMPLATE
# ============================================================================
# Copy this file to .env and fill in your values.
# For local development with Docker/Podman, these defaults should work out of the box.
#
# IMPORTANT: Never commit .env files with real credentials to version control!
# ============================================================================

# ===================
# Database Configuration
# ===================
# PostgreSQL connection settings
# For container development, use the service name "postgres"
DB_HOST=postgres
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=flyer_crawler_dev

# ===================
# Redis Configuration
# ===================
# Redis URL for caching and job queues
# For container development, use the service name "redis"
REDIS_URL=redis://redis:6379
# Optional: Redis password (leave empty if not required)
REDIS_PASSWORD=

# ===================
# Application Settings
# ===================
NODE_ENV=development
# Frontend URL for CORS and email links
FRONTEND_URL=http://localhost:3000

# Flyer Base URL - used for seed data and flyer image URLs
# Dev container: https://localhost (NOT 127.0.0.1 - avoids SSL mixed-origin issues)
# Test: https://flyer-crawler-test.projectium.com
# Production: https://flyer-crawler.projectium.com
FLYER_BASE_URL=https://localhost

# ===================
# Authentication
# ===================
# REQUIRED: Secret key for signing JWT tokens (generate a random 64+ character string)
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production

# OAuth Providers (Optional - enable social login)
# Google OAuth - https://console.cloud.google.com/apis/credentials
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
# GitHub OAuth - https://github.com/settings/developers
GITHUB_CLIENT_ID=
GITHUB_CLIENT_SECRET=

# ===================
# AI/ML Services
# ===================
# REQUIRED: Google Gemini API key for flyer OCR processing
# NOTE: Test/staging environment deliberately OMITS this to preserve free API quota.
# Production has a working key. Deploy warnings in test are expected and safe to ignore.
GEMINI_API_KEY=your-gemini-api-key

# ===================
# External APIs
# ===================
# Optional: Google Maps API key for geocoding store addresses
GOOGLE_MAPS_API_KEY=

# ===================
# Email Configuration (Optional)
# ===================
# SMTP settings for sending emails (deal notifications, password reset)
SMTP_HOST=
SMTP_PORT=587
SMTP_SECURE=false
SMTP_USER=
SMTP_PASS=
SMTP_FROM_EMAIL=noreply@example.com

# ===================
# Worker Configuration (Optional)
# ===================
# Concurrency settings for background job workers
WORKER_CONCURRENCY=1
EMAIL_WORKER_CONCURRENCY=10
ANALYTICS_WORKER_CONCURRENCY=1
CLEANUP_WORKER_CONCURRENCY=10

# Worker lock duration in milliseconds (default: 2 minutes)
WORKER_LOCK_DURATION=120000

# ===================
# Error Tracking (ADR-015)
# ===================
# Sentry-compatible error tracking via Bugsink (self-hosted)
# DSNs are created in Bugsink UI at https://localhost:8443 (dev) or your production URL
#
# Dev container projects:
#   - Project 1: Backend API (Dev) - receives Pino, PostgreSQL errors
#   - Project 2: Frontend (Dev) - receives browser errors via Sentry SDK
#   - Project 4: Infrastructure (Dev) - receives Redis, NGINX, Vite errors
#
# Backend DSN - for Express/Node.js errors (internal container URL)
SENTRY_DSN=http://<key>@localhost:8000/1
# Frontend DSN - for React/browser errors (uses nginx proxy for browser access)
# Note: Browsers cannot reach localhost:8000 directly, so we use nginx proxy at /bugsink-api/
VITE_SENTRY_DSN=https://<key>@localhost/bugsink-api/2
# Environment name for error grouping (defaults to NODE_ENV)
SENTRY_ENVIRONMENT=development
VITE_SENTRY_ENVIRONMENT=development
# Enable/disable error tracking (default: true)
SENTRY_ENABLED=true
VITE_SENTRY_ENABLED=true
# Enable debug mode for SDK troubleshooting (default: false)
SENTRY_DEBUG=false
VITE_SENTRY_DEBUG=false

# ===================
# Source Maps Upload (ADR-015)
# ===================
# Set to 'true' to enable source map generation and upload during builds
# Only used in CI/CD pipelines (deploy-to-prod.yml, deploy-to-test.yml)
GENERATE_SOURCE_MAPS=true
# Auth token for uploading source maps to Bugsink
# Create at: https://bugsink.projectium.com (Settings > API Keys)
# Required for de-minified stack traces in error reports
SENTRY_AUTH_TOKEN=
# URL of your Bugsink instance (for source map uploads)
SENTRY_URL=https://bugsink.projectium.com

# ===================
# Feature Flags (ADR-024)
# ===================
# Feature flags control the availability of features at runtime.
# All flags default to disabled (false) when not set or set to any value other than 'true'.
# Set to 'true' to enable a feature.
#
# Backend flags use: FEATURE_SNAKE_CASE
# Frontend flags use: VITE_FEATURE_SNAKE_CASE (VITE_ prefix required for client-side access)
#
# Lifecycle:
#   1. Add flag with default false
#   2. Enable via env var when ready for testing/rollout
#   3. Remove conditional code when feature is fully rolled out
#   4. Remove flag from config within 3 months of full rollout
#
# See: docs/adr/0024-feature-flagging-strategy.md

# Backend Feature Flags
# FEATURE_BUGSINK_SYNC=false       # Enable Bugsink error sync integration
# FEATURE_ADVANCED_RBAC=false      # Enable advanced RBAC features
# FEATURE_NEW_DASHBOARD=false      # Enable new dashboard experience
# FEATURE_BETA_RECIPES=false       # Enable beta recipe features
# FEATURE_EXPERIMENTAL_AI=false    # Enable experimental AI features
# FEATURE_DEBUG_MODE=false         # Enable debug mode for development

# Frontend Feature Flags (VITE_ prefix required)
# VITE_FEATURE_NEW_DASHBOARD=false   # Enable new dashboard experience
# VITE_FEATURE_BETA_RECIPES=false    # Enable beta recipe features
# VITE_FEATURE_EXPERIMENTAL_AI=false # Enable experimental AI features
# VITE_FEATURE_DEBUG_MODE=false      # Enable debug mode for development
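The feature-flag convention documented above (a flag counts as enabled only when its variable is exactly 'true'; FEATURE_ prefix on the backend, VITE_FEATURE_ on the frontend) can be read through a tiny helper. The sketch below is illustrative only; the function name is not from the repository, and ADR-024 remains the authoritative description.

```ts
// featureFlags.ts - illustrative sketch of the convention described above.
export function isFeatureEnabled(name: string): boolean {
  // A flag is enabled only when its env var is exactly the string "true";
  // unset values or anything else mean disabled, matching the default-off lifecycle.
  return process.env[`FEATURE_${name}`] === "true";
}

// Usage:
if (isFeatureEnabled("NEW_DASHBOARD")) {
  // mount the new dashboard experience
}
```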
6  .env.test  Normal file
@@ -0,0 +1,6 @@
DB_HOST=10.89.0.4
DB_USER=flyer
DB_PASSWORD=flyer
DB_NAME=flyer_crawler_test
REDIS_URL=redis://redis:6379
NODE_ENV=test
93  .gitattributes  vendored  Normal file
@@ -0,0 +1,93 @@
# .gitattributes
#
# Optimize Gitea performance by excluding generated and vendored files
# from language statistics and indexing.
#
# See: https://github.com/github/linguist/blob/master/docs/overrides.md

# =============================================================================
# Vendored Dependencies
# =============================================================================
node_modules/** linguist-vendored

# =============================================================================
# Generated Files - Coverage Reports
# =============================================================================
coverage/** linguist-generated
.coverage/** linguist-generated
public/coverage/** linguist-generated
.nyc_output/** linguist-generated

# =============================================================================
# Generated Files - Build Artifacts
# =============================================================================
dist/** linguist-generated
build/** linguist-generated

# =============================================================================
# Generated Files - Test Results
# =============================================================================
test-results/** linguist-generated
playwright-report/** linguist-generated
playwright-report-visual/** linguist-generated
.vitest-results/** linguist-generated

# =============================================================================
# Generated Files - TSOA OpenAPI Spec & Routes
# =============================================================================
src/routes/routes.ts linguist-generated
public/swagger.json linguist-generated

# =============================================================================
# Documentation Files
# =============================================================================
*.md linguist-documentation

# =============================================================================
# Line Ending Normalization
# =============================================================================
# Ensure consistent line endings across platforms
* text=auto

# Shell scripts should always use LF
*.sh text eol=lf

# Windows batch files should use CRLF
*.bat text eol=crlf
*.cmd text eol=crlf

# SQL files should use LF
*.sql text eol=lf

# Configuration files
*.json text
*.yml text
*.yaml text
*.toml text
*.ini text

# Source code
*.ts text
*.tsx text
*.js text
*.jsx text
*.cjs text
*.mjs text
*.css text
*.scss text
*.html text

# =============================================================================
# Binary Files (explicit binary to prevent corruption)
# =============================================================================
*.png binary
*.jpg binary
*.jpeg binary
*.gif binary
*.ico binary
*.pdf binary
*.woff binary
*.woff2 binary
*.ttf binary
*.eot binary
*.otf binary
@@ -63,8 +63,8 @@ jobs:
- name: Check for Production Database Schema Changes
  env:
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
    DB_USER: ${{ secrets.DB_USER_PROD }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
  run: |
    if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
@@ -86,38 +86,142 @@ jobs:
      echo "✅ Schema is up to date. No changes detected."
    fi

- name: Generate TSOA OpenAPI Spec and Routes
  run: |
    echo "Generating TSOA OpenAPI specification and route handlers..."
    npm run tsoa:build
    echo "✅ TSOA files generated successfully"

- name: Build React Application for Production
  # Source Maps (ADR-015): If SENTRY_AUTH_TOKEN is set, the @sentry/vite-plugin will:
  # 1. Generate hidden source maps during build
  # 2. Upload them to Bugsink for error de-minification
  # 3. Delete the .map files after upload (so they're not publicly accessible)
  run: |
    if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
      echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
      exit 1
    fi

    # Source map upload is optional - warn if not configured
    if [ -z "${{ secrets.SENTRY_AUTH_TOKEN }}" ]; then
      echo "WARNING: SENTRY_AUTH_TOKEN not set. Source maps will NOT be uploaded to Bugsink."
      echo " Errors will show minified stack traces. To fix, add SENTRY_AUTH_TOKEN to Gitea secrets."
    fi

    GITEA_SERVER_URL="https://gitea.projectium.com"
    COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
    PACKAGE_VERSION=$(node -p "require('./package.json').version")
    GENERATE_SOURCE_MAPS=true \
    VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
    VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
    VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
    VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN }}" \
    VITE_SENTRY_ENVIRONMENT="production" \
    VITE_SENTRY_ENABLED="true" \
    SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}" \
    SENTRY_URL="https://bugsink.projectium.com" \
    VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build

- name: Deploy Application to Production Server
  run: |
    echo "Deploying application files to /var/www/flyer-crawler.projectium.com..."
    echo "========================================="
    echo "DEPLOYING TO PRODUCTION SERVER"
    echo "========================================="
    APP_PATH="/var/www/flyer-crawler.projectium.com"

    # ========================================
    # LAYER 1: PRE-FLIGHT SAFETY CHECKS
    # ========================================
    echo ""
    echo "--- Pre-Flight Safety Checks ---"

    # Check 1: Verify we're in a git repository
    if ! git rev-parse --git-dir > /dev/null 2>&1; then
      echo "❌ FATAL: Not in a git repository! Aborting to prevent data loss."
      exit 1
    fi
    echo "✅ Git repository verified"

    # Check 2: Verify critical files exist before deployment
    if [ ! -f "package.json" ] || [ ! -f "server.ts" ]; then
      echo "❌ FATAL: Critical files missing (package.json or server.ts). Aborting."
      exit 1
    fi
    echo "✅ Critical files verified"

    # Check 3: Verify we have actual content to deploy (prevent empty checkout)
    FILE_COUNT=$(find . -type f | wc -l)
    if [ "$FILE_COUNT" -lt 10 ]; then
      echo "❌ FATAL: Suspiciously few files ($FILE_COUNT). Aborting to prevent catastrophic deletion."
      exit 1
    fi
    echo "✅ File count verified: $FILE_COUNT files ready to deploy"

    # ========================================
    # LAYER 2: STOP PM2 BEFORE FILE OPERATIONS
    # ========================================
    echo ""
    echo "--- Stopping PM2 Processes ---"
    pm2 stop flyer-crawler-api flyer-crawler-worker flyer-crawler-analytics-worker || echo "No production processes to stop"
    pm2 list

    # ========================================
    # LAYER 3: SAFE RSYNC WITH COMPREHENSIVE EXCLUDES
    # ========================================
    echo ""
    echo "--- Deploying Application Files ---"
    mkdir -p "$APP_PATH"
    mkdir -p "$APP_PATH/flyer-images/icons" "$APP_PATH/flyer-images/archive"
    rsync -avz --delete --exclude 'node_modules' --exclude '.git' --exclude 'dist' --exclude 'flyer-images' ./ "$APP_PATH/"
    rsync -avz dist/ "$APP_PATH"
    echo "Application deployment complete."

    # Deploy backend with critical file exclusions
    rsync -avz --delete \
      --exclude 'node_modules' \
      --exclude '.git' \
      --exclude 'dist' \
      --exclude 'flyer-images' \
      --exclude 'ecosystem.config.cjs' \
      --exclude 'ecosystem-test.config.cjs' \
      --exclude 'ecosystem.dev.config.cjs' \
      --exclude '.env.*' \
      --exclude 'coverage' \
      --exclude '.coverage' \
      --exclude 'test-results' \
      --exclude 'playwright-report' \
      --exclude 'playwright-report-visual' \
      ./ "$APP_PATH/" 2>&1 | tail -20

    echo "✅ Backend files deployed ($(find "$APP_PATH" -type f | wc -l) files)"

    # Deploy frontend assets
    rsync -avz dist/ "$APP_PATH" 2>&1 | tail -10
    echo "✅ Frontend assets deployed"

    echo ""
    echo "========================================="
    echo "DEPLOYMENT COMPLETE"
    echo "========================================="

- name: Log Workflow Metadata
  run: |
    echo "=== WORKFLOW METADATA ==="
    echo "Workflow file: deploy-to-prod.yml"
    echo "Workflow file hash: $(sha256sum .gitea/workflows/deploy-to-prod.yml | cut -d' ' -f1)"
    echo "Git commit: $(git rev-parse HEAD)"
    echo "Git branch: $(git rev-parse --abbrev-ref HEAD)"
    echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
    echo "Actor: ${{ gitea.actor }}"
    echo "=== END METADATA ==="

- name: Install Backend Dependencies and Restart Production Server
  env:
    # --- Production Secrets Injection ---
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
    DB_USER: ${{ secrets.DB_USER_PROD }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
    REDIS_URL: 'redis://localhost:6379'
    # Explicitly use database 0 for production (test uses database 1)
    REDIS_URL: 'redis://localhost:6379/0'
    REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
    FRONTEND_URL: 'https://flyer-crawler.projectium.com'
    JWT_SECRET: ${{ secrets.JWT_SECRET }}
@@ -129,6 +233,15 @@ jobs:
    SMTP_USER: ''
    SMTP_PASS: ''
    SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
    # OAuth Providers
    GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }}
    GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }}
    GITHUB_CLIENT_ID: ${{ secrets.GH_CLIENT_ID }}
    GITHUB_CLIENT_SECRET: ${{ secrets.GH_CLIENT_SECRET }}
    # Sentry/Bugsink Error Tracking (ADR-015)
    SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
    SENTRY_ENVIRONMENT: 'production'
    SENTRY_ENABLED: 'true'
  run: |
    if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
      echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
@@ -138,9 +251,78 @@ jobs:
    cd /var/www/flyer-crawler.projectium.com
    npm install --omit=dev

    # --- Cleanup Errored Processes ---
    echo "Cleaning up errored or stopped PM2 processes..."
    node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
    # === PRE-CLEANUP PM2 STATE LOGGING ===
    echo "=== PRE-CLEANUP PM2 STATE ==="
    pm2 jlist
    echo "=== END PRE-CLEANUP STATE ==="

    # --- Cleanup Errored Processes with Defense-in-Depth Safeguards ---
    echo "Cleaning up errored or stopped PRODUCTION PM2 processes..."
    node -e "
    const exec = require('child_process').execSync;
    try {
      const list = JSON.parse(exec('pm2 jlist').toString());
      const prodProcesses = ['flyer-crawler-api', 'flyer-crawler-worker', 'flyer-crawler-analytics-worker'];

      // Filter for processes that match our criteria
      const targetProcesses = list.filter(p =>
        (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') &&
        prodProcesses.includes(p.name)
      );

      // SAFEGUARD 1: Process count validation
      const totalProcesses = list.length;
      if (targetProcesses.length === totalProcesses && totalProcesses > 3) {
        console.error('SAFETY ABORT: Filter would delete ALL processes!');
        console.error('Total processes: ' + totalProcesses + ', Target processes: ' + targetProcesses.length);
        console.error('This indicates a potential filter bug. Aborting cleanup.');
        process.exit(1);
      }

      // SAFEGUARD 2: Explicit name verification
      console.log('Found ' + targetProcesses.length + ' PRODUCTION processes to clean:');
      targetProcesses.forEach(p => {
        console.log(' - ' + p.name + ' (status: ' + p.pm2_env.status + ', pm_id: ' + p.pm2_env.pm_id + ')');
      });

      // Perform the cleanup
      targetProcesses.forEach(p => {
        console.log('Deleting ' + p.pm2_env.status + ' production process: ' + p.name + ' (' + p.pm2_env.pm_id + ')');
        try {
          exec('pm2 delete ' + p.pm2_env.pm_id);
        } catch(e) {
          console.error('Failed to delete ' + p.pm2_env.pm_id);
        }
      });

      console.log('Production process cleanup complete.');
    } catch (e) {
      console.error('Error cleaning up processes:', e);
    }
    "

    # Save PM2 process list after cleanup to persist deletions
    echo "Saving PM2 process list after cleanup..."
    pm2 save

    # === POST-CLEANUP VERIFICATION ===
    echo "=== POST-CLEANUP VERIFICATION ==="
    pm2 jlist | node -e "
    try {
      const list = JSON.parse(require('fs').readFileSync(0, 'utf-8'));
      const prodProcesses = list.filter(p => p.name && p.name.startsWith('flyer-crawler-') && !p.name.endsWith('-test') && !p.name.endsWith('-dev'));
      console.log('Production processes after cleanup:');
      prodProcesses.forEach(p => {
        console.log(' ' + p.name + ': ' + p.pm2_env.status);
      });
      if (prodProcesses.length === 0) {
        console.log(' (no production processes currently running)');
      }
    } catch (e) {
      console.error('Failed to parse PM2 output:', e.message);
    }
    "
    echo "=== END POST-CLEANUP VERIFICATION ==="

    # --- Version Check Logic ---
    # Get the version from the newly deployed package.json
@@ -158,7 +340,7 @@ jobs:
    else
      echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
    fi
    pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
    pm2 startOrReload ecosystem.config.cjs --update-env && pm2 save
    echo "Production backend server reloaded successfully."
    else
      echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
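The last hunk shows only the tail of the version-check logic: the deployed version is read from package.json and PM2 is reloaded only when it differs from the running version. A sketch of that idea as a standalone Node/TypeScript script follows; reading the running version from pm2 jlist metadata is an assumption for illustration, not necessarily how the workflow actually obtains $RUNNING_VERSION.

```ts
// version-check.ts - a sketch of the version-check idea, not the workflow's actual script.
import { execSync } from "node:child_process";
import { readFileSync } from "node:fs";

const deployed: string = JSON.parse(readFileSync("package.json", "utf-8")).version;

// Assumption: pm2 records each process's package version in its jlist metadata.
const processes = JSON.parse(execSync("pm2 jlist").toString());
const api = processes.find((p: { name: string }) => p.name === "flyer-crawler-api");
const running: string = api?.pm2_env?.version ?? "not-running";

if (running === deployed) {
  console.log(`Version ${deployed} is already running. Skipping PM2 reload.`);
} else {
  console.log(`Version mismatch (Running: ${running} -> Deployed: ${deployed}). Reloading PM2...`);
  execSync("pm2 startOrReload ecosystem.config.cjs --update-env && pm2 save", { stdio: "inherit" });
}
```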
File diff suppressed because it is too large
@@ -20,9 +20,9 @@ jobs:
    # Use production database credentials for this entire job.
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_PORT: ${{ secrets.DB_PORT }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
    DB_NAME: ${{ secrets.DB_NAME_PROD }}
    DB_USER: ${{ secrets.DB_USER_PROD }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }}

  steps:
    - name: Validate Secrets

@@ -23,9 +23,9 @@ jobs:
  env:
    # Use production database credentials for this entire job.
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }} # Used by psql
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }} # Used by the application
    DB_USER: ${{ secrets.DB_USER_PROD }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }}

  steps:
    - name: Checkout Code

@@ -23,9 +23,9 @@ jobs:
  env:
    # Use test database credentials for this entire job.
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }} # Used by psql
    DB_NAME: ${{ secrets.DB_DATABASE_TEST }} # Used by the application
    DB_USER: ${{ secrets.DB_USER_TEST }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_TEST }}
    DB_NAME: ${{ secrets.DB_DATABASE_TEST }}

  steps:
    - name: Checkout Code

@@ -22,8 +22,8 @@ jobs:
  env:
    # Use production database credentials for this entire job.
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
    DB_USER: ${{ secrets.DB_USER_PROD }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
    BACKUP_DIR: '/var/www/backups' # Define a dedicated directory for backups

@@ -56,9 +56,10 @@ jobs:

- name: Step 1 - Stop Application Server
  run: |
    echo "Stopping all PM2 processes to release database connections..."
    pm2 stop all || echo "PM2 processes were not running."
    echo "✅ Application server stopped."
    echo "Stopping PRODUCTION PM2 processes to release database connections..."
    pm2 stop flyer-crawler-api flyer-crawler-worker flyer-crawler-analytics-worker --namespace flyer-crawler-prod || echo "Production PM2 processes were not running."
    pm2 save --namespace flyer-crawler-prod
    echo "✅ Production application server stopped and saved."

- name: Step 2 - Drop and Recreate Database
  run: |
@@ -91,5 +92,5 @@ jobs:
  run: |
    echo "Restarting application server..."
    cd /var/www/flyer-crawler.projectium.com
    pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
    pm2 startOrReload ecosystem.config.cjs --env production --namespace flyer-crawler-prod && pm2 save --namespace flyer-crawler-prod
    echo "✅ Application server restarted."

@@ -62,8 +62,8 @@ jobs:
- name: Check for Production Database Schema Changes
  env:
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
    DB_USER: ${{ secrets.DB_USER_PROD }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
  run: |
    if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
@@ -85,6 +85,12 @@ jobs:
      echo "✅ Schema is up to date. No changes detected."
    fi

- name: Generate TSOA OpenAPI Spec and Routes
  run: |
    echo "Generating TSOA OpenAPI specification and route handlers..."
    npm run tsoa:build
    echo "✅ TSOA files generated successfully"

- name: Build React Application for Production
  run: |
    if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
@@ -109,14 +115,26 @@ jobs:
    rsync -avz dist/ "$APP_PATH"
    echo "Application deployment complete."

- name: Log Workflow Metadata
  run: |
    echo "=== WORKFLOW METADATA ==="
    echo "Workflow file: manual-deploy-major.yml"
    echo "Workflow file hash: $(sha256sum .gitea/workflows/manual-deploy-major.yml | cut -d' ' -f1)"
    echo "Git commit: $(git rev-parse HEAD)"
    echo "Git branch: $(git rev-parse --abbrev-ref HEAD)"
    echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
    echo "Actor: ${{ gitea.actor }}"
    echo "=== END METADATA ==="

- name: Install Backend Dependencies and Restart Production Server
  env:
    # --- Production Secrets Injection ---
    DB_HOST: ${{ secrets.DB_HOST }}
    DB_USER: ${{ secrets.DB_USER }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
    DB_USER: ${{ secrets.DB_USER_PROD }}
    DB_PASSWORD: ${{ secrets.DB_PASSWORD_PROD }}
    DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
    REDIS_URL: 'redis://localhost:6379'
    # Explicitly use database 0 for production (test uses database 1)
    REDIS_URL: 'redis://localhost:6379/0'
    REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
    FRONTEND_URL: 'https://flyer-crawler.projectium.com'
    JWT_SECRET: ${{ secrets.JWT_SECRET }}
@@ -137,9 +155,78 @@ jobs:
    cd /var/www/flyer-crawler.projectium.com
    npm install --omit=dev

    # --- Cleanup Errored Processes ---
    echo "Cleaning up errored or stopped PM2 processes..."
    node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
    # === PRE-CLEANUP PM2 STATE LOGGING ===
    echo "=== PRE-CLEANUP PM2 STATE ==="
    pm2 jlist
    echo "=== END PRE-CLEANUP STATE ==="

    # --- Cleanup Errored Processes with Defense-in-Depth Safeguards ---
    echo "Cleaning up errored or stopped PRODUCTION PM2 processes..."
    node -e "
    const exec = require('child_process').execSync;
    try {
      const list = JSON.parse(exec('pm2 jlist').toString());
      const prodProcesses = ['flyer-crawler-api', 'flyer-crawler-worker', 'flyer-crawler-analytics-worker'];

      // Filter for processes that match our criteria
      const targetProcesses = list.filter(p =>
        (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') &&
        prodProcesses.includes(p.name)
      );

      // SAFEGUARD 1: Process count validation
      const totalProcesses = list.length;
      if (targetProcesses.length === totalProcesses && totalProcesses > 3) {
        console.error('SAFETY ABORT: Filter would delete ALL processes!');
        console.error('Total processes: ' + totalProcesses + ', Target processes: ' + targetProcesses.length);
        console.error('This indicates a potential filter bug. Aborting cleanup.');
        process.exit(1);
      }

      // SAFEGUARD 2: Explicit name verification
      console.log('Found ' + targetProcesses.length + ' PRODUCTION processes to clean:');
      targetProcesses.forEach(p => {
        console.log(' - ' + p.name + ' (status: ' + p.pm2_env.status + ', pm_id: ' + p.pm2_env.pm_id + ')');
      });

      // Perform the cleanup
      targetProcesses.forEach(p => {
        console.log('Deleting ' + p.pm2_env.status + ' production process: ' + p.name + ' (' + p.pm2_env.pm_id + ')');
        try {
          exec('pm2 delete ' + p.pm2_env.pm_id);
        } catch(e) {
          console.error('Failed to delete ' + p.pm2_env.pm_id);
        }
      });

      console.log('Production process cleanup complete.');
    } catch (e) {
      console.error('Error cleaning up processes:', e);
    }
    "

    # Save PM2 process list after cleanup to persist deletions
    echo "Saving PM2 process list after cleanup..."
    pm2 save

    # === POST-CLEANUP VERIFICATION ===
    echo "=== POST-CLEANUP VERIFICATION ==="
    pm2 jlist | node -e "
    try {
      const list = JSON.parse(require('fs').readFileSync(0, 'utf-8'));
      const prodProcesses = list.filter(p => p.name && p.name.startsWith('flyer-crawler-') && !p.name.endsWith('-test') && !p.name.endsWith('-dev'));
      console.log('Production processes after cleanup:');
      prodProcesses.forEach(p => {
        console.log(' ' + p.name + ': ' + p.pm2_env.status);
      });
      if (prodProcesses.length === 0) {
        console.log(' (no production processes currently running)');
      }
    } catch (e) {
      console.error('Failed to parse PM2 output:', e.message);
    }
    "
    echo "=== END POST-CLEANUP VERIFICATION ==="

    # --- Version Check Logic ---
    # Get the version from the newly deployed package.json
167
.gitea/workflows/manual-redis-flush-prod.yml
Normal file
167
.gitea/workflows/manual-redis-flush-prod.yml
Normal file
@@ -0,0 +1,167 @@
|
||||
# .gitea/workflows/manual-redis-flush-prod.yml
|
||||
#
|
||||
# DANGER: This workflow is DESTRUCTIVE and intended for manual execution only.
|
||||
# It will completely FLUSH the PRODUCTION Redis database (db 0).
|
||||
# This will clear all BullMQ queues, sessions, caches, and any other Redis data.
|
||||
#
|
||||
name: Manual - Flush Production Redis
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
confirmation:
|
||||
description: 'DANGER: This will FLUSH production Redis. Type "flush-production-redis" to confirm.'
|
||||
required: true
|
||||
default: 'do-not-run'
|
||||
flush_type:
|
||||
description: 'What to flush?'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- 'queues-only'
|
||||
- 'entire-database'
|
||||
default: 'queues-only'
|
||||
|
||||
jobs:
|
||||
flush-redis:
|
||||
runs-on: projectium.com # This job runs on your self-hosted Gitea runner.
|
||||
|
||||
env:
|
||||
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
||||
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Validate Secrets
|
||||
run: |
|
||||
if [ -z "$REDIS_PASSWORD" ]; then
|
||||
echo "ERROR: REDIS_PASSWORD_PROD secret is not set in Gitea repository settings."
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Redis password secret is present."
|
||||
|
||||
- name: Verify Confirmation Phrase
|
||||
run: |
|
||||
if [ "${{ gitea.event.inputs.confirmation }}" != "flush-production-redis" ]; then
|
||||
echo "ERROR: Confirmation phrase did not match. Aborting Redis flush."
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Confirmation accepted. Proceeding with Redis flush."
|
||||
|
||||
- name: Show Current Redis State
|
||||
run: |
|
||||
echo "--- Current Redis Database 0 (Production) State ---"
|
||||
redis-cli -a "$REDIS_PASSWORD" -n 0 INFO keyspace 2>/dev/null || echo "Could not get keyspace info"
|
||||
echo ""
|
||||
echo "--- Key Count ---"
|
||||
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
|
||||
echo "Production Redis (db 0) key count: $KEY_COUNT"
|
||||
echo ""
|
||||
echo "--- BullMQ Queue Keys ---"
|
||||
redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | head -20 || echo "No BullMQ keys found"
|
||||
|
||||
      - name: 🚨 FINAL WARNING & PAUSE 🚨
        run: |
          echo "*********************************************************************"
          echo "WARNING: YOU ARE ABOUT TO FLUSH PRODUCTION REDIS DATA."
          echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
          echo ""
          if [ "${{ gitea.event.inputs.flush_type }}" = "entire-database" ]; then
            echo "This will DELETE ALL Redis data including sessions, caches, and queues!"
          else
            echo "This will DELETE ALL BullMQ queue data (pending jobs, failed jobs, etc.)"
          fi
          echo ""
          echo "This action is IRREVERSIBLE. Press Ctrl+C in the runner terminal NOW to cancel."
          echo "Sleeping for 10 seconds..."
          echo "*********************************************************************"
          sleep 10

      - name: Flush BullMQ Queues Only
        if: ${{ gitea.event.inputs.flush_type == 'queues-only' }}
        env:
          REDIS_URL: 'redis://localhost:6379/0'
        run: |
          echo "--- Obliterating BullMQ queues using Node.js ---"
          node -e "
          const { Queue } = require('bullmq');
          const IORedis = require('ioredis');

          const connection = new IORedis(process.env.REDIS_URL, {
            maxRetriesPerRequest: null,
            password: process.env.REDIS_PASSWORD,
          });

          const queueNames = [
            'flyer-processing',
            'email-sending',
            'analytics-reporting',
            'weekly-analytics-reporting',
            'file-cleanup',
            'token-cleanup'
          ];

          (async () => {
            for (const name of queueNames) {
              try {
                const queue = new Queue(name, { connection });
                const counts = await queue.getJobCounts();
                console.log('Queue \"' + name + '\" before obliterate:', JSON.stringify(counts));
                await queue.obliterate({ force: true });
                console.log('✅ Obliterated queue: ' + name);
                await queue.close();
              } catch (err) {
                console.error('⚠️ Failed to obliterate queue ' + name + ':', err.message);
              }
            }
            await connection.quit();
            console.log('✅ All BullMQ queues obliterated.');
          })();
          "
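If you want to preview what would be removed before running the destructive step, a read-only dry run over the same queues (a sketch using the same bullmq/ioredis setup as above, not part of the committed workflow) could print the job counts and exit:

    node -e "
    const { Queue } = require('bullmq');
    const IORedis = require('ioredis');
    const connection = new IORedis(process.env.REDIS_URL, { maxRetriesPerRequest: null, password: process.env.REDIS_PASSWORD });
    (async () => {
      // Read-only: print job counts per queue, delete nothing.
      for (const name of ['flyer-processing', 'email-sending', 'analytics-reporting']) {
        const queue = new Queue(name, { connection });
        console.log(name + ':', JSON.stringify(await queue.getJobCounts()));
        await queue.close();
      }
      await connection.quit();
    })();
    "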
      - name: Flush Entire Redis Database
        if: ${{ gitea.event.inputs.flush_type == 'entire-database' }}
        run: |
          echo "--- Flushing entire Redis database 0 (production) ---"
          redis-cli -a "$REDIS_PASSWORD" -n 0 FLUSHDB 2>/dev/null && echo "✅ Redis database 0 flushed successfully." || echo "❌ Redis flush failed"
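FLUSHDB is synchronous by default and blocks the server while keys are deleted. Redis 4.0 and later accept an ASYNC modifier that frees the keys in a background thread; a variant of the same command (shown only as an option, not what the workflow runs) would be:

    # Schedule the flush in the background instead of blocking the server.
    redis-cli -a "$REDIS_PASSWORD" -n 0 FLUSHDB ASYNC 2>/dev/null && echo "flush scheduled" || echo "flush failed"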
      - name: Verify Flush Results
        run: |
          echo "--- Redis Database 0 (Production) State After Flush ---"
          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
          echo "Production Redis (db 0) key count after flush: $KEY_COUNT"
          echo ""
          echo "--- Remaining BullMQ Queue Keys ---"
          BULL_KEYS=$(redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | wc -l || echo "0")
          echo "BullMQ key count: $BULL_KEYS"

          if [ "${{ gitea.event.inputs.flush_type }}" = "queues-only" ] && [ "$BULL_KEYS" -gt 0 ]; then
            echo "⚠️ Warning: Some BullMQ keys may still exist. This can happen if new jobs were added during the flush."
          fi

      - name: Summary
        run: |
          echo ""
          echo "=========================================="
          echo "PRODUCTION REDIS FLUSH COMPLETE"
          echo "=========================================="
          echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
          echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
          echo ""
          echo "NOTE: If you flushed queues, any pending jobs (flyer processing,"
          echo "emails, analytics, etc.) have been permanently deleted."
          echo ""
          echo "The production workers will automatically start processing"
          echo "new jobs as they are added to the queues."
          echo "=========================================="

.gitea/workflows/pm2-diagnostics.yml (new file, 188 lines)
@@ -0,0 +1,188 @@
# .gitea/workflows/pm2-diagnostics.yml
#
# Comprehensive PM2 diagnostics to identify crash causes and problematic projects
name: PM2 Diagnostics

on:
  workflow_dispatch:
    inputs:
      capture_interval:
        description: 'Seconds between PM2 state captures (default: 5)'
        required: false
        default: '5'
      duration:
        description: 'Total monitoring duration in seconds (default: 60)'
        required: false
        default: '60'

jobs:
  pm2-diagnostics:
    runs-on: projectium.com

    steps:
      - name: PM2 Current State Snapshot
        run: |
          echo "========================================="
          echo "PM2 CURRENT STATE SNAPSHOT"
          echo "========================================="
          echo ""
          echo "--- PM2 List (Human Readable) ---"
          pm2 list
          echo ""
          echo "--- PM2 List (JSON) ---"
          pm2 jlist > /tmp/pm2-state-initial.json
          cat /tmp/pm2-state-initial.json | jq '.'
          echo ""
          echo "--- PM2 Daemon Info ---"
          pm2 info pm2-logrotate || echo "pm2-logrotate not found"
          echo ""
          echo "--- PM2 Version ---"
          pm2 --version
          echo ""
          echo "--- Node Version ---"
          node --version
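When reading the saved snapshot later, it can help to condense it into a per-status tally. A small jq sketch over the file written above (not part of the workflow itself) would be:

    # Count processes per PM2 status from the saved snapshot.
    jq -r 'group_by(.pm2_env.status) | .[] | "\(.[0].pm2_env.status): \(length)"' /tmp/pm2-state-initial.json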
      - name: PM2 Process Working Directories
        run: |
          echo "========================================="
          echo "PROCESS WORKING DIRECTORIES"
          echo "========================================="
          pm2 jlist | jq -r '.[] | "Process: \(.name) | CWD: \(.pm2_env.pm_cwd) | Exists: \(if .pm2_env.pm_cwd then "checking..." else "N/A" end)"'
          echo ""
          echo "--- Checking if CWDs still exist ---"
          pm2 jlist | jq -r '.[].pm2_env.pm_cwd' | while read cwd; do
            if [ -d "$cwd" ]; then
              echo "✅ EXISTS: $cwd"
            else
              echo "❌ MISSING: $cwd (THIS WILL CAUSE CRASHES!)"
            fi
          done
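A deleted working directory is exactly what produces the ENOENT/uv_cwd errors the log-analysis step greps for below. If you only want the offenders, a compact variant of the same check (a sketch, not in the workflow) is:

    # Print only working directories that no longer exist on disk.
    pm2 jlist | jq -r '.[].pm2_env.pm_cwd // empty' | sort -u | while read -r cwd; do
      [ -d "$cwd" ] || echo "MISSING: $cwd"
    done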
      - name: PM2 Log Analysis
        run: |
          echo "========================================="
          echo "PM2 LOG ANALYSIS"
          echo "========================================="
          echo ""
          echo "--- PM2 Daemon Log (Last 100 Lines) ---"
          tail -100 /home/gitea-runner/.pm2/pm2.log
          echo ""
          echo "--- Searching for ENOENT errors ---"
          grep -i "ENOENT\|no such file or directory\|uv_cwd" /home/gitea-runner/.pm2/pm2.log || echo "No ENOENT errors found"
          echo ""
          echo "--- Searching for crash patterns ---"
          grep -i "crash\|error\|exception" /home/gitea-runner/.pm2/pm2.log | tail -50 || echo "No crashes found"

      - name: Identify All PM2-Managed Projects
        run: |
          echo "========================================="
          echo "ALL PM2-MANAGED PROJECTS"
          echo "========================================="
          pm2 jlist | jq -r '.[] | "[\(.pm_id)] \(.name) - v\(.pm2_env.version // "N/A") - \(.pm2_env.status) - CWD: \(.pm2_env.pm_cwd)"'
          echo ""
          echo "--- Projects by CWD ---"
          pm2 jlist | jq -r '.[].pm2_env.pm_cwd' | sort -u
          echo ""
          echo "--- Checking which projects might interfere ---"
          for dir in /var/www/*; do
            if [ -d "$dir" ]; then
              echo ""
              echo "Directory: $dir"
              ls -la "$dir" | grep -E "ecosystem|package.json|node_modules" || echo " No PM2/Node files"
            fi
          done

      - name: Monitor PM2 State Over Time
        run: |
          echo "========================================="
          echo "PM2 STATE MONITORING"
          echo "========================================="
          echo "Monitoring PM2 for ${{ gitea.event.inputs.duration }} seconds..."
          echo "Capturing state every ${{ gitea.event.inputs.capture_interval }} seconds"
          echo ""

          INTERVAL=${{ gitea.event.inputs.capture_interval }}
          DURATION=${{ gitea.event.inputs.duration }}
          COUNT=$((DURATION / INTERVAL))

          for i in $(seq 1 $COUNT); do
            echo "--- Capture $i at $(date) ---"
            pm2 jlist | jq -r '.[] | "\(.name): \(.pm2_env.status) (restarts: \(.pm2_env.restart_time))"'

            # Check for new crashes
            CRASHED=$(pm2 jlist | jq '[.[] | select(.pm2_env.status == "errored" or .pm2_env.status == "stopped")] | length')
            if [ "$CRASHED" -gt 0 ]; then
              echo "⚠️ WARNING: $CRASHED process(es) in crashed state!"
              pm2 jlist | jq -r '.[] | select(.pm2_env.status == "errored" or .pm2_env.status == "stopped") | " - \(.name): \(.pm2_env.status)"'
            fi

            sleep $INTERVAL
          done
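With the default inputs the loop above performs DURATION / INTERVAL = 60 / 5 = 12 captures. If an interval of 0 were ever passed, the shell arithmetic would fail, so a defensive variant (a sketch using the same variable names, not part of the committed step) might look like:

    # Fall back to the documented defaults if the inputs are empty or zero.
    INTERVAL=${INTERVAL:-5}
    DURATION=${DURATION:-60}
    if [ "$INTERVAL" -le 0 ]; then INTERVAL=5; fi
    COUNT=$((DURATION / INTERVAL))
    echo "Will capture $COUNT snapshots"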
      - name: PM2 Dump File Analysis
        run: |
          echo "========================================="
          echo "PM2 DUMP FILE ANALYSIS"
          echo "========================================="
          echo "--- Dump file location ---"
          ls -lh /home/gitea-runner/.pm2/dump.pm2
          echo ""
          echo "--- Dump file contents ---"
          cat /home/gitea-runner/.pm2/dump.pm2 | jq '.'
          echo ""
          echo "--- Processes in dump ---"
          cat /home/gitea-runner/.pm2/dump.pm2 | jq -r '.apps[] | "\(.name) at \(.pm_cwd)"'

      - name: Check for Rogue Deployment Scripts
        run: |
          echo "========================================="
          echo "DEPLOYMENT SCRIPT ANALYSIS"
          echo "========================================="
          echo "Checking for scripts that might delete directories..."
          echo ""
          for project in flyer-crawler stock-alert; do
            for env in "" "-test"; do
              DIR="/var/www/$project$env.projectium.com"
              if [ -d "$DIR" ]; then
                echo "--- Project: $project$env ---"
                echo "Location: $DIR"
                if [ -f "$DIR/.gitea/workflows/deploy-to-test.yml" ]; then
                  echo "Has deploy-to-test workflow"
                  grep -n "rsync.*--delete\|rm -rf" "$DIR/.gitea/workflows/deploy-to-test.yml" | head -5 || echo "No dangerous commands found"
                fi
                if [ -f "$DIR/.gitea/workflows/deploy-to-prod.yml" ]; then
                  echo "Has deploy-to-prod workflow"
                  grep -n "rsync.*--delete\|rm -rf" "$DIR/.gitea/workflows/deploy-to-prod.yml" | head -5 || echo "No dangerous commands found"
                fi
                echo ""
              fi
            done
          done
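The step above only inspects the two deploy workflows by name. To sweep every workflow file in a project for the same destructive patterns, a broader variant (a sketch reusing the DIR variable from that loop, not part of the committed step) could be:

    # Scan all workflow files under a project for rsync --delete or rm -rf.
    grep -rnE 'rsync.*--delete|rm -rf' "$DIR/.gitea/workflows" 2>/dev/null || echo "No dangerous commands found"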
      - name: Generate Diagnostic Report
        run: |
          echo "========================================="
          echo "DIAGNOSTIC SUMMARY"
          echo "========================================="
          echo ""
          echo "Total PM2 processes: $(pm2 jlist | jq 'length')"
          echo "Online: $(pm2 jlist | jq '[.[] | select(.pm2_env.status == "online")] | length')"
          echo "Stopped: $(pm2 jlist | jq '[.[] | select(.pm2_env.status == "stopped")] | length')"
          echo "Errored: $(pm2 jlist | jq '[.[] | select(.pm2_env.status == "errored")] | length')"
          echo ""
          echo "Flyer-crawler processes:"
          pm2 jlist | jq -r '.[] | select(.name | contains("flyer-crawler")) | " \(.name): \(.pm2_env.status)"'
          echo ""
          echo "Stock-alert processes:"
          pm2 jlist | jq -r '.[] | select(.name | contains("stock-alert")) | " \(.name): \(.pm2_env.status)"'
          echo ""
          echo "Other processes:"
          pm2 jlist | jq -r '.[] | select(.name | contains("flyer-crawler") | not) | select(.name | contains("stock-alert") | not) | " \(.name): \(.pm2_env.status)"'
          echo ""
          echo "========================================="
          echo "RECOMMENDATIONS"
          echo "========================================="
          echo "1. Check for missing CWDs (marked with ❌ above)"
          echo "2. Review PM2 daemon log for ENOENT errors"
          echo "3. Verify no deployments are running rsync --delete while processes are online"
          echo "4. Consider separating PM2 daemons by user or using PM2 namespaces"
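Recommendation 4 refers to PM2's namespace feature, which the restart workflow below also relies on. As a rough sketch (assuming PM2's documented --namespace behaviour; the ecosystem file names match those used elsewhere in this diff), grouping each environment under its own namespace lets you act on the whole group at once:

    # Start each environment under its own namespace, then operate on the group.
    pm2 start ecosystem.config.cjs --namespace flyer-crawler-prod
    pm2 start ecosystem-test.config.cjs --namespace flyer-crawler-test
    pm2 restart flyer-crawler-test   # restarts every process registered in that namespace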
.gitea/workflows/restart-pm2.yml (new file, 107 lines)
@@ -0,0 +1,107 @@
# .gitea/workflows/restart-pm2.yml
#
# Manual workflow to restart PM2 processes and verify their status.
# Useful for recovering from PM2 daemon crashes or process issues.
name: Restart PM2 Processes

on:
  workflow_dispatch:
    inputs:
      environment:
        description: 'Environment to restart (test, production, or both)'
        required: true
        default: 'test'
        type: choice
        options:
          - test
          - production
          - both

jobs:
  restart-pm2:
    runs-on: projectium.com

    steps:
      - name: Validate Environment Input
        run: |
          echo "Restarting PM2 processes for environment: ${{ gitea.event.inputs.environment }}"

      - name: Restart Test Environment
        if: gitea.event.inputs.environment == 'test' || gitea.event.inputs.environment == 'both'
        run: |
          echo "=== RESTARTING TEST ENVIRONMENT ==="
          cd /var/www/flyer-crawler-test.projectium.com

          echo "--- Current PM2 State (Before Restart) ---"
          pm2 list --namespace flyer-crawler-test

          echo "--- Restarting Test Processes ---"
          pm2 restart flyer-crawler-api-test flyer-crawler-worker-test flyer-crawler-analytics-worker-test --namespace flyer-crawler-test || {
            echo "Restart failed, attempting to start processes..."
            pm2 start ecosystem-test.config.cjs --namespace flyer-crawler-test
          }

          echo "--- Saving PM2 Process List ---"
          pm2 save --namespace flyer-crawler-test

          echo "--- Waiting 3 seconds for processes to stabilize ---"
          sleep 3

          echo "=== TEST ENVIRONMENT STATUS ==="
          pm2 ps --namespace flyer-crawler-test

      - name: Restart Production Environment
        if: gitea.event.inputs.environment == 'production' || gitea.event.inputs.environment == 'both'
        run: |
          echo "=== RESTARTING PRODUCTION ENVIRONMENT ==="
          cd /var/www/flyer-crawler.projectium.com

          echo "--- Current PM2 State (Before Restart) ---"
          pm2 list --namespace flyer-crawler-prod

          echo "--- Restarting Production Processes ---"
          pm2 restart flyer-crawler-api flyer-crawler-worker flyer-crawler-analytics-worker --namespace flyer-crawler-prod || {
            echo "Restart failed, attempting to start processes..."
            pm2 start ecosystem.config.cjs --namespace flyer-crawler-prod
          }

          echo "--- Saving PM2 Process List ---"
          pm2 save --namespace flyer-crawler-prod

          echo "--- Waiting 3 seconds for processes to stabilize ---"
          sleep 3

          echo "=== PRODUCTION ENVIRONMENT STATUS ==="
          pm2 ps --namespace flyer-crawler-prod

      - name: Final PM2 Status (All Processes)
        run: |
          echo "========================================="
          echo "FINAL PM2 STATUS - ALL PROCESSES"
          echo "========================================="

          if [ "${{ gitea.event.inputs.environment }}" = "test" ]; then
            echo "--- Test Namespace ---"
            pm2 ps --namespace flyer-crawler-test
            echo ""
            echo "--- PM2 Logs (Last 20 Lines) ---"
            pm2 logs --namespace flyer-crawler-test --lines 20 --nostream || echo "No logs available"
          elif [ "${{ gitea.event.inputs.environment }}" = "production" ]; then
            echo "--- Production Namespace ---"
            pm2 ps --namespace flyer-crawler-prod
            echo ""
            echo "--- PM2 Logs (Last 20 Lines) ---"
            pm2 logs --namespace flyer-crawler-prod --lines 20 --nostream || echo "No logs available"
          else
            echo "--- Test Namespace ---"
            pm2 ps --namespace flyer-crawler-test
            echo ""
            echo "--- Production Namespace ---"
            pm2 ps --namespace flyer-crawler-prod
            echo ""
            echo "--- PM2 Logs - Test (Last 10 Lines) ---"
            pm2 logs --namespace flyer-crawler-test --lines 10 --nostream || echo "No logs available"
            echo ""
            echo "--- PM2 Logs - Production (Last 10 Lines) ---"
            pm2 logs --namespace flyer-crawler-prod --lines 10 --nostream || echo "No logs available"
          fi
.gitea/workflows/sync-test-version.yml (new file, 100 lines)
@@ -0,0 +1,100 @@
# .gitea/workflows/sync-test-version.yml
#
# Lightweight workflow to sync version numbers from production to test environment.
# This runs after successful production deployments to update test PM2 metadata
# without re-running the full test suite, build, and deployment pipeline.
#
# Duration: ~30 seconds (vs 5+ minutes for full test deployment)
name: Sync Test Version

on:
  workflow_run:
    workflows: ["Deploy to Production"]
    types:
      - completed
    branches:
      - main

jobs:
  sync-version:
    runs-on: projectium.com
    # Only run if the production deployment succeeded
    if: ${{ gitea.event.workflow_run.conclusion == 'success' }}

    steps:
      - name: Checkout Latest Code
        uses: actions/checkout@v3
        with:
          fetch-depth: 1 # Shallow clone, we only need latest commit

      - name: Update Test Package Version
        run: |
          echo "========================================="
          echo "SYNCING VERSION TO TEST ENVIRONMENT"
          echo "========================================="

          APP_PATH="/var/www/flyer-crawler-test.projectium.com"

          # Get version from this repo's package.json
          NEW_VERSION=$(node -p "require('./package.json').version")
          echo "Production version: $NEW_VERSION"

          # Get current test version
          if [ -f "$APP_PATH/package.json" ]; then
            CURRENT_VERSION=$(node -p "require('$APP_PATH/package.json').version")
            echo "Current test version: $CURRENT_VERSION"
          else
            CURRENT_VERSION="unknown"
            echo "Test package.json not found"
          fi

          # Only update if versions differ
          if [ "$NEW_VERSION" != "$CURRENT_VERSION" ]; then
            echo "Updating test package.json to version $NEW_VERSION..."

            # Update just the version field in test's package.json
            cd "$APP_PATH"
            npm version "$NEW_VERSION" --no-git-tag-version --allow-same-version

            echo "✅ Test package.json updated to $NEW_VERSION"
          else
            echo "ℹ️ Versions already match, no update needed"
          fi
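npm version with --no-git-tag-version only rewrites the version field in package.json (and package-lock.json, if present) without creating a commit or tag. A rough node equivalent of that single edit, shown purely for illustration and not used by the workflow, would be:

    # Hypothetical one-liner doing the same version bump by editing package.json directly.
    node -e "const fs=require('fs');const p=JSON.parse(fs.readFileSync('package.json','utf8'));p.version=process.argv[1];fs.writeFileSync('package.json',JSON.stringify(p,null,2)+'\n');" "$NEW_VERSION"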
      - name: Restart Test PM2 Processes
        run: |
          echo "Restarting test PM2 processes to refresh version metadata..."

          # Restart with --update-env to pick up new package.json version
          pm2 restart flyer-crawler-api-test flyer-crawler-worker-test flyer-crawler-analytics-worker-test --update-env && pm2 save

          echo "✅ Test PM2 processes restarted and saved"

          # Show current state
          echo ""
          echo "--- Current PM2 State ---"
          pm2 list

          # Verify version in PM2 metadata
          echo ""
          echo "--- Verifying Version in PM2 ---"
          pm2 jlist | node -e "
          try {
            const list = JSON.parse(require('fs').readFileSync(0, 'utf-8'));
            const testProcesses = list.filter(p => p.name && p.name.endsWith('-test'));
            testProcesses.forEach(p => {
              console.log(p.name + ': v' + (p.pm2_env.version || 'unknown') + ' (' + p.pm2_env.status + ')');
            });
          } catch(e) {
            console.error('Failed to parse PM2 output');
          }
          "

      - name: Summary
        run: |
          echo "========================================="
          echo "VERSION SYNC COMPLETE"
          echo "========================================="
          echo "Test environment version updated to match production"
          echo "No tests run, no builds performed"
          echo "Duration: ~30 seconds"
.gitignore (vendored, 24 lines changed)
@@ -11,6 +11,22 @@ node_modules
dist
dist-ssr
*.local
.env
*.tsbuildinfo

# tsoa generated files (regenerated on build)
src/routes/tsoa-generated.ts
src/config/tsoa-spec.json

# Test coverage
coverage
.nyc_output
.coverage

# Test artifacts - flyer-images/ is a runtime directory
# Test fixtures are stored in src/tests/assets/ instead
flyer-images/
test-output.txt

# Editor directories and files
.vscode/*
@@ -22,3 +38,11 @@ dist-ssr
*.njsproj
*.sln
*.sw?
Thumbs.db
.claude/settings.local.json
nul
tmpclaude*



test.tmp
.husky/pre-commit (new file, 1 line)
@@ -0,0 +1 @@
FORCE_COLOR=0 npx lint-staged --quiet
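The hook defers everything to lint-staged, so what actually runs on commit is determined by the repository's lint-staged configuration (in package.json or a .lintstagedrc file), which is not shown in this diff. A quick way to inspect it, assuming it lives in package.json, is:

    # Show the lint-staged configuration the pre-commit hook will apply (prints undefined if it lives elsewhere).
    node -p "JSON.stringify(require('./package.json')['lint-staged'], null, 2)"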
Some files were not shown because too many files have changed in this diff.