diff --git a/.claude/settings.local.json b/.claude/settings.local.json
index e5daaed..16140c5 100644
--- a/.claude/settings.local.json
+++ b/.claude/settings.local.json
@@ -95,7 +95,10 @@
       "Bash(timeout 300 tail:*)",
       "mcp__filesystem__list_allowed_directories",
       "mcp__memory__add_observations",
-      "Bash(ssh:*)"
+      "Bash(ssh:*)",
+      "mcp__redis__list",
+      "Read(//d/gitea/bugsink-mcp/**)",
+      "Bash(d:/nodejs/npm.cmd install)"
     ]
   }
 }
diff --git a/CLAUDE-MCP.md b/CLAUDE-MCP.md
new file mode 100644
index 0000000..27e3b4e
--- /dev/null
+++ b/CLAUDE-MCP.md
@@ -0,0 +1,378 @@
+# Claude Code MCP Configuration Guide
+
+This document explains how to configure MCP (Model Context Protocol) servers for Claude Code, covering both the CLI and the VS Code extension.
+
+## The Two Config Files
+
+Claude Code uses **two separate configuration files** for MCP servers. They must be kept in sync manually.
+
+| File                      | Used By                       | Notes                                        |
+| ------------------------- | ----------------------------- | -------------------------------------------- |
+| `~/.claude.json`          | Claude CLI (`claude` command) | Requires `"type": "stdio"` in each server    |
+| `~/.claude/settings.json` | VS Code extension             | Simpler format, supports `"disabled": true`  |
+
+**Important:** Changes to one file do NOT automatically sync to the other!
+
+## File Locations (Windows)
+
+```
+C:\Users\<username>\.claude.json            # CLI config
+C:\Users\<username>\.claude\settings.json   # VS Code extension config
+```
+
+## Config Format Differences
+
+### VS Code Extension Format (`~/.claude/settings.json`)
+
+```json
+{
+  "mcpServers": {
+    "server-name": {
+      "command": "path/to/executable",
+      "args": ["arg1", "arg2"],
+      "env": {
+        "ENV_VAR": "value"
+      },
+      "disabled": true // Optional - disable without removing
+    }
+  }
+}
+```
+
+### CLI Format (`~/.claude.json`)
+
+The CLI config is a larger file with many settings. The `mcpServers` section is nested within it:
+
+```json
+{
+  "numStartups": 14,
+  "installMethod": "global",
+  // ... other settings ...
+  "mcpServers": {
+    "server-name": {
+      "type": "stdio", // REQUIRED for CLI
+      "command": "path/to/executable",
+      "args": ["arg1", "arg2"],
+      "env": {
+        "ENV_VAR": "value"
+      }
+    }
+  }
+  // ... more settings ...
+}
+```
+
+**Key difference:** The CLI format requires `"type": "stdio"` in each server definition.
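+
+Because the two files drift apart easily, a quick consistency check helps. The following is a minimal sketch (assuming Python 3 is available and the default Windows locations shown above; adjust the paths if yours differ):
+
+```python
+# check_mcp_sync.py - sketch: report MCP servers missing from either config file.
+import json
+from pathlib import Path
+
+cli_path = Path.home() / ".claude.json"                    # CLI config
+vscode_path = Path.home() / ".claude" / "settings.json"    # VS Code extension config
+
+cli = json.loads(cli_path.read_text(encoding="utf-8")).get("mcpServers", {})
+vscode = json.loads(vscode_path.read_text(encoding="utf-8")).get("mcpServers", {})
+
+print("Only in CLI config:    ", sorted(set(cli) - set(vscode)))
+print("Only in VS Code config:", sorted(set(vscode) - set(cli)))
+
+# The CLI requires "type": "stdio" on every server entry.
+missing_type = [name for name, server in cli.items() if server.get("type") != "stdio"]
+print('CLI entries missing "type": "stdio":', missing_type)
+```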
+
+## Common MCP Server Examples
+
+### Memory (Knowledge Graph)
+
+```json
+// VS Code format
+"memory": {
+  "command": "D:\\nodejs\\npx.cmd",
+  "args": ["-y", "@modelcontextprotocol/server-memory"]
+}
+
+// CLI format
+"memory": {
+  "type": "stdio",
+  "command": "D:\\nodejs\\npx.cmd",
+  "args": ["-y", "@modelcontextprotocol/server-memory"],
+  "env": {}
+}
+```
+
+### Filesystem
+
+```json
+// VS Code format
+"filesystem": {
+  "command": "d:\\nodejs\\node.exe",
+  "args": [
+    "c:\\Users\\<username>\\AppData\\Roaming\\npm\\node_modules\\@modelcontextprotocol\\server-filesystem\\dist\\index.js",
+    "d:\\path\\to\\project"
+  ]
+}
+
+// CLI format
+"filesystem": {
+  "type": "stdio",
+  "command": "d:\\nodejs\\node.exe",
+  "args": [
+    "c:\\Users\\<username>\\AppData\\Roaming\\npm\\node_modules\\@modelcontextprotocol\\server-filesystem\\dist\\index.js",
+    "d:\\path\\to\\project"
+  ],
+  "env": {}
+}
+```
+
+### Podman/Docker
+
+```json
+// VS Code format
+"podman": {
+  "command": "D:\\nodejs\\npx.cmd",
+  "args": ["-y", "podman-mcp-server@latest"],
+  "env": {
+    "DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
+  }
+}
+```
+
+### Gitea
+
+```json
+// VS Code format
+"gitea-myserver": {
+  "command": "d:\\gitea-mcp\\gitea-mcp.exe",
+  "args": ["run", "-t", "stdio"],
+  "env": {
+    "GITEA_HOST": "https://gitea.example.com",
+    "GITEA_ACCESS_TOKEN": "your-token-here"
+  }
+}
+```
+
+### Redis
+
+```json
+// VS Code format
+"redis": {
+  "command": "D:\\nodejs\\npx.cmd",
+  "args": ["-y", "@modelcontextprotocol/server-redis", "redis://localhost:6379"]
+}
+```
+
+### Bugsink (Error Tracking)
+
+**Important:** Bugsink has a different API from Sentry. Use `bugsink-mcp`, NOT `sentry-selfhosted-mcp`.
+
+**Note:** The `bugsink-mcp` npm package is NOT published. You must clone and build it from source:
+
+```bash
+# Clone and build bugsink-mcp
+git clone https://github.com/j-shelfwood/bugsink-mcp.git d:\gitea\bugsink-mcp
+cd d:\gitea\bugsink-mcp
+npm install
+npm run build
+```
+
+```json
+// VS Code format (using locally built version)
+"bugsink": {
+  "command": "d:\\nodejs\\node.exe",
+  "args": ["d:\\gitea\\bugsink-mcp\\dist\\index.js"],
+  "env": {
+    "BUGSINK_URL": "https://bugsink.example.com",
+    "BUGSINK_TOKEN": "your-api-token"
+  }
+}
+
+// CLI format
+"bugsink": {
+  "type": "stdio",
+  "command": "d:\\nodejs\\node.exe",
+  "args": ["d:\\gitea\\bugsink-mcp\\dist\\index.js"],
+  "env": {
+    "BUGSINK_URL": "https://bugsink.example.com",
+    "BUGSINK_TOKEN": "your-api-token"
+  }
+}
+```
+
+- GitHub: https://github.com/j-shelfwood/bugsink-mcp
+- Get an API token from the Bugsink UI: Settings > API Tokens
+- **Do NOT use npx** - the package is not on npm
+
+### Sentry (Cloud or Self-hosted)
+
+For actual Sentry instances (not Bugsink), use:
+
+```json
+"sentry": {
+  "command": "D:\\nodejs\\npx.cmd",
+  "args": ["-y", "@sentry/mcp-server"],
+  "env": {
+    "SENTRY_AUTH_TOKEN": "your-sentry-token"
+  }
+}
+```
+
+## Troubleshooting
+
+### Server Not Loading
+
+1. **Check both config files** - Make sure the server is defined in both `~/.claude.json` AND `~/.claude/settings.json`
+
+2. **Verify server order** - Servers load sequentially. Broken or slow servers can block others. Put important servers first.
+
+3. **Check for timeout** - Each server has 30 seconds to connect. Slow npx downloads can cause timeouts.
+
+4. **Fully restart VS Code** - A window reload is not enough. Close all VS Code windows and reopen.
+
+### Verifying Configuration
+
+**For CLI:**
+
+```bash
+claude mcp list
+```
+
+**For VS Code:**
+
+1. Open VS Code
+2. View → Output
+3. Select "Claude" from the dropdown
+4. Look for MCP server connection logs
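+
+Several of the errors below come down to a bad `command` path. A rough sketch of a path check (assuming Python 3 and the VS Code config location shown earlier; `npx`/`uvx` entries found on PATH are resolved with `shutil.which`):
+
+```python
+# check_mcp_paths.py - sketch: flag MCP entries whose command cannot be found.
+import json
+import shutil
+from pathlib import Path
+
+config_path = Path.home() / ".claude" / "settings.json"
+config = json.loads(config_path.read_text(encoding="utf-8"))
+
+for name, server in config.get("mcpServers", {}).items():
+    command = server.get("command", "")
+    # Absolute paths are checked directly; bare names are resolved via PATH.
+    ok = Path(command).exists() if Path(command).is_absolute() else shutil.which(command) is not None
+    if not ok:
+        print(f"{name}: command not found -> {command}")
+```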
+
+### Common Errors
+
+| Error                                 | Cause                         | Solution                                                                     |
+| ------------------------------------- | ----------------------------- | ---------------------------------------------------------------------------- |
+| `Connection timed out after 30000ms`  | Server took too long to start | Move the server earlier in the config, or use pre-installed packages instead of npx |
+| `npm error 404 Not Found`             | Package doesn't exist         | Check the package name spelling                                               |
+| `The system cannot find the path`     | Wrong executable path         | Verify the command path exists                                                |
+| `Connection closed`                   | Server crashed on startup     | Check server logs, verify environment variables                               |
+
+### Disabling Problem Servers
+
+In `~/.claude/settings.json`, add `"disabled": true`:
+
+```json
+"problem-server": {
+  "command": "...",
+  "args": ["..."],
+  "disabled": true
+}
+```
+
+**Note:** The CLI config (`~/.claude.json`) does not support the `disabled` flag. You must remove the server entirely from that file.
+
+## Adding a New MCP Server
+
+1. **Install/clone the MCP server** (if not using npx)
+
+2. **Add to VS Code config** (`~/.claude/settings.json`):
+
+   ```json
+   "new-server": {
+     "command": "path/to/command",
+     "args": ["arg1", "arg2"],
+     "env": { "VAR": "value" }
+   }
+   ```
+
+3. **Add to CLI config** (`~/.claude.json`) - find the `mcpServers` section:
+
+   ```json
+   "new-server": {
+     "type": "stdio",
+     "command": "path/to/command",
+     "args": ["arg1", "arg2"],
+     "env": { "VAR": "value" }
+   }
+   ```
+
+4. **Fully restart VS Code**
+
+5. **Verify with `claude mcp list`**
+
+## Quick Reference: Available MCP Servers
+
+| Server              | Package/Repo                                        | Purpose                     |
+| ------------------- | --------------------------------------------------- | --------------------------- |
+| memory              | `@modelcontextprotocol/server-memory`               | Knowledge graph persistence |
+| filesystem          | `@modelcontextprotocol/server-filesystem`           | File system access          |
+| redis               | `@modelcontextprotocol/server-redis`                | Redis cache inspection      |
+| postgres            | `@modelcontextprotocol/server-postgres`             | PostgreSQL queries          |
+| sequential-thinking | `@modelcontextprotocol/server-sequential-thinking`  | Step-by-step reasoning      |
+| podman              | `podman-mcp-server`                                 | Container management        |
+| gitea               | `gitea-mcp` (binary)                                | Gitea API access            |
+| bugsink             | `j-shelfwood/bugsink-mcp` (build from source)       | Error tracking for Bugsink  |
+| sentry              | `@sentry/mcp-server`                                | Error tracking for Sentry   |
+| playwright          | `@anthropics/mcp-server-playwright`                 | Browser automation          |
+
+## Best Practices
+
+1. **Keep configs in sync** - When you change one file, update the other
+
+2. **Order servers by importance** - Put essential servers (memory, filesystem) first
+
+3. **Disable instead of delete** - Use `"disabled": true` in settings.json to troubleshoot
+
+4. **Use node.exe directly** - For faster startup, install packages globally and use `node.exe` instead of `npx`
+
+5. **Store sensitive data in memory** - Use the memory MCP to store API tokens and config for future sessions
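+
+Best practice 1 (and steps 2-3 of "Adding a New MCP Server") can be scripted so both files are updated in one go. A minimal sketch, assuming Python 3 and the default config paths; the `new-server` entry below is only an illustration:
+
+```python
+# add_mcp_server.py - sketch: write one server entry into BOTH config files.
+import json
+from pathlib import Path
+
+name = "new-server"  # hypothetical server name, replace with your own
+entry = {"command": "path/to/command", "args": ["arg1", "arg2"], "env": {}}
+
+targets = {
+    Path.home() / ".claude" / "settings.json": dict(entry),        # VS Code format
+    Path.home() / ".claude.json": {"type": "stdio", **entry},      # CLI format needs "type"
+}
+
+for path, server in targets.items():
+    config = json.loads(path.read_text(encoding="utf-8"))
+    config.setdefault("mcpServers", {})[name] = server
+    path.write_text(json.dumps(config, indent=2), encoding="utf-8")
+    print(f"Added '{name}' to {path}")
+```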
+
+---
+
+## Future: MCP Launchpad
+
+**Project:** https://github.com/kenneth-liao/mcp-launchpad
+
+MCP Launchpad is a CLI tool that wraps multiple MCP servers into a single interface. Worth revisiting when:
+
+- [ ] Windows support is stable (currently experimental)
+- [ ] Available as an MCP server itself (currently Bash-based)
+
+**Why it's interesting:**
+
+| Benefit                | Description                                                     |
+| ---------------------- | --------------------------------------------------------------- |
+| Single config file     | No more syncing `~/.claude.json` and `~/.claude/settings.json`  |
+| Project-level configs  | Drop `mcp.json` in any project for instant MCP setup            |
+| Context window savings | One MCP server in context instead of 10+, reducing token usage  |
+| Persistent daemon      | Keeps server connections alive for faster repeated calls        |
+| Tool search            | Find tools across all servers with `mcpl search`                |
+
+**Current limitations:**
+
+- Experimental Windows support
+- Requires Python 3.13+ and uv
+- Claude calls tools via Bash instead of native MCP integration
+- Different mental model (runtime discovery vs startup loading)
+
+---
+
+## Future: Graphiti (Advanced Knowledge Graph)
+
+**Project:** https://github.com/getzep/graphiti
+
+Graphiti provides temporal-aware knowledge graphs - it tracks not just facts, but _when_ they became true or outdated. It is much more powerful than the simple memory MCP, but it requires significant infrastructure.
+
+**Ideal setup:** Run on a Linux server, connect via HTTP from Windows:
+
+```json
+// Windows client config (settings.json)
+"graphiti": {
+  "type": "sse",
+  "url": "http://linux-server:8000/mcp/"
+}
+```
+
+**Linux server setup:**
+
+```bash
+git clone https://github.com/getzep/graphiti.git
+cd graphiti/mcp_server
+docker compose up -d  # Starts FalkorDB + MCP server on port 8000
+```
+
+**Requirements:**
+
+- Docker on the Linux server
+- OpenAI API key (for embeddings)
+- Port 8000 open on the LAN
+
+**Benefits of remote deployment:**
+
+- Heavy lifting (Neo4j/FalkorDB + embeddings) offloaded to Linux
+- Always-on server; Windows connects and disconnects freely
+- Multiple machines can share the same knowledge graph
+- Avoids Windows Docker/WSL2 complexity
+
+---
+
+_Last updated: January 2026_
diff --git a/CLAUDE.md b/CLAUDE.md
index 0482558..dbd14c8 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -1,5 +1,35 @@
 # Claude Code Project Instructions

+## Session Startup Checklist
+
+**IMPORTANT**: At the start of every session, perform these steps:
+
+1. **Check Memory First** - Use `mcp__memory__read_graph` or `mcp__memory__search_nodes` to recall:
+   - Project-specific configurations and credentials
+   - Previous work context and decisions
+   - Infrastructure details (URLs, ports, access patterns)
+   - Known issues and their solutions
+
+2. **Review Recent Git History** - Check `git log --oneline -10` to understand recent changes
+
+3. **Check Container Status** - Use `mcp__podman__container_list` to see what's running
+
+---
+
+## Project Instructions
+
+### Things to Remember
+
+Before writing any code:
+
+1. State how you will verify this change works (test, bash command, browser check, etc.)
+
+2. Write the test or verification step first
+
+3. Then implement the code
+
+4. Run verification and iterate until it passes
+
 ## Communication Style: Ask Before Assuming

 **IMPORTANT**: When helping with tasks, **ask clarifying questions before making assumptions**. Do not assume:
diff --git a/notes-to-ai5.txt b/notes-to-ai5.txt
new file mode 100644
index 0000000..c8b605a
--- /dev/null
+++ b/notes-to-ai5.txt
@@ -0,0 +1,196 @@
+What is the correct named pipe address for Podman on Windows if it's not using the default docker_engine pipe?
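+
+One way to check (a sketch, assuming Python 3 on the Windows host; not a confirmed answer): list the named pipes and look for the Podman machine. The podman MCP config elsewhere in this repo uses DOCKER_HOST npipe:////./pipe/podman-machine-default.
+
+# list_pipes.py - print Windows named pipes mentioning podman or docker
+import os
+for pipe in os.listdir(r"\\.\pipe"):
+    if "podman" in pipe.lower() or "docker" in pipe.lower():
+        print(pipe)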
+
+How do I configure the chrome-devtools-mcp server to connect to an existing Chrome instance using the browser_url input?
+
+Unused Input: The browser_url input is defined but not used in any server arguments. If you intend to connect chrome-devtools-mcp to an existing Chrome instance, you would typically pass that URL in the arguments instead of launching a new headless instance.
+
+
+Based on your current configuration and project setup (Gitea, Podman, PostgreSQL, Redis), here are several other valuable MCP servers available for VS Code.
+
+Recommended for Your Stack
+Since you are running a PostgreSQL and Redis stack via Podman, these servers would be immediately useful:
+
+PostgreSQL (@modelcontextprotocol/server-postgres)
+
+Purpose: Allows the AI to query your database directly (inspect schemas, run SELECT queries) without needing to shell into the container.
+Config:
+json
+"postgres": {
+  "command": "npx",
+  "args": ["-y", "@modelcontextprotocol/server-postgres", "postgresql://postgres:postgres@localhost:5432/flyer_crawler_dev"]
+}
+
+Redis (@modelcontextprotocol/server-redis)
+
+Purpose: Inspect and modify keys in your Redis instance.
+Config:
+json
+"redis": {
+  "command": "npx",
+  "args": ["-y", "@modelcontextprotocol/server-redis", "redis://localhost:6379"]
+}
+
+General Utility Servers
+These servers enhance the AI's core capabilities regardless of the specific project:
+
+Memory (@modelcontextprotocol/server-memory)
+
+Purpose: Gives the AI a persistent "knowledge graph" to remember context, user preferences, and project details across different chat sessions.
+Config:
+json
+"memory": {
+  "command": "npx",
+  "args": ["-y", "@modelcontextprotocol/server-memory"]
+}
+
+Brave Search (@modelcontextprotocol/server-brave-search)
+
+Purpose: Performs actual web searches (finding current docs, libraries, or solutions) rather than just fetching specific URLs like the fetch server does.
+Config:
+json
+"brave-search": {
+  "command": "npx",
+  "args": ["-y", "@modelcontextprotocol/server-brave-search"],
+  "env": {
+    "BRAVE_API_KEY": "YOUR_API_KEY_HERE"
+  }
+}
+
+Sequential Thinking (@modelcontextprotocol/server-sequential-thinking)
+
+Purpose: A tool that helps the AI break down complex problems into step-by-step thought processes. It is particularly useful for debugging difficult logic or architectural planning.
+Config:
+json
+"sequential-thinking": {
+  "command": "npx",
+  "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
+}
+
+Other Popular Servers
+
+GitHub: (@modelcontextprotocol/server-github) - If you ever need to interact with GitHub.com in addition to Gitea.
+
+Google Drive: (@modelcontextprotocol/server-google-drive) - Access files in Google Drive.
+
+Slack: (@modelcontextprotocol/server-slack) - Read and send Slack messages.
+
+Sentry: (@modelcontextprotocol/server-sentry) - Analyze error reports directly from Sentry.
+
+How to Find More
+The community maintains a comprehensive list of servers in the Awesome MCP Servers repository. You can find servers for almost any major tool (AWS, Azure, Linear, Jira, etc.).
+
+To add any of these, you simply add a new entry to the mcpServers object in your mcp.json file, similar to how you configured podman or fetch.
+
+
+Create a new ADR for "Worker Health Checks" to monitor stalled background jobs.
+
+Storybook! for UI
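+
+Quick sanity check before wiring the postgres/redis servers above into the config below (a sketch, assuming Python 3; the host and ports come from the connection strings above):
+
+# check_ports.py - confirm local PostgreSQL and Redis are reachable
+import socket
+for name, port in [("postgres", 5432), ("redis", 6379)]:
+    with socket.socket() as s:
+        s.settimeout(2)
+        status = "reachable" if s.connect_ex(("localhost", port)) == 0 else "NOT reachable"
+        print(f"{name}: {status} on port {port}")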
+
+
+{
+  "mcpServers": {
+    "gitea-projectium": {
+      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
+      "args": ["run", "-t", "stdio"],
+      "env": {
+        "GITEA_HOST": "https://gitea.projectium.com",
+        "GITEA_ACCESS_TOKEN": "b111259253aa3cadcb6a37618de03bf388f6235a"
+      }
+    },
+    "gitea-torbonium": {
+      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
+      "args": ["run", "-t", "stdio"],
+      "env": {
+        "GITEA_HOST": "https://gitea.torbonium.com",
+        "GITEA_ACCESS_TOKEN": "563d01f9edc792b6dd09bf4cbd3a98bce45360a4"
+      }
+    },
+    "gitea-lan": {
+      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
+      "args": ["run", "-t", "stdio"],
+      "env": {
+        "GITEA_HOST": "https://gitea.torbolan.com",
+        "GITEA_ACCESS_TOKEN": "YOUR_LAN_TOKEN_HERE"
+      },
+      "disabled": true
+    },
+    "podman": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "podman-mcp-server@latest"],
+      "env": {
+        "DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
+      }
+    },
+    "filesystem": {
+      "command": "d:\\nodejs\\node.exe",
+      "args": [
+        "c:\\Users\\games3\\AppData\\Roaming\\npm\\node_modules\\@modelcontextprotocol\\server-filesystem\\dist\\index.js",
+        "d:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
+      ]
+    },
+    "fetch": {
+      "command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
+      "args": ["mcp-server-fetch"]
+    },
+    "chrome-devtools": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": [
+        "chrome-devtools-mcp@latest",
+        "--headless",
+        "false",
+        "--isolated",
+        "false",
+        "--channel",
+        "stable"
+      ],
+      "disabled": true
+    },
+    "markitdown": {
+      "command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
+      "args": ["markitdown-mcp"]
+    },
+    "sequential-thinking": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
+    },
+    "memory": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@modelcontextprotocol/server-memory"]
+    },
+    "postgres": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@modelcontextprotocol/server-postgres", "postgresql://postgres:postgres@localhost:5432/flyer_crawler_dev"]
+    },
+    "playwright": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@anthropics/mcp-server-playwright"]
+    },
+    "redis": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@modelcontextprotocol/server-redis", "redis://localhost:6379"]
+    }
+  }
+}