Compare commits

46 Commits

| SHA1 |
|---|
| a77b8ee123 |
| 498dac5230 |
| af0b92461a |
| 89f0354ccc |
| 6a267d074b |
| bcde33c7d0 |
| ee3268fbe0 |
| f6a38ffaa8 |
| b13ffce0a0 |
| b39e01efd7 |
| 98eea5b6f9 |
| fe36ed91f2 |
| 8c85f9ca5f |
| 98df35a33e |
| 70d6963d0d |
| 54c6694117 |
| 2402f88daf |
| 1cf1dbefb8 |
| dafa8db8bb |
| 65e79efb24 |
| 5ffc13b635 |
| 50bfd20fd4 |
| c14f1f46cd |
| 52c8371f4a |
| f8d6d42150 |
| 469487f6ed |
| 7a2966367d |
| 0466b299a7 |
| b34304ed57 |
| 96963531fc |
| 4c9a7c55ae |
| 8a75203251 |
| da6e81260e |
| e1f1335655 |
| b017db83a1 |
| bc136fab7e |
| 6c24bcbb91 |
| 11a05799d3 |
| 403271dc0c |
| cc4abf67b9 |
| 35cf20e02d |
| 5209f82efb |
| 1f55387e9e |
| 32bbca73ba |
| 0e6999ea21 |
| 6cf3c1830c |
@@ -6,7 +6,7 @@
   },
   "metadata": {
     "description": "Project management plugins with Gitea and NetBox integrations",
-    "version": "3.1.0"
+    "version": "4.0.0"
   },
   "plugins": [
     {
@@ -149,6 +149,22 @@
       "category": "development",
       "tags": ["code-review", "pull-requests", "security", "quality"],
       "license": "MIT"
+    },
+    {
+      "name": "data-platform",
+      "version": "1.0.0",
+      "description": "Data engineering tools with pandas, PostgreSQL/PostGIS, and dbt integration",
+      "source": "./plugins/data-platform",
+      "author": {
+        "name": "Leo Miranda",
+        "email": "leobmiranda@gmail.com"
+      },
+      "homepage": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace/src/branch/main/plugins/data-platform/README.md",
+      "repository": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace.git",
+      "mcpServers": ["./.mcp.json"],
+      "category": "data",
+      "tags": ["pandas", "postgresql", "postgis", "dbt", "data-engineering", "etl"],
+      "license": "MIT"
     }
   ]
 }
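Elsewhere in this diff a `validate-marketplace.sh` script is referenced for marketplace compliance checks. Purely as an illustration of what validating the plugin entries shown above could involve (the real script is bash and is not part of this excerpt; the required-key set below is an assumption), a minimal Python sketch:

```python
# Illustrative sketch only: verify each plugin entry in marketplace.json carries
# the fields visible in the diff above. Not the actual validate-marketplace.sh.
import json
from pathlib import Path

REQUIRED_KEYS = {"name", "version", "description", "source", "author", "category", "license"}

def check_marketplace(path: str = ".claude-plugin/marketplace.json") -> list[str]:
    data = json.loads(Path(path).read_text())
    problems = []
    for plugin in data.get("plugins", []):
        missing = REQUIRED_KEYS - plugin.keys()
        if missing:
            problems.append(f"{plugin.get('name', '<unnamed>')}: missing {sorted(missing)}")
    return problems

if __name__ == "__main__":
    for issue in check_marketplace():
        print("WARN:", issue)
```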
CHANGELOG.md (74 changed lines)

@@ -4,6 +4,80 @@ All notable changes to the Leo Claude Marketplace will be documented in this fil
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## [4.0.0] - 2026-01-25
+
+### Added
+
+#### New Plugin: data-platform v1.0.0
+- **pandas MCP Tools** (14 tools): DataFrame operations with Arrow IPC data_ref persistence
+  - `read_csv`, `read_parquet`, `read_json` - Load data with chunking support
+  - `to_csv`, `to_parquet` - Export to various formats
+  - `describe`, `head`, `tail` - Data exploration
+  - `filter`, `select`, `groupby`, `join` - Data transformation
+  - `list_data`, `drop_data` - Memory management
+
+- **PostgreSQL MCP Tools** (10 tools): Database operations with asyncpg connection pooling
+  - `pg_connect`, `pg_query`, `pg_execute` - Core database operations
+  - `pg_tables`, `pg_columns`, `pg_schemas` - Schema exploration
+  - `st_tables`, `st_geometry_type`, `st_srid`, `st_extent` - PostGIS spatial support
+
+- **dbt MCP Tools** (8 tools): Build tool wrapper with pre-execution validation
+  - `dbt_parse` - Pre-flight validation (catches dbt 1.9+ deprecations)
+  - `dbt_run`, `dbt_test`, `dbt_build` - Execution with auto-validation
+  - `dbt_compile`, `dbt_ls`, `dbt_docs_generate`, `dbt_lineage` - Analysis tools
+
+- **Commands**: `/ingest`, `/profile`, `/schema`, `/explain`, `/lineage`, `/run`
+- **Agents**: `data-ingestion` (loading/transformation), `data-analysis` (exploration/profiling)
+- **SessionStart Hook**: Graceful PostgreSQL connection check (non-blocking warning)
+
+- **Key Features**:
+  - data_ref system for DataFrame persistence across tool calls
+  - 100k row limit with chunking support for large datasets
+  - Hybrid configuration (system: `~/.config/claude/postgres.env`, project: `.env`)
+  - Auto-detection of dbt projects
+  - Arrow IPC format for efficient memory management
+
+---
+
+## [3.2.0] - 2026-01-24
+
+### Added
+- **git-flow:** `/commit` now detects protected branches before committing
+  - Warns when on protected branch (main, master, development, staging, production)
+  - Offers to create feature branch automatically instead of committing directly
+  - Configurable via `GIT_PROTECTED_BRANCHES` environment variable
+- **netbox:** Platform and primary_ip parameters added to device update tools
+- **claude-config-maintainer:** Auto-enforce mandatory behavior rules via SessionStart hook
+- **scripts:** `release.sh` - Versioning workflow script for consistent releases
+- **scripts:** `verify-hooks.sh` - Verify all hooks are command type
+
+### Changed
+- **doc-guardian:** Hook switched from `prompt` type to `command` type
+  - Prompt hooks unreliable - Claude ignores explicit instructions
+  - New `notify.sh` bash script guarantees exact output behavior
+  - Only notifies for config file changes (commands/, agents/, skills/, hooks/)
+  - Silent exit for all other files - no blocking possible
+- **All hooks:** Converted to command type with stricter plugin prefix enforcement
+  - All hooks now mandate `[plugin-name]` prefix with "NO EXCEPTIONS" rule
+  - Simplified output formats with word limits
+  - Consistent structure across projman, pr-review, code-sentinel, doc-guardian
+- **CLAUDE.md:** Replaced destructive "ALWAYS CLEAR CACHE" rule with "VERIFY AND RESTART"
+  - Cache clearing mid-session breaks MCP tools
+  - Added guidance for proper plugin development workflow
+
+### Fixed
+- **cmdb-assistant:** Complete MCP tool schemas for update operations (#138)
+- **netbox:** Shorten tool names to meet 64-char API limit (#134)
+- **cmdb-assistant:** Correct NetBox API URL format in setup wizard (#132)
+- **gitea/projman:** Type safety for `create_label_smart`, curl-based debug-report (#124)
+- **netbox:** Add diagnostic logging for JSON parse errors (#121)
+- **labels:** Add duplicate check before creating labels (#116)
+- **hooks:** Convert ALL hooks to command type with proper prefixes (#114)
+- Protected branch workflow: Claude no longer commits directly to protected branches (fixes #109)
+- doc-guardian hook no longer blocks workflow (fixes #110)
+
+---
+
 ## [3.1.1] - 2026-01-22
 
 ### Added
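The git-flow 3.2.0 entry above describes protected-branch detection driven by a `GIT_PROTECTED_BRANCHES` environment variable. The actual `/commit` command is not shown in this diff; as a rough illustration of the behaviour described (function names here are hypothetical):

```python
# Hypothetical sketch of the protected-branch check described in the 3.2.0 notes;
# the real git-flow implementation is not reproduced in this diff.
import os
import subprocess

DEFAULT_PROTECTED = ["main", "master", "development", "staging", "production"]

def current_branch() -> str:
    return subprocess.run(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"],
        capture_output=True, text=True, check=True,
    ).stdout.strip()

def is_protected(branch: str) -> bool:
    # GIT_PROTECTED_BRANCHES (comma-separated) overrides the default list.
    configured = os.getenv("GIT_PROTECTED_BRANCHES")
    protected = [b.strip() for b in configured.split(",")] if configured else DEFAULT_PROTECTED
    return branch in protected

if __name__ == "__main__":
    branch = current_branch()
    if is_protected(branch):
        print(f"WARNING: '{branch}' is protected - consider creating a feature branch first.")
```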
CLAUDE.md (122 changed lines)

@@ -1,18 +1,63 @@
 # CLAUDE.md
 
 This file provides guidance to Claude Code when working with code in this repository.
+
+## ⛔ MANDATORY BEHAVIOR RULES - READ FIRST
+
+**These rules are NON-NEGOTIABLE. Violating them wastes the user's time and money.**
+
+### 1. WHEN USER ASKS YOU TO CHECK SOMETHING - CHECK EVERYTHING
+- Search ALL locations, not just where you think it is
+- Check cache directories: `~/.claude/plugins/cache/`
+- Check installed: `~/.claude/plugins/marketplaces/`
+- Check source: `~/claude-plugins-work/`
+- **NEVER say "no" or "that's not the issue" without exhaustive verification**
+
+### 2. WHEN USER SAYS SOMETHING IS WRONG - BELIEVE THEM
+- The user knows their system better than you
+- Investigate thoroughly before disagreeing
+- If user suspects cache, CHECK THE CACHE
+- If user suspects a file, READ THE FILE
+- **Your confidence is often wrong. User's instincts are often right.**
+
+### 3. NEVER SAY "DONE" WITHOUT VERIFICATION
+- Run the actual command/script to verify
+- Show the output to the user
+- Check ALL affected locations
+- **"Done" means VERIFIED WORKING, not "I made changes"**
+
+### 4. SHOW EXACTLY WHAT USER ASKS FOR
+- If user asks for messages, show the MESSAGES
+- If user asks for code, show the CODE
+- If user asks for output, show the OUTPUT
+- **Don't interpret or summarize unless asked**
+
+### 5. AFTER PLUGIN UPDATES - VERIFY AND RESTART
+
+**⚠️ DO NOT clear cache mid-session** - this breaks MCP tools that are already loaded.
+
+1. Run `./scripts/verify-hooks.sh` to check hook types
+2. If changes affect MCP servers or hooks, inform the user:
+   > "Plugin changes require a session restart to take effect. Please restart Claude Code."
+3. Cache clearing is ONLY safe **before** starting a new session (not during)
+
+See `docs/DEBUGGING-CHECKLIST.md` for details on cache timing.
+
+**FAILURE TO FOLLOW THESE RULES = WASTED USER TIME = UNACCEPTABLE**
+
+---
+
 
 ## Project Overview
 
 **Repository:** leo-claude-mktplace
-**Version:** 3.0.1
+**Version:** 3.1.2
 **Status:** Production Ready
 
 A plugin marketplace for Claude Code containing:
 
 | Plugin | Description | Version |
 |--------|-------------|---------|
-| `projman` | Sprint planning and project management with Gitea integration | 3.0.0 |
+| `projman` | Sprint planning and project management with Gitea integration | 3.1.0 |
 | `git-flow` | Git workflow automation with smart commits and branch management | 1.0.0 |
 | `pr-review` | Multi-agent PR review with confidence scoring | 1.0.0 |
 | `clarity-assist` | Prompt optimization with ND-friendly accommodations | 1.0.0 |
@@ -59,7 +104,7 @@ leo-claude-mktplace/
 │ │ ├── .claude-plugin/plugin.json
 │ │ ├── .mcp.json
 │ │ ├── mcp-servers/gitea -> ../../../mcp-servers/gitea # SYMLINK
-│ │ ├── commands/ # 12 commands (incl. setup)
+│ │ ├── commands/ # 13 commands (incl. setup, debug)
 │ │ ├── hooks/ # SessionStart mismatch detection
 │ │ ├── agents/ # 4 agents
 │ │ └── skills/label-taxonomy/
@@ -85,7 +130,9 @@ leo-claude-mktplace/
 │ └── project-hygiene/
 ├── scripts/
 │ ├── setup.sh, post-update.sh
-│ └── validate-marketplace.sh # Marketplace compliance validation
+│ ├── validate-marketplace.sh # Marketplace compliance validation
+│ ├── verify-hooks.sh # Verify all hooks are command type
+│ └── check-venv.sh # Check MCP server venvs exist
 └── docs/
     ├── CANONICAL-PATHS.md # Single source of truth for paths
     └── CONFIGURATION.md # Centralized configuration guide
@@ -132,12 +179,12 @@ leo-claude-mktplace/
 
 | Category | Tools |
 |----------|-------|
-| Issues | `list_issues`, `get_issue`, `create_issue`, `update_issue`, `add_comment` |
+| Issues | `list_issues`, `get_issue`, `create_issue`, `update_issue`, `add_comment`, `aggregate_issues` |
-| Labels | `get_labels`, `suggest_labels`, `create_label` |
+| Labels | `get_labels`, `suggest_labels`, `create_label`, `create_label_smart` |
-| Milestones | `list_milestones`, `get_milestone`, `create_milestone`, `update_milestone` |
+| Milestones | `list_milestones`, `get_milestone`, `create_milestone`, `update_milestone`, `delete_milestone` |
-| Dependencies | `list_issue_dependencies`, `create_issue_dependency`, `get_execution_order` |
+| Dependencies | `list_issue_dependencies`, `create_issue_dependency`, `remove_issue_dependency`, `get_execution_order` |
-| Wiki | `list_wiki_pages`, `get_wiki_page`, `create_wiki_page`, `create_lesson`, `search_lessons` |
+| Wiki | `list_wiki_pages`, `get_wiki_page`, `create_wiki_page`, `update_wiki_page`, `create_lesson`, `search_lessons` |
-| **Pull Requests** | `list_pull_requests`, `get_pull_request`, `get_pr_diff`, `get_pr_comments`, `create_pr_review`, `add_pr_comment` *(NEW v3.0.0)* |
+| **Pull Requests** | `list_pull_requests`, `get_pull_request`, `get_pr_diff`, `get_pr_comments`, `create_pr_review`, `add_pr_comment` |
 | Validation | `validate_repo_org`, `get_branch_protection` |
 
 ### Hybrid Configuration
@@ -246,13 +293,56 @@ See `docs/DEBUGGING-CHECKLIST.md` for systematic troubleshooting.
 - `/debug-report` - Run full diagnostics, create issue if needed
 - `/debug-review` - Investigate and propose fixes
 
-## Versioning Rules
+## Versioning Workflow
 
-- Version displayed ONLY in main `README.md` title: `# Leo Claude Marketplace - vX.Y.Z`
-- `CHANGELOG.md` is authoritative for version history
-- Follow [SemVer](https://semver.org/): MAJOR.MINOR.PATCH
-- On release: Update README title → CHANGELOG → marketplace.json → plugin.json files
+This project follows [SemVer](https://semver.org/) and [Keep a Changelog](https://keepachangelog.com).
+
+### Version Locations (must stay in sync)
+
+| Location | Format | Example |
+|----------|--------|---------|
+| Git tags | `vX.Y.Z` | `v3.2.0` |
+| README.md title | `# Leo Claude Marketplace - vX.Y.Z` | `v3.2.0` |
+| marketplace.json | `"version": "X.Y.Z"` | `3.2.0` |
+| CHANGELOG.md | `## [X.Y.Z] - YYYY-MM-DD` | `[3.2.0] - 2026-01-24` |
+
+### During Development
+
+**All changes go under `[Unreleased]` in CHANGELOG.md.** Never create a versioned section until release time.
+
+```markdown
+## [Unreleased]
+
+### Added
+- New feature description
+
+### Fixed
+- Bug fix description
+```
+
+### Creating a Release
+
+Use the release script to ensure consistency:
+
+```bash
+./scripts/release.sh 3.2.0
+```
+
+The script will:
+1. Validate `[Unreleased]` section has content
+2. Replace `[Unreleased]` with `[3.2.0] - YYYY-MM-DD`
+3. Update README.md title
+4. Update marketplace.json version
+5. Commit and create git tag
+
+### SemVer Guidelines
+
+| Change Type | Version Bump | Example |
+|-------------|--------------|---------|
+| Bug fixes only | PATCH (x.y.**Z**) | 3.1.1 → 3.1.2 |
+| New features (backwards compatible) | MINOR (x.**Y**.0) | 3.1.2 → 3.2.0 |
+| Breaking changes | MAJOR (**X**.0.0) | 3.2.0 → 4.0.0 |
+
 ---
 
-**Last Updated:** 2026-01-22
+**Last Updated:** 2026-01-24
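The release steps listed in the CLAUDE.md hunk above are implemented by `scripts/release.sh` (bash, not shown in this diff). Purely as an illustration of the same five-step sequence, a Python sketch might look like the following; the file paths and tag format follow the version-locations table above, everything else is an assumption:

```python
# Illustrative sketch of the release sequence described above; scripts/release.sh
# is the real implementation and this is not it.
import datetime
import re
import subprocess
import sys
from pathlib import Path

def release(version: str) -> None:
    changelog = Path("CHANGELOG.md")
    text = changelog.read_text()
    if "## [Unreleased]" not in text:
        sys.exit("No [Unreleased] section to release")

    # 1-2. Promote [Unreleased] to a dated, versioned section.
    today = datetime.date.today().isoformat()
    changelog.write_text(text.replace("## [Unreleased]", f"## [{version}] - {today}", 1))

    # 3. Update the README.md title version.
    readme = Path("README.md")
    readme.write_text(re.sub(r"- v\d+\.\d+\.\d+", f"- v{version}", readme.read_text(), count=1))

    # 4. Update the marketplace.json metadata version.
    mkt = Path(".claude-plugin/marketplace.json")
    mkt.write_text(re.sub(r'"version": "\d+\.\d+\.\d+"', f'"version": "{version}"', mkt.read_text(), count=1))

    # 5. Commit and tag.
    subprocess.run(["git", "commit", "-am", f"release: v{version}"], check=True)
    subprocess.run(["git", "tag", f"v{version}"], check=True)

if __name__ == "__main__":
    release(sys.argv[1])
```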
README.md (55 changed lines)

@@ -1,4 +1,4 @@
-# Leo Claude Marketplace - v3.1.1
+# Leo Claude Marketplace - v4.0.0
 
 A collection of Claude Code plugins for project management, infrastructure automation, and development workflows.
 
@@ -19,7 +19,7 @@ AI-guided sprint planning with full Gitea integration. Transforms a proven 15-sp
 - Branch-aware security (development/staging/production)
 - Pre-sprint-close code quality review and test verification
 
-**Commands:** `/sprint-plan`, `/sprint-start`, `/sprint-status`, `/sprint-close`, `/labels-sync`, `/initial-setup`, `/project-init`, `/project-sync`, `/review`, `/test-check`, `/test-gen`
+**Commands:** `/sprint-plan`, `/sprint-start`, `/sprint-status`, `/sprint-close`, `/labels-sync`, `/initial-setup`, `/project-init`, `/project-sync`, `/review`, `/test-check`, `/test-gen`, `/debug-report`, `/debug-review`
 
 #### [git-flow](./plugins/git-flow/README.md) *NEW in v3.0.0*
 **Git Workflow Automation**
@@ -96,6 +96,21 @@ Full CRUD operations for network infrastructure management directly from Claude
 
 **Commands:** `/initial-setup`, `/cmdb-search`, `/cmdb-device`, `/cmdb-ip`, `/cmdb-site`
 
+### Data Engineering
+
+#### [data-platform](./plugins/data-platform/README.md) *NEW*
+**pandas, PostgreSQL/PostGIS, and dbt Integration**
+
+Comprehensive data engineering toolkit with persistent DataFrame storage.
+
+- 14 pandas tools with Arrow IPC data_ref system
+- 10 PostgreSQL/PostGIS tools with connection pooling
+- 8 dbt tools with automatic pre-validation
+- 100k row limit with chunking support
+- Auto-detection of dbt projects
+
+**Commands:** `/ingest`, `/profile`, `/schema`, `/explain`, `/lineage`, `/run`
+
 ## MCP Servers
 
 MCP servers are **shared at repository root** with **symlinks** from plugins that use them.
@@ -106,11 +121,11 @@ Full Gitea API integration for project management.
 
 | Category | Tools |
 |----------|-------|
-| Issues | `list_issues`, `get_issue`, `create_issue`, `update_issue`, `add_comment` |
+| Issues | `list_issues`, `get_issue`, `create_issue`, `update_issue`, `add_comment`, `aggregate_issues` |
-| Labels | `get_labels`, `suggest_labels`, `create_label` |
+| Labels | `get_labels`, `suggest_labels`, `create_label`, `create_label_smart` |
-| Wiki | `list_wiki_pages`, `get_wiki_page`, `create_wiki_page`, `create_lesson`, `search_lessons` |
+| Wiki | `list_wiki_pages`, `get_wiki_page`, `create_wiki_page`, `update_wiki_page`, `create_lesson`, `search_lessons` |
-| Milestones | `list_milestones`, `get_milestone`, `create_milestone`, `update_milestone` |
+| Milestones | `list_milestones`, `get_milestone`, `create_milestone`, `update_milestone`, `delete_milestone` |
-| Dependencies | `list_issue_dependencies`, `create_issue_dependency`, `get_execution_order` |
+| Dependencies | `list_issue_dependencies`, `create_issue_dependency`, `remove_issue_dependency`, `get_execution_order` |
 | **Pull Requests** | `list_pull_requests`, `get_pull_request`, `get_pr_diff`, `get_pr_comments`, `create_pr_review`, `add_pr_comment` *(NEW in v3.0.0)* |
 | Validation | `validate_repo_org`, `get_branch_protection` |
 
@@ -126,6 +141,17 @@ Comprehensive NetBox REST API integration for infrastructure management.
 | Virtualization | Clusters, VMs, Interfaces |
 | Extras | Tags, Custom Fields, Audit Log |
 
+### Data Platform MCP Server (shared) *NEW*
+
+pandas, PostgreSQL/PostGIS, and dbt integration for data engineering.
+
+| Category | Tools |
+|----------|-------|
+| pandas | `read_csv`, `read_parquet`, `read_json`, `to_csv`, `to_parquet`, `describe`, `head`, `tail`, `filter`, `select`, `groupby`, `join`, `list_data`, `drop_data` |
+| PostgreSQL | `pg_connect`, `pg_query`, `pg_execute`, `pg_tables`, `pg_columns`, `pg_schemas` |
+| PostGIS | `st_tables`, `st_geometry_type`, `st_srid`, `st_extent` |
+| dbt | `dbt_parse`, `dbt_run`, `dbt_test`, `dbt_build`, `dbt_compile`, `dbt_ls`, `dbt_docs_generate`, `dbt_lineage` |
+
 ## Installation
 
 ### Prerequisites
@@ -222,6 +248,7 @@ After installing plugins, the `/plugin` command may show `(no content)` - this i
 | code-sentinel | `/code-sentinel:security-scan` |
 | claude-config-maintainer | `/claude-config-maintainer:config-analyze` |
 | cmdb-assistant | `/cmdb-assistant:cmdb-search` |
+| data-platform | `/data-platform:ingest` |
 
 ## Repository Structure
 
@@ -231,12 +258,14 @@ leo-claude-mktplace/
 │ └── marketplace.json
 ├── mcp-servers/ # SHARED MCP servers (v3.0.0+)
 │ ├── gitea/ # Gitea MCP (issues, PRs, wiki)
-│ └── netbox/ # NetBox MCP (CMDB)
+│ ├── netbox/ # NetBox MCP (CMDB)
+│ └── data-platform/ # Data engineering (pandas, PostgreSQL, dbt)
 ├── plugins/ # All plugins
 │ ├── projman/ # Sprint management
-│ ├── git-flow/ # Git workflow automation (NEW)
+│ ├── git-flow/ # Git workflow automation
-│ ├── pr-review/ # PR review (NEW)
+│ ├── pr-review/ # PR review
-│ ├── clarity-assist/ # Prompt optimization (NEW)
+│ ├── clarity-assist/ # Prompt optimization
+│ ├── data-platform/ # Data engineering (NEW)
 │ ├── claude-config-maintainer/ # CLAUDE.md optimization
 │ ├── cmdb-assistant/ # NetBox CMDB integration
 │ ├── doc-guardian/ # Documentation drift detection
@@ -245,7 +274,8 @@ leo-claude-mktplace/
 ├── docs/ # Documentation
 │ ├── CANONICAL-PATHS.md # Path reference
 │ └── CONFIGURATION.md # Setup guide
-└── scripts/ # Setup scripts
+├── scripts/ # Setup scripts
+└── CHANGELOG.md # Version history
 ```
 
 ## Documentation
@@ -257,6 +287,7 @@ leo-claude-mktplace/
 | [COMMANDS-CHEATSHEET.md](./docs/COMMANDS-CHEATSHEET.md) | All commands quick reference |
 | [UPDATING.md](./docs/UPDATING.md) | Update guide for the marketplace |
 | [CANONICAL-PATHS.md](./docs/CANONICAL-PATHS.md) | Authoritative path reference |
+| [DEBUGGING-CHECKLIST.md](./docs/DEBUGGING-CHECKLIST.md) | Systematic troubleshooting guide |
 | [CHANGELOG.md](./CHANGELOG.md) | Version history |
 
 ## License
@@ -2,7 +2,7 @@
 
 **This file defines ALL valid paths in this repository. No exceptions. No inference. No assumptions.**
 
-Last Updated: 2026-01-20 (v3.0.0)
+Last Updated: 2026-01-23 (v3.1.2)
 
 ---
 
@@ -17,10 +17,10 @@ leo-claude-mktplace/
 ├── docs/ # All documentation
 │ ├── architecture/ # Draw.io diagrams and specs
 │ ├── CANONICAL-PATHS.md # This file - single source of truth
+│ ├── COMMANDS-CHEATSHEET.md # All commands quick reference
 │ ├── CONFIGURATION.md # Centralized configuration guide
 │ ├── DEBUGGING-CHECKLIST.md # Systematic troubleshooting guide
-│ ├── UPDATING.md # Update guide
-│ └── workflows/ # Workflow documentation
+│ └── UPDATING.md # Update guide
 ├── hooks/ # Shared hooks (if any)
 ├── mcp-servers/ # SHARED MCP servers (v3.0.0+)
 │ ├── gitea/ # Gitea MCP server
@@ -156,7 +156,6 @@ The symlink target is relative: `../../../mcp-servers/{server}`
 | Type | Location |
 |------|----------|
 | Architecture diagrams | `docs/architecture/` |
-| Workflow docs | `docs/workflows/` |
 | This file | `docs/CANONICAL-PATHS.md` |
 | Update guide | `docs/UPDATING.md` |
 | Configuration guide | `docs/CONFIGURATION.md` |
@@ -197,6 +197,51 @@ echo -e "\n=== Config Files ==="
 
 ---
 
+## Cache Clearing: When It's Safe vs Destructive
+
+**⚠️ CRITICAL: Never clear plugin cache mid-session.**
+
+### Why Cache Clearing Breaks MCP Tools
+
+When Claude Code starts a session:
+1. MCP tools are loaded from the cache directory
+2. Tool definitions include **absolute paths** to the venv (e.g., `~/.claude/plugins/cache/.../venv/`)
+3. These paths are cached in the session memory
+4. Deleting the cache removes the venv, but the session still references the old paths
+5. Any MCP tool making HTTP requests fails with TLS certificate errors
+
+### When Cache Clearing is SAFE
+
+| Scenario | Safe? | Action |
+|----------|-------|--------|
+| Before starting Claude Code | ✅ Yes | Clear cache, then start session |
+| Between sessions | ✅ Yes | Clear cache after `/exit`, before next session |
+| During a session | ❌ NO | Never - will break MCP tools |
+| After plugin source edits | ❌ NO | Restart session instead |
+
+### Recovery: MCP Tools Broken Mid-Session
+
+If you accidentally cleared cache during a session and MCP tools fail:
+
+```
+Error: Could not find a suitable TLS CA certificate bundle, invalid path:
+/home/.../.claude/plugins/cache/.../certifi/cacert.pem
+```
+
+**Fix:**
+1. Exit the current session (`/exit` or Ctrl+C)
+2. Start a new Claude Code session
+3. MCP tools will reload from the reinstalled cache
+
+### Correct Workflow for Plugin Development
+
+1. Make changes to plugin source files
+2. Run `./scripts/verify-hooks.sh` (verifies hook types)
+3. Tell user: "Please restart Claude Code for changes to take effect"
+4. **Do NOT clear cache** - session restart handles reloading
+
+---
+
 ## Automated Diagnostics
 
 Use these commands for automated checking:
mcp-servers/data-platform/README.md (new file, 131 lines)

@@ -0,0 +1,131 @@
# Data Platform MCP Server

MCP Server providing pandas, PostgreSQL/PostGIS, and dbt tools for Claude Code.

## Features

- **pandas Tools**: DataFrame operations with Arrow IPC data_ref persistence
- **PostgreSQL Tools**: Database queries with asyncpg connection pooling
- **PostGIS Tools**: Spatial data operations
- **dbt Tools**: Build tool wrapper with pre-execution validation

## Installation

```bash
cd mcp-servers/data-platform
python -m venv .venv
source .venv/bin/activate  # On Windows: .venv\Scripts\activate
pip install -r requirements.txt
```

## Configuration

### System-Level (PostgreSQL credentials)

Create `~/.config/claude/postgres.env`:

```env
POSTGRES_URL=postgresql://user:password@host:5432/database
```

### Project-Level (dbt paths)

Create `.env` in your project root:

```env
DBT_PROJECT_DIR=/path/to/dbt/project
DBT_PROFILES_DIR=/path/to/.dbt
DATA_PLATFORM_MAX_ROWS=100000
```

## Tools

### pandas Tools (14 tools)

| Tool | Description |
|------|-------------|
| `read_csv` | Load CSV file into DataFrame |
| `read_parquet` | Load Parquet file into DataFrame |
| `read_json` | Load JSON/JSONL file into DataFrame |
| `to_csv` | Export DataFrame to CSV file |
| `to_parquet` | Export DataFrame to Parquet file |
| `describe` | Get statistical summary of DataFrame |
| `head` | Get first N rows of DataFrame |
| `tail` | Get last N rows of DataFrame |
| `filter` | Filter DataFrame rows by condition |
| `select` | Select specific columns from DataFrame |
| `groupby` | Group DataFrame and aggregate |
| `join` | Join two DataFrames |
| `list_data` | List all stored DataFrames |
| `drop_data` | Remove a DataFrame from storage |

### PostgreSQL Tools (6 tools)

| Tool | Description |
|------|-------------|
| `pg_connect` | Test connection and return status |
| `pg_query` | Execute SELECT, return as data_ref |
| `pg_execute` | Execute INSERT/UPDATE/DELETE |
| `pg_tables` | List all tables in schema |
| `pg_columns` | Get column info for table |
| `pg_schemas` | List all schemas |

### PostGIS Tools (4 tools)

| Tool | Description |
|------|-------------|
| `st_tables` | List PostGIS-enabled tables |
| `st_geometry_type` | Get geometry type of column |
| `st_srid` | Get SRID of geometry column |
| `st_extent` | Get bounding box of geometries |

### dbt Tools (8 tools)

| Tool | Description |
|------|-------------|
| `dbt_parse` | Validate project (pre-execution) |
| `dbt_run` | Run models with selection |
| `dbt_test` | Run tests |
| `dbt_build` | Run + test |
| `dbt_compile` | Compile SQL without executing |
| `dbt_ls` | List resources |
| `dbt_docs_generate` | Generate documentation |
| `dbt_lineage` | Get model dependencies |

## data_ref System

All DataFrame operations use a `data_ref` system to persist data across tool calls:

1. **Load data**: Returns a `data_ref` string (e.g., `"df_a1b2c3d4"`)
2. **Use data_ref**: Pass to other tools (filter, join, export)
3. **List data**: Use `list_data` to see all stored DataFrames
4. **Clean up**: Use `drop_data` when done

### Example Flow

```
read_csv("data.csv") → {"data_ref": "sales_data", "rows": 1000}
filter("sales_data", "amount > 100") → {"data_ref": "sales_data_filtered"}
describe("sales_data_filtered") → {statistics}
to_parquet("sales_data_filtered", "output.parquet") → {success}
```

## Memory Management

- Default row limit: 100,000 rows per DataFrame
- Configure via `DATA_PLATFORM_MAX_ROWS` environment variable
- Use chunked processing for large files (`chunk_size` parameter)
- Monitor with `list_data` tool (shows memory usage)

## Running

```bash
python -m mcp_server.server
```

## Development

```bash
pip install -e ".[dev]"
pytest
```
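The README above mentions a 100k row cap and chunked processing for large files. As a rough illustration of that idea in plain pandas (a sketch only, not the server's actual `read_csv` tool; the `DATA_PLATFORM_MAX_ROWS` default comes from the configuration section above):

```python
# Sketch: enforcing a row cap while reading a large CSV in chunks with pandas.
import os
import pandas as pd

MAX_ROWS = int(os.getenv("DATA_PLATFORM_MAX_ROWS", "100000"))

def read_csv_limited(path: str, chunk_size: int = 10_000) -> pd.DataFrame:
    chunks, total = [], 0
    for chunk in pd.read_csv(path, chunksize=chunk_size):
        remaining = MAX_ROWS - total
        if remaining <= 0:
            break  # stop once the configured limit is reached
        chunks.append(chunk.head(remaining))
        total += len(chunks[-1])
    return pd.concat(chunks, ignore_index=True) if chunks else pd.DataFrame()
```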
mcp-servers/data-platform/mcp_server/__init__.py (new file, 7 lines)

@@ -0,0 +1,7 @@
"""
Data Platform MCP Server.

Provides pandas, PostgreSQL/PostGIS, and dbt tools to Claude Code via MCP.
"""

__version__ = "1.0.0"
mcp-servers/data-platform/mcp_server/config.py (new file, 195 lines)

@@ -0,0 +1,195 @@
"""
Configuration loader for Data Platform MCP Server.

Implements hybrid configuration system:
- System-level: ~/.config/claude/postgres.env (credentials)
- Project-level: .env (dbt project paths, overrides)
- Auto-detection: dbt_project.yml discovery
"""
from pathlib import Path
from dotenv import load_dotenv
import os
import logging
from typing import Dict, Optional

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DataPlatformConfig:
    """Hybrid configuration loader for data platform tools"""

    def __init__(self):
        self.postgres_url: Optional[str] = None
        self.dbt_project_dir: Optional[str] = None
        self.dbt_profiles_dir: Optional[str] = None
        self.max_rows: int = 100_000

    def load(self) -> Dict[str, Optional[str]]:
        """
        Load configuration from system and project levels.

        Returns:
            Dict containing postgres_url, dbt_project_dir, dbt_profiles_dir, max_rows

        Note:
            PostgreSQL credentials are optional - server can run in pandas-only mode.
        """
        # Load system config (PostgreSQL credentials)
        system_config = Path.home() / '.config' / 'claude' / 'postgres.env'
        if system_config.exists():
            load_dotenv(system_config)
            logger.info(f"Loaded system configuration from {system_config}")
        else:
            logger.info(
                f"System config not found: {system_config} - "
                "PostgreSQL tools will be unavailable"
            )

        # Find project directory
        project_dir = self._find_project_directory()

        # Load project config (overrides system)
        if project_dir:
            project_config = project_dir / '.env'
            if project_config.exists():
                load_dotenv(project_config, override=True)
                logger.info(f"Loaded project configuration from {project_config}")

        # Extract values
        self.postgres_url = os.getenv('POSTGRES_URL')
        self.dbt_project_dir = os.getenv('DBT_PROJECT_DIR')
        self.dbt_profiles_dir = os.getenv('DBT_PROFILES_DIR')
        self.max_rows = int(os.getenv('DATA_PLATFORM_MAX_ROWS', '100000'))

        # Auto-detect dbt project if not specified
        if not self.dbt_project_dir and project_dir:
            self.dbt_project_dir = self._find_dbt_project(project_dir)
            if self.dbt_project_dir:
                logger.info(f"Auto-detected dbt project: {self.dbt_project_dir}")

        # Default dbt profiles dir to ~/.dbt
        if not self.dbt_profiles_dir:
            default_profiles = Path.home() / '.dbt'
            if default_profiles.exists():
                self.dbt_profiles_dir = str(default_profiles)

        return {
            'postgres_url': self.postgres_url,
            'dbt_project_dir': self.dbt_project_dir,
            'dbt_profiles_dir': self.dbt_profiles_dir,
            'max_rows': self.max_rows,
            'postgres_available': self.postgres_url is not None,
            'dbt_available': self.dbt_project_dir is not None
        }

    def _find_project_directory(self) -> Optional[Path]:
        """
        Find the user's project directory.

        Returns:
            Path to project directory, or None if not found
        """
        # Strategy 1: Check CLAUDE_PROJECT_DIR environment variable
        project_dir = os.getenv('CLAUDE_PROJECT_DIR')
        if project_dir:
            path = Path(project_dir)
            if path.exists():
                logger.info(f"Found project directory from CLAUDE_PROJECT_DIR: {path}")
                return path

        # Strategy 2: Check PWD
        pwd = os.getenv('PWD')
        if pwd:
            path = Path(pwd)
            if path.exists() and (
                (path / '.git').exists() or
                (path / '.env').exists() or
                (path / 'dbt_project.yml').exists()
            ):
                logger.info(f"Found project directory from PWD: {path}")
                return path

        # Strategy 3: Check current working directory
        cwd = Path.cwd()
        if (cwd / '.git').exists() or (cwd / '.env').exists() or (cwd / 'dbt_project.yml').exists():
            logger.info(f"Found project directory from cwd: {cwd}")
            return cwd

        logger.debug("Could not determine project directory")
        return None

    def _find_dbt_project(self, start_dir: Path) -> Optional[str]:
        """
        Find dbt_project.yml in the project or its subdirectories.

        Args:
            start_dir: Directory to start searching from

        Returns:
            Path to dbt project directory, or None if not found
        """
        # Check root
        if (start_dir / 'dbt_project.yml').exists():
            return str(start_dir)

        # Check common subdirectories
        for subdir in ['dbt', 'transform', 'analytics', 'models']:
            candidate = start_dir / subdir
            if (candidate / 'dbt_project.yml').exists():
                return str(candidate)

        # Search one level deep
        for item in start_dir.iterdir():
            if item.is_dir() and not item.name.startswith('.'):
                if (item / 'dbt_project.yml').exists():
                    return str(item)

        return None


def load_config() -> Dict[str, Optional[str]]:
    """
    Convenience function to load configuration.

    Returns:
        Configuration dictionary
    """
    config = DataPlatformConfig()
    return config.load()


def check_postgres_connection() -> Dict[str, any]:
    """
    Check PostgreSQL connection status for SessionStart hook.

    Returns:
        Dict with connection status and message
    """
    import asyncio

    config = load_config()
    if not config.get('postgres_url'):
        return {
            'connected': False,
            'message': 'PostgreSQL not configured (POSTGRES_URL not set)'
        }

    async def test_connection():
        try:
            import asyncpg
            conn = await asyncpg.connect(config['postgres_url'], timeout=5)
            version = await conn.fetchval('SELECT version()')
            await conn.close()
            return {
                'connected': True,
                'message': f'Connected to PostgreSQL',
                'version': version.split(',')[0] if version else 'Unknown'
            }
        except Exception as e:
            return {
                'connected': False,
                'message': f'PostgreSQL connection failed: {str(e)}'
            }

    return asyncio.run(test_connection())
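A short usage sketch of the loader above: `load_config()` returns plain values plus availability flags (so the server can fall back to the pandas-only mode noted in its docstring), and `check_postgres_connection()` is what a SessionStart hook can call for a non-blocking status message. The import path assumes the package layout shown in this diff.

```python
# Example usage of the config module above (assumes it is importable as
# mcp_server.config inside the data-platform venv).
from mcp_server.config import load_config, check_postgres_connection

config = load_config()
if not config["postgres_available"]:
    print("Running in pandas-only mode (no POSTGRES_URL configured)")

status = check_postgres_connection()
print(status["message"])  # a warning at most - never a hard failure
```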
mcp-servers/data-platform/mcp_server/data_store.py (new file, 219 lines)

@@ -0,0 +1,219 @@
"""
Arrow IPC DataFrame Registry.

Provides persistent storage for DataFrames across tool calls using Apache Arrow
for efficient memory management and serialization.
"""
import pyarrow as pa
import pandas as pd
import uuid
import logging
from typing import Dict, Optional, List, Union
from dataclasses import dataclass
from datetime import datetime

logger = logging.getLogger(__name__)


@dataclass
class DataFrameInfo:
    """Metadata about a stored DataFrame"""
    ref: str
    rows: int
    columns: int
    column_names: List[str]
    dtypes: Dict[str, str]
    memory_bytes: int
    created_at: datetime
    source: Optional[str] = None


class DataStore:
    """
    Singleton registry for Arrow Tables (DataFrames).

    Uses Arrow IPC format for efficient memory usage and supports
    data_ref based retrieval across multiple tool calls.
    """
    _instance = None
    _dataframes: Dict[str, pa.Table] = {}
    _metadata: Dict[str, DataFrameInfo] = {}
    _max_rows: int = 100_000

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._dataframes = {}
            cls._metadata = {}
        return cls._instance

    @classmethod
    def get_instance(cls) -> 'DataStore':
        """Get the singleton instance"""
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    @classmethod
    def set_max_rows(cls, max_rows: int):
        """Set the maximum rows limit"""
        cls._max_rows = max_rows

    def store(
        self,
        data: Union[pa.Table, pd.DataFrame],
        name: Optional[str] = None,
        source: Optional[str] = None
    ) -> str:
        """
        Store a DataFrame and return its reference.

        Args:
            data: Arrow Table or pandas DataFrame
            name: Optional name for the reference (auto-generated if not provided)
            source: Optional source description (e.g., file path, query)

        Returns:
            data_ref string to retrieve the DataFrame later
        """
        # Convert pandas to Arrow if needed
        if isinstance(data, pd.DataFrame):
            table = pa.Table.from_pandas(data)
        else:
            table = data

        # Generate reference
        data_ref = name or f"df_{uuid.uuid4().hex[:8]}"

        # Ensure unique reference
        if data_ref in self._dataframes and name is None:
            data_ref = f"{data_ref}_{uuid.uuid4().hex[:4]}"

        # Store table
        self._dataframes[data_ref] = table

        # Store metadata
        schema = table.schema
        self._metadata[data_ref] = DataFrameInfo(
            ref=data_ref,
            rows=table.num_rows,
            columns=table.num_columns,
            column_names=[f.name for f in schema],
            dtypes={f.name: str(f.type) for f in schema},
            memory_bytes=table.nbytes,
            created_at=datetime.now(),
            source=source
        )

        logger.info(f"Stored DataFrame '{data_ref}': {table.num_rows} rows, {table.num_columns} cols")
        return data_ref

    def get(self, data_ref: str) -> Optional[pa.Table]:
        """
        Retrieve an Arrow Table by reference.

        Args:
            data_ref: Reference string from store()

        Returns:
            Arrow Table or None if not found
        """
        return self._dataframes.get(data_ref)

    def get_pandas(self, data_ref: str) -> Optional[pd.DataFrame]:
        """
        Retrieve a DataFrame as pandas.

        Args:
            data_ref: Reference string from store()

        Returns:
            pandas DataFrame or None if not found
        """
        table = self.get(data_ref)
        if table is not None:
            return table.to_pandas()
        return None

    def get_info(self, data_ref: str) -> Optional[DataFrameInfo]:
        """
        Get metadata about a stored DataFrame.

        Args:
            data_ref: Reference string

        Returns:
            DataFrameInfo or None if not found
        """
        return self._metadata.get(data_ref)

    def list_refs(self) -> List[Dict]:
        """
        List all stored DataFrame references with metadata.

        Returns:
            List of dicts with ref, rows, columns, memory info
        """
        result = []
        for ref, info in self._metadata.items():
            result.append({
                'ref': ref,
                'rows': info.rows,
                'columns': info.columns,
                'column_names': info.column_names,
                'memory_mb': round(info.memory_bytes / (1024 * 1024), 2),
                'source': info.source,
                'created_at': info.created_at.isoformat()
            })
        return result

    def drop(self, data_ref: str) -> bool:
        """
        Remove a DataFrame from the store.

        Args:
            data_ref: Reference string

        Returns:
            True if removed, False if not found
        """
        if data_ref in self._dataframes:
            del self._dataframes[data_ref]
            del self._metadata[data_ref]
            logger.info(f"Dropped DataFrame '{data_ref}'")
            return True
        return False

    def clear(self):
        """Remove all stored DataFrames"""
        count = len(self._dataframes)
        self._dataframes.clear()
        self._metadata.clear()
        logger.info(f"Cleared {count} DataFrames from store")

    def total_memory_bytes(self) -> int:
        """Get total memory used by all stored DataFrames"""
        return sum(info.memory_bytes for info in self._metadata.values())

    def total_memory_mb(self) -> float:
        """Get total memory in MB"""
        return round(self.total_memory_bytes() / (1024 * 1024), 2)

    def check_row_limit(self, row_count: int) -> Dict:
        """
        Check if row count exceeds limit.

        Args:
            row_count: Number of rows

        Returns:
            Dict with 'exceeded' bool and 'message' if exceeded
        """
        if row_count > self._max_rows:
            return {
                'exceeded': True,
                'message': f"Row count ({row_count:,}) exceeds limit ({self._max_rows:,})",
                'suggestion': f"Use chunked processing or filter data first",
                'limit': self._max_rows
            }
        return {'exceeded': False}
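A minimal round trip through the registry above, showing how a data_ref is minted by `store()` and reused across later calls. The caller code is hypothetical; only the `DataStore` API comes from the file shown in this diff.

```python
# Example round trip through DataStore (caller side is hypothetical).
import pandas as pd
from mcp_server.data_store import DataStore

store = DataStore.get_instance()
ref = store.store(pd.DataFrame({"amount": [50, 150, 300]}), name="sales_data", source="inline")

df = store.get_pandas(ref)            # back as pandas for a tool to operate on
print(store.list_refs())              # [{'ref': 'sales_data', 'rows': 3, ...}]
print(store.total_memory_mb(), "MB held")
store.drop(ref)                       # free it when done
```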
387
mcp-servers/data-platform/mcp_server/dbt_tools.py
Normal file
387
mcp-servers/data-platform/mcp_server/dbt_tools.py
Normal file
@@ -0,0 +1,387 @@
|
|||||||
|
"""
|
||||||
|
dbt MCP Tools.
|
||||||
|
|
||||||
|
Provides dbt CLI wrapper with pre-execution validation.
|
||||||
|
"""
|
||||||
|
import subprocess
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, List, Optional, Any
|
||||||
|
|
||||||
|
from .config import load_config
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DbtTools:
|
||||||
|
"""dbt CLI wrapper tools with pre-validation"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.config = load_config()
|
||||||
|
self.project_dir = self.config.get('dbt_project_dir')
|
||||||
|
self.profiles_dir = self.config.get('dbt_profiles_dir')
|
||||||
|
|
||||||
|
def _get_dbt_command(self, cmd: List[str]) -> List[str]:
|
||||||
|
"""Build dbt command with project and profiles directories"""
|
||||||
|
base = ['dbt']
|
||||||
|
if self.project_dir:
|
||||||
|
base.extend(['--project-dir', self.project_dir])
|
||||||
|
if self.profiles_dir:
|
||||||
|
base.extend(['--profiles-dir', self.profiles_dir])
|
||||||
|
base.extend(cmd)
|
||||||
|
return base
|
||||||
|
|
||||||
|
def _run_dbt(
|
||||||
|
self,
|
||||||
|
cmd: List[str],
|
||||||
|
timeout: int = 300,
|
||||||
|
capture_json: bool = False
|
||||||
|
) -> Dict:
|
||||||
|
"""
|
||||||
|
Run dbt command and return result.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
cmd: dbt subcommand and arguments
|
||||||
|
timeout: Command timeout in seconds
|
||||||
|
capture_json: If True, parse JSON output
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with command result
|
||||||
|
"""
|
||||||
|
if not self.project_dir:
|
||||||
|
return {
|
||||||
|
'error': 'dbt project not found',
|
||||||
|
'suggestion': 'Set DBT_PROJECT_DIR in project .env or ensure dbt_project.yml exists'
|
||||||
|
}
|
||||||
|
|
||||||
|
full_cmd = self._get_dbt_command(cmd)
|
||||||
|
logger.info(f"Running: {' '.join(full_cmd)}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
env = os.environ.copy()
|
||||||
|
# Disable dbt analytics/tracking
|
||||||
|
env['DBT_SEND_ANONYMOUS_USAGE_STATS'] = 'false'
|
||||||
|
|
||||||
|
result = subprocess.run(
|
||||||
|
full_cmd,
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
timeout=timeout,
|
||||||
|
cwd=self.project_dir,
|
||||||
|
env=env
|
||||||
|
)
|
||||||
|
|
||||||
|
output = {
|
||||||
|
'success': result.returncode == 0,
|
||||||
|
'command': ' '.join(cmd),
|
||||||
|
'stdout': result.stdout,
|
||||||
|
'stderr': result.stderr if result.returncode != 0 else None
|
||||||
|
}
|
||||||
|
|
||||||
|
if capture_json and result.returncode == 0:
|
||||||
|
try:
|
||||||
|
output['data'] = json.loads(result.stdout)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return output
|
||||||
|
|
||||||
|
except subprocess.TimeoutExpired:
|
||||||
|
return {
|
||||||
|
'error': f'Command timed out after {timeout}s',
|
||||||
|
'command': ' '.join(cmd)
|
||||||
|
}
|
||||||
|
except FileNotFoundError:
|
||||||
|
return {
|
||||||
|
'error': 'dbt not found in PATH',
|
||||||
|
'suggestion': 'Install dbt: pip install dbt-core dbt-postgres'
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"dbt command failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def dbt_parse(self) -> Dict:
|
||||||
|
"""
|
||||||
|
Validate dbt project without executing (pre-flight check).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with validation result and any errors
|
||||||
|
"""
|
||||||
|
result = self._run_dbt(['parse'])
|
||||||
|
|
||||||
|
# Check if _run_dbt returned an error (e.g., project not found, timeout, dbt not installed)
|
||||||
|
if 'error' in result:
|
||||||
|
return result
|
||||||
|
|
||||||
|
if not result.get('success'):
|
||||||
|
# Extract useful error info from stderr
|
||||||
|
stderr = result.get('stderr', '') or result.get('stdout', '')
|
||||||
|
errors = []
|
||||||
|
|
||||||
|
# Look for common dbt 1.9+ deprecation warnings
|
||||||
|
if 'deprecated' in stderr.lower():
|
||||||
|
errors.append({
|
||||||
|
'type': 'deprecation',
|
||||||
|
'message': 'Deprecated syntax found - check dbt 1.9+ migration guide'
|
||||||
|
})
|
||||||
|
|
||||||
|
# Look for compilation errors
|
||||||
|
if 'compilation error' in stderr.lower():
|
||||||
|
errors.append({
|
||||||
|
'type': 'compilation',
|
||||||
|
'message': 'SQL compilation error - check model syntax'
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
'valid': False,
|
||||||
|
'errors': errors,
|
||||||
|
'details': stderr[:2000] if stderr else None,
|
||||||
|
'suggestion': 'Fix issues before running dbt models'
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
'valid': True,
|
||||||
|
'message': 'dbt project validation passed'
|
||||||
|
}
|
||||||
|
|
||||||
|
    async def dbt_run(
        self,
        select: Optional[str] = None,
        exclude: Optional[str] = None,
        full_refresh: bool = False
    ) -> Dict:
        """
        Run dbt models with pre-validation.

        Args:
            select: Model selection (e.g., "model_name", "+model_name", "tag:daily")
            exclude: Models to exclude
            full_refresh: If True, rebuild incremental models

        Returns:
            Dict with run result
        """
        # ALWAYS validate first
        parse_result = await self.dbt_parse()
        if not parse_result.get('valid'):
            return {
                'error': 'Pre-validation failed',
                **parse_result
            }

        cmd = ['run']
        if select:
            cmd.extend(['--select', select])
        if exclude:
            cmd.extend(['--exclude', exclude])
        if full_refresh:
            cmd.append('--full-refresh')

        return self._run_dbt(cmd)

    async def dbt_test(
        self,
        select: Optional[str] = None,
        exclude: Optional[str] = None
    ) -> Dict:
        """
        Run dbt tests.

        Args:
            select: Test selection
            exclude: Tests to exclude

        Returns:
            Dict with test results
        """
        cmd = ['test']
        if select:
            cmd.extend(['--select', select])
        if exclude:
            cmd.extend(['--exclude', exclude])

        return self._run_dbt(cmd)

    async def dbt_build(
        self,
        select: Optional[str] = None,
        exclude: Optional[str] = None,
        full_refresh: bool = False
    ) -> Dict:
        """
        Run dbt build (run + test) with pre-validation.

        Args:
            select: Model/test selection
            exclude: Resources to exclude
            full_refresh: If True, rebuild incremental models

        Returns:
            Dict with build result
        """
        # ALWAYS validate first
        parse_result = await self.dbt_parse()
        if not parse_result.get('valid'):
            return {
                'error': 'Pre-validation failed',
                **parse_result
            }

        cmd = ['build']
        if select:
            cmd.extend(['--select', select])
        if exclude:
            cmd.extend(['--exclude', exclude])
        if full_refresh:
            cmd.append('--full-refresh')

        return self._run_dbt(cmd)

    async def dbt_compile(
        self,
        select: Optional[str] = None
    ) -> Dict:
        """
        Compile dbt models to SQL without executing.

        Args:
            select: Model selection

        Returns:
            Dict with compiled SQL info
        """
        cmd = ['compile']
        if select:
            cmd.extend(['--select', select])

        return self._run_dbt(cmd)

    async def dbt_ls(
        self,
        select: Optional[str] = None,
        resource_type: Optional[str] = None,
        output: str = 'name'
    ) -> Dict:
        """
        List dbt resources.

        Args:
            select: Resource selection
            resource_type: Filter by type (model, test, seed, snapshot, source)
            output: Output format ('name', 'path', 'json')

        Returns:
            Dict with list of resources
        """
        cmd = ['ls', '--output', output]
        if select:
            cmd.extend(['--select', select])
        if resource_type:
            cmd.extend(['--resource-type', resource_type])

        result = self._run_dbt(cmd)

        if result.get('success') and result.get('stdout'):
            lines = [l.strip() for l in result['stdout'].split('\n') if l.strip()]
            result['resources'] = lines
            result['count'] = len(lines)

        return result

    async def dbt_docs_generate(self) -> Dict:
        """
        Generate dbt documentation.

        Returns:
            Dict with generation result
        """
        result = self._run_dbt(['docs', 'generate'])

        if result.get('success') and self.project_dir:
            # Check for generated catalog
            catalog_path = Path(self.project_dir) / 'target' / 'catalog.json'
            manifest_path = Path(self.project_dir) / 'target' / 'manifest.json'
            result['catalog_generated'] = catalog_path.exists()
            result['manifest_generated'] = manifest_path.exists()

        return result

    async def dbt_lineage(self, model: str) -> Dict:
        """
        Get model dependencies and lineage.

        Args:
            model: Model name to analyze

        Returns:
            Dict with upstream and downstream dependencies
        """
        if not self.project_dir:
            return {'error': 'dbt project not found'}

        manifest_path = Path(self.project_dir) / 'target' / 'manifest.json'

        # Generate manifest if not exists
        if not manifest_path.exists():
            compile_result = await self.dbt_compile(select=model)
            if not compile_result.get('success'):
                return {
                    'error': 'Failed to compile manifest',
                    'details': compile_result
                }

        if not manifest_path.exists():
            return {
                'error': 'Manifest not found',
                'suggestion': 'Run dbt compile first'
            }

        try:
            with open(manifest_path) as f:
                manifest = json.load(f)

            # Find the model node
            model_key = None
            for key in manifest.get('nodes', {}):
                if key.endswith(f'.{model}') or manifest['nodes'][key].get('name') == model:
                    model_key = key
                    break

            if not model_key:
                return {
                    'error': f'Model not found: {model}',
                    'available_models': [
                        n.get('name') for n in manifest.get('nodes', {}).values()
                        if n.get('resource_type') == 'model'
                    ][:20]
                }

            node = manifest['nodes'][model_key]

            # Get upstream (depends_on)
            upstream = node.get('depends_on', {}).get('nodes', [])

            # Get downstream (find nodes that depend on this one)
            downstream = []
            for key, other_node in manifest.get('nodes', {}).items():
                deps = other_node.get('depends_on', {}).get('nodes', [])
                if model_key in deps:
                    downstream.append(key)

            return {
                'model': model,
                'unique_id': model_key,
                'materialization': node.get('config', {}).get('materialized'),
                'schema': node.get('schema'),
                'database': node.get('database'),
                'upstream': upstream,
                'downstream': downstream,
                'description': node.get('description'),
                'tags': node.get('tags', [])
            }

        except Exception as e:
            logger.error(f"dbt_lineage failed: {e}")
            return {'error': str(e)}
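For orientation, a sketch of how dbt_lineage is typically consumed; the model name and printed values are illustrative, but the result keys match the return statement above:

```python
import asyncio
from mcp_server.dbt_tools import DbtTools  # import path assumed

async def show_lineage():
    lineage = await DbtTools().dbt_lineage('fct_orders')   # hypothetical model name
    # Keys per the code above: model, unique_id, materialization, schema,
    # database, upstream, downstream, description, tags
    print('upstream:', lineage.get('upstream'))
    print('downstream:', lineage.get('downstream'))

asyncio.run(show_lineage())
```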

500  mcp-servers/data-platform/mcp_server/pandas_tools.py  Normal file
@@ -0,0 +1,500 @@
"""
pandas MCP Tools.

Provides DataFrame operations with Arrow IPC data_ref persistence.
"""
import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq
import json
import logging
from pathlib import Path
from typing import Dict, List, Optional, Any, Union

from .data_store import DataStore
from .config import load_config

logger = logging.getLogger(__name__)


class PandasTools:
    """pandas data manipulation tools with data_ref persistence"""

    def __init__(self):
        self.store = DataStore.get_instance()
        config = load_config()
        self.max_rows = config.get('max_rows', 100_000)
        self.store.set_max_rows(self.max_rows)

    def _check_and_store(
        self,
        df: pd.DataFrame,
        name: Optional[str] = None,
        source: Optional[str] = None
    ) -> Dict:
        """Check row limit and store DataFrame if within limits"""
        check = self.store.check_row_limit(len(df))
        if check['exceeded']:
            return {
                'error': 'row_limit_exceeded',
                **check,
                'preview': df.head(100).to_dict(orient='records')
            }

        data_ref = self.store.store(df, name=name, source=source)
        return {
            'data_ref': data_ref,
            'rows': len(df),
            'columns': list(df.columns),
            'dtypes': {col: str(dtype) for col, dtype in df.dtypes.items()}
        }

    async def read_csv(
        self,
        file_path: str,
        name: Optional[str] = None,
        chunk_size: Optional[int] = None,
        **kwargs
    ) -> Dict:
        """
        Load CSV file into DataFrame.

        Args:
            file_path: Path to CSV file
            name: Optional name for data_ref
            chunk_size: If provided, process in chunks
            **kwargs: Additional pandas read_csv arguments

        Returns:
            Dict with data_ref or error info
        """
        path = Path(file_path)
        if not path.exists():
            return {'error': f'File not found: {file_path}'}

        try:
            if chunk_size:
                # Chunked processing - return iterator info
                chunks = []
                for i, chunk in enumerate(pd.read_csv(path, chunksize=chunk_size, **kwargs)):
                    chunk_ref = self.store.store(chunk, name=f"{name or 'chunk'}_{i}", source=file_path)
                    chunks.append({'ref': chunk_ref, 'rows': len(chunk)})
                return {
                    'chunked': True,
                    'chunks': chunks,
                    'total_chunks': len(chunks)
                }

            df = pd.read_csv(path, **kwargs)
            return self._check_and_store(df, name=name, source=file_path)

        except Exception as e:
            logger.error(f"read_csv failed: {e}")
            return {'error': str(e)}

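Because each loader returns a data_ref rather than raw rows, results can be passed between tool calls without re-serialising the DataFrame. A small round-trip sketch using methods defined further down in this file (file paths and the query string are invented for illustration):

```python
import asyncio
from mcp_server.pandas_tools import PandasTools  # import path assumed

async def roundtrip():
    tools = PandasTools()
    loaded = await tools.read_csv('/tmp/orders.csv', name='orders')      # hypothetical input file
    filtered = await tools.filter(loaded['data_ref'], 'amount > 100')    # new data_ref, original kept
    await tools.to_parquet(filtered['data_ref'], '/tmp/orders_large.parquet')

asyncio.run(roundtrip())
```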
    async def read_parquet(
        self,
        file_path: str,
        name: Optional[str] = None,
        columns: Optional[List[str]] = None
    ) -> Dict:
        """
        Load Parquet file into DataFrame.

        Args:
            file_path: Path to Parquet file
            name: Optional name for data_ref
            columns: Optional list of columns to load

        Returns:
            Dict with data_ref or error info
        """
        path = Path(file_path)
        if not path.exists():
            return {'error': f'File not found: {file_path}'}

        try:
            table = pq.read_table(path, columns=columns)
            df = table.to_pandas()
            return self._check_and_store(df, name=name, source=file_path)

        except Exception as e:
            logger.error(f"read_parquet failed: {e}")
            return {'error': str(e)}

    async def read_json(
        self,
        file_path: str,
        name: Optional[str] = None,
        lines: bool = False,
        **kwargs
    ) -> Dict:
        """
        Load JSON/JSONL file into DataFrame.

        Args:
            file_path: Path to JSON file
            name: Optional name for data_ref
            lines: If True, read as JSON Lines format
            **kwargs: Additional pandas read_json arguments

        Returns:
            Dict with data_ref or error info
        """
        path = Path(file_path)
        if not path.exists():
            return {'error': f'File not found: {file_path}'}

        try:
            df = pd.read_json(path, lines=lines, **kwargs)
            return self._check_and_store(df, name=name, source=file_path)

        except Exception as e:
            logger.error(f"read_json failed: {e}")
            return {'error': str(e)}

    async def to_csv(
        self,
        data_ref: str,
        file_path: str,
        index: bool = False,
        **kwargs
    ) -> Dict:
        """
        Export DataFrame to CSV file.

        Args:
            data_ref: Reference to stored DataFrame
            file_path: Output file path
            index: Whether to include index
            **kwargs: Additional pandas to_csv arguments

        Returns:
            Dict with success status
        """
        df = self.store.get_pandas(data_ref)
        if df is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            df.to_csv(file_path, index=index, **kwargs)
            return {
                'success': True,
                'file_path': file_path,
                'rows': len(df),
                'size_bytes': Path(file_path).stat().st_size
            }
        except Exception as e:
            logger.error(f"to_csv failed: {e}")
            return {'error': str(e)}

    async def to_parquet(
        self,
        data_ref: str,
        file_path: str,
        compression: str = 'snappy'
    ) -> Dict:
        """
        Export DataFrame to Parquet file.

        Args:
            data_ref: Reference to stored DataFrame
            file_path: Output file path
            compression: Compression codec

        Returns:
            Dict with success status
        """
        table = self.store.get(data_ref)
        if table is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            pq.write_table(table, file_path, compression=compression)
            return {
                'success': True,
                'file_path': file_path,
                'rows': table.num_rows,
                'size_bytes': Path(file_path).stat().st_size
            }
        except Exception as e:
            logger.error(f"to_parquet failed: {e}")
            return {'error': str(e)}

    async def describe(self, data_ref: str) -> Dict:
        """
        Get statistical summary of DataFrame.

        Args:
            data_ref: Reference to stored DataFrame

        Returns:
            Dict with statistical summary
        """
        df = self.store.get_pandas(data_ref)
        if df is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            desc = df.describe(include='all')
            info = self.store.get_info(data_ref)

            return {
                'data_ref': data_ref,
                'shape': {'rows': len(df), 'columns': len(df.columns)},
                'columns': list(df.columns),
                'dtypes': {col: str(dtype) for col, dtype in df.dtypes.items()},
                'memory_mb': info.memory_bytes / (1024 * 1024) if info else None,
                'null_counts': df.isnull().sum().to_dict(),
                'statistics': desc.to_dict()
            }
        except Exception as e:
            logger.error(f"describe failed: {e}")
            return {'error': str(e)}

    async def head(self, data_ref: str, n: int = 10) -> Dict:
        """
        Get first N rows of DataFrame.

        Args:
            data_ref: Reference to stored DataFrame
            n: Number of rows

        Returns:
            Dict with rows as records
        """
        df = self.store.get_pandas(data_ref)
        if df is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            head_df = df.head(n)
            return {
                'data_ref': data_ref,
                'total_rows': len(df),
                'returned_rows': len(head_df),
                'columns': list(df.columns),
                'data': head_df.to_dict(orient='records')
            }
        except Exception as e:
            logger.error(f"head failed: {e}")
            return {'error': str(e)}

    async def tail(self, data_ref: str, n: int = 10) -> Dict:
        """
        Get last N rows of DataFrame.

        Args:
            data_ref: Reference to stored DataFrame
            n: Number of rows

        Returns:
            Dict with rows as records
        """
        df = self.store.get_pandas(data_ref)
        if df is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            tail_df = df.tail(n)
            return {
                'data_ref': data_ref,
                'total_rows': len(df),
                'returned_rows': len(tail_df),
                'columns': list(df.columns),
                'data': tail_df.to_dict(orient='records')
            }
        except Exception as e:
            logger.error(f"tail failed: {e}")
            return {'error': str(e)}

    async def filter(
        self,
        data_ref: str,
        condition: str,
        name: Optional[str] = None
    ) -> Dict:
        """
        Filter DataFrame rows by condition.

        Args:
            data_ref: Reference to stored DataFrame
            condition: pandas query string (e.g., "age > 30 and city == 'NYC'")
            name: Optional name for result data_ref

        Returns:
            Dict with new data_ref for filtered result
        """
        df = self.store.get_pandas(data_ref)
        if df is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            filtered = df.query(condition)
            result_name = name or f"{data_ref}_filtered"
            return self._check_and_store(
                filtered,
                name=result_name,
                source=f"filter({data_ref}, '{condition}')"
            )
        except Exception as e:
            logger.error(f"filter failed: {e}")
            return {'error': str(e)}

    async def select(
        self,
        data_ref: str,
        columns: List[str],
        name: Optional[str] = None
    ) -> Dict:
        """
        Select specific columns from DataFrame.

        Args:
            data_ref: Reference to stored DataFrame
            columns: List of column names to select
            name: Optional name for result data_ref

        Returns:
            Dict with new data_ref for selected columns
        """
        df = self.store.get_pandas(data_ref)
        if df is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            # Validate columns exist
            missing = [c for c in columns if c not in df.columns]
            if missing:
                return {
                    'error': f'Columns not found: {missing}',
                    'available_columns': list(df.columns)
                }

            selected = df[columns]
            result_name = name or f"{data_ref}_select"
            return self._check_and_store(
                selected,
                name=result_name,
                source=f"select({data_ref}, {columns})"
            )
        except Exception as e:
            logger.error(f"select failed: {e}")
            return {'error': str(e)}

    async def groupby(
        self,
        data_ref: str,
        by: Union[str, List[str]],
        agg: Dict[str, Union[str, List[str]]],
        name: Optional[str] = None
    ) -> Dict:
        """
        Group DataFrame and aggregate.

        Args:
            data_ref: Reference to stored DataFrame
            by: Column(s) to group by
            agg: Aggregation dict (e.g., {"sales": "sum", "count": "mean"})
            name: Optional name for result data_ref

        Returns:
            Dict with new data_ref for aggregated result
        """
        df = self.store.get_pandas(data_ref)
        if df is None:
            return {'error': f'DataFrame not found: {data_ref}'}

        try:
            grouped = df.groupby(by).agg(agg).reset_index()
            # Flatten column names if multi-level
            if isinstance(grouped.columns, pd.MultiIndex):
                grouped.columns = ['_'.join(col).strip('_') for col in grouped.columns]

            result_name = name or f"{data_ref}_grouped"
            return self._check_and_store(
                grouped,
                name=result_name,
                source=f"groupby({data_ref}, by={by})"
            )
        except Exception as e:
            logger.error(f"groupby failed: {e}")
            return {'error': str(e)}

    async def join(
        self,
        left_ref: str,
        right_ref: str,
        on: Optional[Union[str, List[str]]] = None,
        left_on: Optional[Union[str, List[str]]] = None,
        right_on: Optional[Union[str, List[str]]] = None,
        how: str = 'inner',
        name: Optional[str] = None
    ) -> Dict:
        """
        Join two DataFrames.

        Args:
            left_ref: Reference to left DataFrame
            right_ref: Reference to right DataFrame
            on: Column(s) to join on (if same name in both)
            left_on: Left join column(s)
            right_on: Right join column(s)
            how: Join type ('inner', 'left', 'right', 'outer')
            name: Optional name for result data_ref

        Returns:
            Dict with new data_ref for joined result
        """
        left_df = self.store.get_pandas(left_ref)
        right_df = self.store.get_pandas(right_ref)

        if left_df is None:
            return {'error': f'DataFrame not found: {left_ref}'}
        if right_df is None:
            return {'error': f'DataFrame not found: {right_ref}'}

        try:
            joined = pd.merge(
                left_df, right_df,
                on=on, left_on=left_on, right_on=right_on,
                how=how
            )
            result_name = name or f"{left_ref}_{right_ref}_joined"
            return self._check_and_store(
                joined,
                name=result_name,
                source=f"join({left_ref}, {right_ref}, how={how})"
            )
        except Exception as e:
            logger.error(f"join failed: {e}")
            return {'error': str(e)}

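The transformation tools compose the same way: every call returns a fresh data_ref, so a groupby result can feed straight into a join. A brief sketch (file, column, and ref names are invented):

```python
import asyncio
from mcp_server.pandas_tools import PandasTools  # import path assumed

async def aggregate_and_join():
    tools = PandasTools()
    orders = await tools.read_csv('/tmp/orders.csv', name='orders')          # hypothetical inputs
    customers = await tools.read_csv('/tmp/customers.csv', name='customers')
    totals = await tools.groupby(orders['data_ref'], by='customer_id',
                                 agg={'amount': 'sum'}, name='totals')
    enriched = await tools.join(totals['data_ref'], customers['data_ref'],
                                on='customer_id', how='left', name='enriched')
    print(enriched.get('rows'), enriched.get('columns'))

asyncio.run(aggregate_and_join())
```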
    async def list_data(self) -> Dict:
        """
        List all stored DataFrames.

        Returns:
            Dict with list of stored DataFrames and their info
        """
        refs = self.store.list_refs()
        return {
            'count': len(refs),
            'total_memory_mb': self.store.total_memory_mb(),
            'max_rows_limit': self.max_rows,
            'dataframes': refs
        }

    async def drop_data(self, data_ref: str) -> Dict:
        """
        Remove a DataFrame from storage.

        Args:
            data_ref: Reference to drop

        Returns:
            Dict with success status
        """
        if self.store.drop(data_ref):
            return {'success': True, 'dropped': data_ref}
        return {'error': f'DataFrame not found: {data_ref}'}
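Since intermediate results stay resident in the Arrow store, long sessions benefit from occasional housekeeping with list_data and drop_data. A sketch (the shape of each entry in 'dataframes' depends on DataStore.list_refs(), which is outside this diff; plain ref strings are assumed here):

```python
import asyncio
from mcp_server.pandas_tools import PandasTools  # import path assumed

async def housekeeping():
    tools = PandasTools()
    inventory = await tools.list_data()
    print(inventory['count'], 'refs,', inventory['total_memory_mb'], 'MB held in memory')
    for ref in inventory['dataframes']:
        # Assumption: refs are plain strings; drop intermediate filtered results.
        if str(ref).endswith('_filtered'):
            await tools.drop_data(str(ref))

asyncio.run(housekeeping())
```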

538  mcp-servers/data-platform/mcp_server/postgres_tools.py  Normal file
@@ -0,0 +1,538 @@
|
|||||||
|
"""
|
||||||
|
PostgreSQL/PostGIS MCP Tools.
|
||||||
|
|
||||||
|
Provides database operations with connection pooling and PostGIS support.
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from typing import Dict, List, Optional, Any
|
||||||
|
import json
|
||||||
|
|
||||||
|
from .data_store import DataStore
|
||||||
|
from .config import load_config
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Optional imports - gracefully handle missing dependencies
|
||||||
|
try:
|
||||||
|
import asyncpg
|
||||||
|
ASYNCPG_AVAILABLE = True
|
||||||
|
except ImportError:
|
||||||
|
ASYNCPG_AVAILABLE = False
|
||||||
|
logger.warning("asyncpg not available - PostgreSQL tools will be disabled")
|
||||||
|
|
||||||
|
try:
|
||||||
|
import pandas as pd
|
||||||
|
PANDAS_AVAILABLE = True
|
||||||
|
except ImportError:
|
||||||
|
PANDAS_AVAILABLE = False
|
||||||
|
|
||||||
|
|
||||||
|
class PostgresTools:
|
||||||
|
"""PostgreSQL/PostGIS database tools"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.store = DataStore.get_instance()
|
||||||
|
self.config = load_config()
|
||||||
|
self.pool: Optional[Any] = None
|
||||||
|
self.max_rows = self.config.get('max_rows', 100_000)
|
||||||
|
|
||||||
|
async def _get_pool(self):
|
||||||
|
"""Get or create connection pool"""
|
||||||
|
if not ASYNCPG_AVAILABLE:
|
||||||
|
raise RuntimeError("asyncpg not installed - run: pip install asyncpg")
|
||||||
|
|
||||||
|
if self.pool is None:
|
||||||
|
postgres_url = self.config.get('postgres_url')
|
||||||
|
if not postgres_url:
|
||||||
|
raise RuntimeError(
|
||||||
|
"PostgreSQL not configured. Set POSTGRES_URL in "
|
||||||
|
"~/.config/claude/postgres.env"
|
||||||
|
)
|
||||||
|
self.pool = await asyncpg.create_pool(postgres_url, min_size=1, max_size=5)
|
||||||
|
return self.pool
|
||||||
|
|
||||||
|
async def pg_connect(self) -> Dict:
|
||||||
|
"""
|
||||||
|
Test PostgreSQL connection and return status.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with connection status, version, and database info
|
||||||
|
"""
|
||||||
|
if not ASYNCPG_AVAILABLE:
|
||||||
|
return {
|
||||||
|
'connected': False,
|
||||||
|
'error': 'asyncpg not installed',
|
||||||
|
'suggestion': 'pip install asyncpg'
|
||||||
|
}
|
||||||
|
|
||||||
|
postgres_url = self.config.get('postgres_url')
|
||||||
|
if not postgres_url:
|
||||||
|
return {
|
||||||
|
'connected': False,
|
||||||
|
'error': 'POSTGRES_URL not configured',
|
||||||
|
'suggestion': 'Create ~/.config/claude/postgres.env with POSTGRES_URL=postgresql://...'
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
version = await conn.fetchval('SELECT version()')
|
||||||
|
db_name = await conn.fetchval('SELECT current_database()')
|
||||||
|
user = await conn.fetchval('SELECT current_user')
|
||||||
|
|
||||||
|
# Check for PostGIS
|
||||||
|
postgis_version = None
|
||||||
|
try:
|
||||||
|
postgis_version = await conn.fetchval('SELECT PostGIS_Version()')
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return {
|
||||||
|
'connected': True,
|
||||||
|
'database': db_name,
|
||||||
|
'user': user,
|
||||||
|
'version': version.split(',')[0] if version else 'Unknown',
|
||||||
|
'postgis_version': postgis_version,
|
||||||
|
'postgis_available': postgis_version is not None
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"pg_connect failed: {e}")
|
||||||
|
return {
|
||||||
|
'connected': False,
|
||||||
|
'error': str(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
async def pg_query(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
params: Optional[List] = None,
|
||||||
|
name: Optional[str] = None
|
||||||
|
) -> Dict:
|
||||||
|
"""
|
||||||
|
Execute SELECT query and return results as data_ref.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
query: SQL SELECT query
|
||||||
|
params: Query parameters (positional, use $1, $2, etc.)
|
||||||
|
name: Optional name for result data_ref
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with data_ref for results or error
|
||||||
|
"""
|
||||||
|
if not PANDAS_AVAILABLE:
|
||||||
|
return {'error': 'pandas not available'}
|
||||||
|
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
if params:
|
||||||
|
rows = await conn.fetch(query, *params)
|
||||||
|
else:
|
||||||
|
rows = await conn.fetch(query)
|
||||||
|
|
||||||
|
if not rows:
|
||||||
|
return {
|
||||||
|
'data_ref': None,
|
||||||
|
'rows': 0,
|
||||||
|
'message': 'Query returned no results'
|
||||||
|
}
|
||||||
|
|
||||||
|
# Convert to DataFrame
|
||||||
|
df = pd.DataFrame([dict(r) for r in rows])
|
||||||
|
|
||||||
|
# Check row limit
|
||||||
|
check = self.store.check_row_limit(len(df))
|
||||||
|
if check['exceeded']:
|
||||||
|
return {
|
||||||
|
'error': 'row_limit_exceeded',
|
||||||
|
**check,
|
||||||
|
'preview': df.head(100).to_dict(orient='records')
|
||||||
|
}
|
||||||
|
|
||||||
|
# Store result
|
||||||
|
data_ref = self.store.store(df, name=name, source=f"pg_query: {query[:100]}...")
|
||||||
|
return {
|
||||||
|
'data_ref': data_ref,
|
||||||
|
'rows': len(df),
|
||||||
|
'columns': list(df.columns)
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"pg_query failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def pg_execute(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
params: Optional[List] = None
|
||||||
|
) -> Dict:
|
||||||
|
"""
|
||||||
|
Execute INSERT/UPDATE/DELETE query.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
query: SQL DML query
|
||||||
|
params: Query parameters
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with affected rows count
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
if params:
|
||||||
|
result = await conn.execute(query, *params)
|
||||||
|
else:
|
||||||
|
result = await conn.execute(query)
|
||||||
|
|
||||||
|
# Parse result (e.g., "INSERT 0 1" or "UPDATE 5")
|
||||||
|
parts = result.split()
|
||||||
|
affected = int(parts[-1]) if parts else 0
|
||||||
|
|
||||||
|
return {
|
||||||
|
'success': True,
|
||||||
|
'command': parts[0] if parts else 'UNKNOWN',
|
||||||
|
'affected_rows': affected
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"pg_execute failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def pg_tables(self, schema: str = 'public') -> Dict:
|
||||||
|
"""
|
||||||
|
List all tables in schema.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
schema: Schema name (default: public)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with list of tables
|
||||||
|
"""
|
||||||
|
query = """
|
||||||
|
SELECT
|
||||||
|
table_name,
|
||||||
|
table_type,
|
||||||
|
(SELECT count(*) FROM information_schema.columns c
|
||||||
|
WHERE c.table_schema = t.table_schema
|
||||||
|
AND c.table_name = t.table_name) as column_count
|
||||||
|
FROM information_schema.tables t
|
||||||
|
WHERE table_schema = $1
|
||||||
|
ORDER BY table_name
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
rows = await conn.fetch(query, schema)
|
||||||
|
tables = [
|
||||||
|
{
|
||||||
|
'name': r['table_name'],
|
||||||
|
'type': r['table_type'],
|
||||||
|
'columns': r['column_count']
|
||||||
|
}
|
||||||
|
for r in rows
|
||||||
|
]
|
||||||
|
return {
|
||||||
|
'schema': schema,
|
||||||
|
'count': len(tables),
|
||||||
|
'tables': tables
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"pg_tables failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def pg_columns(self, table: str, schema: str = 'public') -> Dict:
|
||||||
|
"""
|
||||||
|
Get column information for a table.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
table: Table name
|
||||||
|
schema: Schema name (default: public)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with column details
|
||||||
|
"""
|
||||||
|
query = """
|
||||||
|
SELECT
|
||||||
|
column_name,
|
||||||
|
data_type,
|
||||||
|
udt_name,
|
||||||
|
is_nullable,
|
||||||
|
column_default,
|
||||||
|
character_maximum_length,
|
||||||
|
numeric_precision
|
||||||
|
FROM information_schema.columns
|
||||||
|
WHERE table_schema = $1 AND table_name = $2
|
||||||
|
ORDER BY ordinal_position
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
rows = await conn.fetch(query, schema, table)
|
||||||
|
columns = [
|
||||||
|
{
|
||||||
|
'name': r['column_name'],
|
||||||
|
'type': r['data_type'],
|
||||||
|
'udt': r['udt_name'],
|
||||||
|
'nullable': r['is_nullable'] == 'YES',
|
||||||
|
'default': r['column_default'],
|
||||||
|
'max_length': r['character_maximum_length'],
|
||||||
|
'precision': r['numeric_precision']
|
||||||
|
}
|
||||||
|
for r in rows
|
||||||
|
]
|
||||||
|
return {
|
||||||
|
'table': f'{schema}.{table}',
|
||||||
|
'column_count': len(columns),
|
||||||
|
'columns': columns
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"pg_columns failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def pg_schemas(self) -> Dict:
|
||||||
|
"""
|
||||||
|
List all schemas in database.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with list of schemas
|
||||||
|
"""
|
||||||
|
query = """
|
||||||
|
SELECT schema_name
|
||||||
|
FROM information_schema.schemata
|
||||||
|
WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||||
|
ORDER BY schema_name
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
rows = await conn.fetch(query)
|
||||||
|
schemas = [r['schema_name'] for r in rows]
|
||||||
|
return {
|
||||||
|
'count': len(schemas),
|
||||||
|
'schemas': schemas
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"pg_schemas failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def st_tables(self, schema: str = 'public') -> Dict:
|
||||||
|
"""
|
||||||
|
List PostGIS-enabled tables.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
schema: Schema name (default: public)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with list of tables with geometry columns
|
||||||
|
"""
|
||||||
|
query = """
|
||||||
|
SELECT
|
||||||
|
f_table_name as table_name,
|
||||||
|
f_geometry_column as geometry_column,
|
||||||
|
type as geometry_type,
|
||||||
|
srid,
|
||||||
|
coord_dimension
|
||||||
|
FROM geometry_columns
|
||||||
|
WHERE f_table_schema = $1
|
||||||
|
ORDER BY f_table_name
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
rows = await conn.fetch(query, schema)
|
||||||
|
tables = [
|
||||||
|
{
|
||||||
|
'table': r['table_name'],
|
||||||
|
'geometry_column': r['geometry_column'],
|
||||||
|
'geometry_type': r['geometry_type'],
|
||||||
|
'srid': r['srid'],
|
||||||
|
'dimensions': r['coord_dimension']
|
||||||
|
}
|
||||||
|
for r in rows
|
||||||
|
]
|
||||||
|
return {
|
||||||
|
'schema': schema,
|
||||||
|
'count': len(tables),
|
||||||
|
'postgis_tables': tables
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
if 'geometry_columns' in str(e):
|
||||||
|
return {
|
||||||
|
'error': 'PostGIS not installed or extension not enabled',
|
||||||
|
'suggestion': 'Run: CREATE EXTENSION IF NOT EXISTS postgis;'
|
||||||
|
}
|
||||||
|
logger.error(f"st_tables failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def st_geometry_type(self, table: str, column: str, schema: str = 'public') -> Dict:
|
||||||
|
"""
|
||||||
|
Get geometry type of a column.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
table: Table name
|
||||||
|
column: Geometry column name
|
||||||
|
schema: Schema name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with geometry type information
|
||||||
|
"""
|
||||||
|
query = f"""
|
||||||
|
SELECT DISTINCT ST_GeometryType({column}) as geom_type
|
||||||
|
FROM {schema}.{table}
|
||||||
|
WHERE {column} IS NOT NULL
|
||||||
|
LIMIT 10
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
rows = await conn.fetch(query)
|
||||||
|
types = [r['geom_type'] for r in rows]
|
||||||
|
return {
|
||||||
|
'table': f'{schema}.{table}',
|
||||||
|
'column': column,
|
||||||
|
'geometry_types': types
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"st_geometry_type failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def st_srid(self, table: str, column: str, schema: str = 'public') -> Dict:
|
||||||
|
"""
|
||||||
|
Get SRID of geometry column.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
table: Table name
|
||||||
|
column: Geometry column name
|
||||||
|
schema: Schema name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with SRID information
|
||||||
|
"""
|
||||||
|
query = f"""
|
||||||
|
SELECT DISTINCT ST_SRID({column}) as srid
|
||||||
|
FROM {schema}.{table}
|
||||||
|
WHERE {column} IS NOT NULL
|
||||||
|
LIMIT 1
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
row = await conn.fetchrow(query)
|
||||||
|
srid = row['srid'] if row else None
|
||||||
|
|
||||||
|
# Get SRID description
|
||||||
|
srid_info = None
|
||||||
|
if srid:
|
||||||
|
srid_query = """
|
||||||
|
SELECT srtext, proj4text
|
||||||
|
FROM spatial_ref_sys
|
||||||
|
WHERE srid = $1
|
||||||
|
"""
|
||||||
|
srid_row = await conn.fetchrow(srid_query, srid)
|
||||||
|
if srid_row:
|
||||||
|
srid_info = {
|
||||||
|
'description': srid_row['srtext'][:200] if srid_row['srtext'] else None,
|
||||||
|
'proj4': srid_row['proj4text']
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
'table': f'{schema}.{table}',
|
||||||
|
'column': column,
|
||||||
|
'srid': srid,
|
||||||
|
'info': srid_info
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"st_srid failed: {e}")
|
||||||
|
return {'error': str(e)}
|
||||||
|
|
||||||
|
async def st_extent(self, table: str, column: str, schema: str = 'public') -> Dict:
|
||||||
|
"""
|
||||||
|
Get bounding box of all geometries.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
table: Table name
|
||||||
|
column: Geometry column name
|
||||||
|
schema: Schema name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with bounding box coordinates
|
||||||
|
"""
|
||||||
|
query = f"""
|
||||||
|
SELECT
|
||||||
|
ST_XMin(extent) as xmin,
|
||||||
|
ST_YMin(extent) as ymin,
|
||||||
|
ST_XMax(extent) as xmax,
|
||||||
|
ST_YMax(extent) as ymax
|
||||||
|
FROM (
|
||||||
|
SELECT ST_Extent({column}) as extent
|
||||||
|
FROM {schema}.{table}
|
||||||
|
) sub
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
pool = await self._get_pool()
|
||||||
|
async with pool.acquire() as conn:
|
||||||
|
row = await conn.fetchrow(query)
|
||||||
|
if row and row['xmin'] is not None:
|
||||||
|
return {
|
||||||
|
'table': f'{schema}.{table}',
|
||||||
|
'column': column,
|
||||||
|
'bbox': {
|
||||||
|
'xmin': float(row['xmin']),
|
||||||
|
'ymin': float(row['ymin']),
|
||||||
|
'xmax': float(row['xmax']),
|
||||||
|
'ymax': float(row['ymax'])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
'table': f'{schema}.{table}',
|
||||||
|
'column': column,
|
||||||
|
'bbox': None,
|
||||||
|
'message': 'No geometries found or all NULL'
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"st_extent failed: {e}")
|
||||||
|
return {'error': str(e)}
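A short sketch of how the PostGIS helpers above chain together to summarise spatial tables (PostGIS must be enabled in the target database; nothing here beyond the method names is taken from this diff):

```python
import asyncio
from mcp_server.postgres_tools import PostgresTools  # import path assumed

async def spatial_overview():
    pg = PostgresTools()
    spatial = await pg.st_tables()                    # reads geometry_columns
    for t in spatial.get('postgis_tables', []):
        bbox = await pg.st_extent(t['table'], t['geometry_column'])
        print(t['table'], bbox.get('bbox'))
    await pg.close()

asyncio.run(spatial_overview())
```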
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
"""Close connection pool"""
|
||||||
|
if self.pool:
|
||||||
|
await self.pool.close()
|
||||||
|
self.pool = None
|
||||||
|
|
||||||
|
|
||||||
|
def check_connection() -> None:
|
||||||
|
"""
|
||||||
|
Check PostgreSQL connection for SessionStart hook.
|
||||||
|
Prints warning to stderr if connection fails.
|
||||||
|
"""
|
||||||
|
import sys
|
||||||
|
|
||||||
|
config = load_config()
|
||||||
|
if not config.get('postgres_url'):
|
||||||
|
print(
|
||||||
|
"[data-platform] PostgreSQL not configured (POSTGRES_URL not set)",
|
||||||
|
file=sys.stderr
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
async def test():
|
||||||
|
try:
|
||||||
|
if not ASYNCPG_AVAILABLE:
|
||||||
|
print(
|
||||||
|
"[data-platform] asyncpg not installed - PostgreSQL tools unavailable",
|
||||||
|
file=sys.stderr
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
conn = await asyncpg.connect(config['postgres_url'], timeout=5)
|
||||||
|
await conn.close()
|
||||||
|
print("[data-platform] PostgreSQL connection OK", file=sys.stderr)
|
||||||
|
except Exception as e:
|
||||||
|
print(
|
||||||
|
f"[data-platform] PostgreSQL connection failed: {e}",
|
||||||
|
file=sys.stderr
|
||||||
|
)
|
||||||
|
|
||||||
|
asyncio.run(test())
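The PostgreSQL tools use asyncpg-style positional placeholders ($1, $2, ...) and hand query results back as a data_ref, so they can flow straight into the pandas tools. A hedged usage sketch (the table is hypothetical and POSTGRES_URL must already be configured):

```python
import asyncio
from mcp_server.postgres_tools import PostgresTools  # import path assumed

async def query_example():
    pg = PostgresTools()
    status = await pg.pg_connect()
    if not status.get('connected'):
        print('not connected:', status.get('error'))
        return
    result = await pg.pg_query(
        'SELECT id, name FROM customers WHERE id > $1',   # hypothetical table
        params=[100],
        name='recent_customers'
    )
    print(result.get('rows'), result.get('data_ref'))
    await pg.close()

asyncio.run(query_example())
```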

795  mcp-servers/data-platform/mcp_server/server.py  Normal file
@@ -0,0 +1,795 @@
|
|||||||
|
"""
|
||||||
|
MCP Server entry point for Data Platform integration.
|
||||||
|
|
||||||
|
Provides pandas, PostgreSQL/PostGIS, and dbt tools to Claude Code via JSON-RPC 2.0 over stdio.
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
from mcp.server import Server
|
||||||
|
from mcp.server.stdio import stdio_server
|
||||||
|
from mcp.types import Tool, TextContent
|
||||||
|
|
||||||
|
from .config import DataPlatformConfig
|
||||||
|
from .data_store import DataStore
|
||||||
|
from .pandas_tools import PandasTools
|
||||||
|
from .postgres_tools import PostgresTools
|
||||||
|
from .dbt_tools import DbtTools
|
||||||
|
|
||||||
|
# Suppress noisy MCP validation warnings on stderr
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
logging.getLogger("root").setLevel(logging.ERROR)
|
||||||
|
logging.getLogger("mcp").setLevel(logging.ERROR)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DataPlatformMCPServer:
|
||||||
|
"""MCP Server for data platform integration"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.server = Server("data-platform-mcp")
|
||||||
|
self.config = None
|
||||||
|
self.pandas_tools = None
|
||||||
|
self.postgres_tools = None
|
||||||
|
self.dbt_tools = None
|
||||||
|
|
||||||
|
async def initialize(self):
|
||||||
|
"""Initialize server and load configuration."""
|
||||||
|
try:
|
||||||
|
config_loader = DataPlatformConfig()
|
||||||
|
self.config = config_loader.load()
|
||||||
|
|
||||||
|
self.pandas_tools = PandasTools()
|
||||||
|
self.postgres_tools = PostgresTools()
|
||||||
|
self.dbt_tools = DbtTools()
|
||||||
|
|
||||||
|
# Log available capabilities
|
||||||
|
caps = []
|
||||||
|
caps.append("pandas")
|
||||||
|
if self.config.get('postgres_available'):
|
||||||
|
caps.append("PostgreSQL")
|
||||||
|
if self.config.get('dbt_available'):
|
||||||
|
caps.append("dbt")
|
||||||
|
|
||||||
|
logger.info(f"Data Platform MCP Server initialized with: {', '.join(caps)}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to initialize: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def setup_tools(self):
|
||||||
|
"""Register all available tools with the MCP server"""
|
||||||
|
|
||||||
|
@self.server.list_tools()
|
||||||
|
async def list_tools() -> list[Tool]:
|
||||||
|
"""Return list of available tools"""
|
||||||
|
tools = [
|
||||||
|
# pandas tools - always available
|
||||||
|
Tool(
|
||||||
|
name="read_csv",
|
||||||
|
description="Load CSV file into DataFrame",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"file_path": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Path to CSV file"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for data_ref"
|
||||||
|
},
|
||||||
|
"chunk_size": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Process in chunks of this size"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["file_path"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="read_parquet",
|
||||||
|
description="Load Parquet file into DataFrame",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"file_path": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Path to Parquet file"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for data_ref"
|
||||||
|
},
|
||||||
|
"columns": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {"type": "string"},
|
||||||
|
"description": "Optional list of columns to load"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["file_path"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="read_json",
|
||||||
|
description="Load JSON/JSONL file into DataFrame",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"file_path": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Path to JSON file"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for data_ref"
|
||||||
|
},
|
||||||
|
"lines": {
|
||||||
|
"type": "boolean",
|
||||||
|
"default": False,
|
||||||
|
"description": "Read as JSON Lines format"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["file_path"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="to_csv",
|
||||||
|
description="Export DataFrame to CSV file",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
},
|
||||||
|
"file_path": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Output file path"
|
||||||
|
},
|
||||||
|
"index": {
|
||||||
|
"type": "boolean",
|
||||||
|
"default": False,
|
||||||
|
"description": "Include index column"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref", "file_path"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="to_parquet",
|
||||||
|
description="Export DataFrame to Parquet file",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
},
|
||||||
|
"file_path": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Output file path"
|
||||||
|
},
|
||||||
|
"compression": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "snappy",
|
||||||
|
"description": "Compression codec"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref", "file_path"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="describe",
|
||||||
|
description="Get statistical summary of DataFrame",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="head",
|
||||||
|
description="Get first N rows of DataFrame",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
},
|
||||||
|
"n": {
|
||||||
|
"type": "integer",
|
||||||
|
"default": 10,
|
||||||
|
"description": "Number of rows"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="tail",
|
||||||
|
description="Get last N rows of DataFrame",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
},
|
||||||
|
"n": {
|
||||||
|
"type": "integer",
|
||||||
|
"default": 10,
|
||||||
|
"description": "Number of rows"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="filter",
|
||||||
|
description="Filter DataFrame rows by condition",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
},
|
||||||
|
"condition": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "pandas query string (e.g., 'age > 30 and city == \"NYC\"')"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for result data_ref"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref", "condition"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="select",
|
||||||
|
description="Select specific columns from DataFrame",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
},
|
||||||
|
"columns": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {"type": "string"},
|
||||||
|
"description": "List of column names to select"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for result data_ref"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref", "columns"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="groupby",
|
||||||
|
description="Group DataFrame and aggregate",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to stored DataFrame"
|
||||||
|
},
|
||||||
|
"by": {
|
||||||
|
"oneOf": [
|
||||||
|
{"type": "string"},
|
||||||
|
{"type": "array", "items": {"type": "string"}}
|
||||||
|
],
|
||||||
|
"description": "Column(s) to group by"
|
||||||
|
},
|
||||||
|
"agg": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "Aggregation dict (e.g., {\"sales\": \"sum\", \"count\": \"mean\"})"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for result data_ref"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref", "by", "agg"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="join",
|
||||||
|
description="Join two DataFrames",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"left_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to left DataFrame"
|
||||||
|
},
|
||||||
|
"right_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to right DataFrame"
|
||||||
|
},
|
||||||
|
"on": {
|
||||||
|
"oneOf": [
|
||||||
|
{"type": "string"},
|
||||||
|
{"type": "array", "items": {"type": "string"}}
|
||||||
|
],
|
||||||
|
"description": "Column(s) to join on (if same name in both)"
|
||||||
|
},
|
||||||
|
"left_on": {
|
||||||
|
"oneOf": [
|
||||||
|
{"type": "string"},
|
||||||
|
{"type": "array", "items": {"type": "string"}}
|
||||||
|
],
|
||||||
|
"description": "Left join column(s)"
|
||||||
|
},
|
||||||
|
"right_on": {
|
||||||
|
"oneOf": [
|
||||||
|
{"type": "string"},
|
||||||
|
{"type": "array", "items": {"type": "string"}}
|
||||||
|
],
|
||||||
|
"description": "Right join column(s)"
|
||||||
|
},
|
||||||
|
"how": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["inner", "left", "right", "outer"],
|
||||||
|
"default": "inner",
|
||||||
|
"description": "Join type"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for result data_ref"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["left_ref", "right_ref"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="list_data",
|
||||||
|
description="List all stored DataFrames",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="drop_data",
|
||||||
|
description="Remove a DataFrame from storage",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"data_ref": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Reference to drop"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["data_ref"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
# PostgreSQL tools
|
||||||
|
Tool(
|
||||||
|
name="pg_connect",
|
||||||
|
description="Test PostgreSQL connection and return status",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="pg_query",
|
||||||
|
description="Execute SELECT query and return results as data_ref",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"query": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "SQL SELECT query"
|
||||||
|
},
|
||||||
|
"params": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {},
|
||||||
|
"description": "Query parameters (use $1, $2, etc.)"
|
||||||
|
},
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Optional name for result data_ref"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["query"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="pg_execute",
|
||||||
|
description="Execute INSERT/UPDATE/DELETE query",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"query": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "SQL DML query"
|
||||||
|
},
|
||||||
|
"params": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {},
|
||||||
|
"description": "Query parameters"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["query"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="pg_tables",
|
||||||
|
description="List all tables in schema",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "public",
|
||||||
|
"description": "Schema name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="pg_columns",
|
||||||
|
description="Get column information for a table",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"table": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Table name"
|
||||||
|
},
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "public",
|
||||||
|
"description": "Schema name"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["table"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="pg_schemas",
|
||||||
|
description="List all schemas in database",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
# PostGIS tools
|
||||||
|
Tool(
|
||||||
|
name="st_tables",
|
||||||
|
description="List PostGIS-enabled tables",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "public",
|
||||||
|
"description": "Schema name"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="st_geometry_type",
|
||||||
|
description="Get geometry type of a column",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"table": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Table name"
|
||||||
|
},
|
||||||
|
"column": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Geometry column name"
|
||||||
|
},
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "public",
|
||||||
|
"description": "Schema name"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["table", "column"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="st_srid",
|
||||||
|
description="Get SRID of geometry column",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"table": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Table name"
|
||||||
|
},
|
||||||
|
"column": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Geometry column name"
|
||||||
|
},
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "public",
|
||||||
|
"description": "Schema name"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["table", "column"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="st_extent",
|
||||||
|
description="Get bounding box of all geometries",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"table": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Table name"
|
||||||
|
},
|
||||||
|
"column": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Geometry column name"
|
||||||
|
},
|
||||||
|
"schema": {
|
||||||
|
"type": "string",
|
||||||
|
"default": "public",
|
||||||
|
"description": "Schema name"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["table", "column"]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
# dbt tools
|
||||||
|
Tool(
|
||||||
|
name="dbt_parse",
|
||||||
|
description="Validate dbt project (pre-flight check)",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="dbt_run",
|
||||||
|
description="Run dbt models with pre-validation",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"select": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Model selection (e.g., 'model_name', '+model_name', 'tag:daily')"
|
||||||
|
},
|
||||||
|
"exclude": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Models to exclude"
|
||||||
|
},
|
||||||
|
"full_refresh": {
|
||||||
|
"type": "boolean",
|
||||||
|
"default": False,
|
||||||
|
"description": "Rebuild incremental models"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="dbt_test",
|
||||||
|
description="Run dbt tests",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"select": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Test selection"
|
||||||
|
},
|
||||||
|
"exclude": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Tests to exclude"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="dbt_build",
|
||||||
|
description="Run dbt build (run + test) with pre-validation",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"select": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Model/test selection"
|
||||||
|
},
|
||||||
|
"exclude": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Resources to exclude"
|
||||||
|
},
|
||||||
|
"full_refresh": {
|
||||||
|
"type": "boolean",
|
||||||
|
"default": False,
|
||||||
|
"description": "Rebuild incremental models"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="dbt_compile",
|
||||||
|
description="Compile dbt models to SQL without executing",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"select": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Model selection"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="dbt_ls",
|
||||||
|
description="List dbt resources",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"select": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Resource selection"
|
||||||
|
},
|
||||||
|
"resource_type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["model", "test", "seed", "snapshot", "source"],
|
||||||
|
"description": "Filter by type"
|
||||||
|
},
|
||||||
|
"output": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["name", "path", "json"],
|
||||||
|
"default": "name",
|
||||||
|
"description": "Output format"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="dbt_docs_generate",
|
||||||
|
description="Generate dbt documentation",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
Tool(
|
||||||
|
name="dbt_lineage",
|
||||||
|
description="Get model dependencies and lineage",
|
||||||
|
inputSchema={
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"model": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Model name to analyze"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["model"]
|
||||||
|
}
|
||||||
|
)
|
||||||
|
]
|
||||||
|
return tools
|
||||||
|
|
||||||
|
@self.server.call_tool()
|
||||||
|
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
|
||||||
|
"""Handle tool invocation."""
|
||||||
|
try:
|
||||||
|
# Route to appropriate tool handler
|
||||||
|
# pandas tools
|
||||||
|
if name == "read_csv":
|
||||||
|
result = await self.pandas_tools.read_csv(**arguments)
|
||||||
|
elif name == "read_parquet":
|
||||||
|
result = await self.pandas_tools.read_parquet(**arguments)
|
||||||
|
elif name == "read_json":
|
||||||
|
result = await self.pandas_tools.read_json(**arguments)
|
||||||
|
elif name == "to_csv":
|
||||||
|
result = await self.pandas_tools.to_csv(**arguments)
|
||||||
|
elif name == "to_parquet":
|
||||||
|
result = await self.pandas_tools.to_parquet(**arguments)
|
||||||
|
elif name == "describe":
|
||||||
|
result = await self.pandas_tools.describe(**arguments)
|
||||||
|
elif name == "head":
|
||||||
|
result = await self.pandas_tools.head(**arguments)
|
||||||
|
elif name == "tail":
|
||||||
|
result = await self.pandas_tools.tail(**arguments)
|
||||||
|
elif name == "filter":
|
||||||
|
result = await self.pandas_tools.filter(**arguments)
|
||||||
|
elif name == "select":
|
||||||
|
result = await self.pandas_tools.select(**arguments)
|
||||||
|
elif name == "groupby":
|
||||||
|
result = await self.pandas_tools.groupby(**arguments)
|
||||||
|
elif name == "join":
|
||||||
|
result = await self.pandas_tools.join(**arguments)
|
||||||
|
elif name == "list_data":
|
||||||
|
result = await self.pandas_tools.list_data()
|
||||||
|
elif name == "drop_data":
|
||||||
|
result = await self.pandas_tools.drop_data(**arguments)
|
||||||
|
# PostgreSQL tools
|
||||||
|
elif name == "pg_connect":
|
||||||
|
result = await self.postgres_tools.pg_connect()
|
||||||
|
elif name == "pg_query":
|
||||||
|
result = await self.postgres_tools.pg_query(**arguments)
|
||||||
|
elif name == "pg_execute":
|
||||||
|
result = await self.postgres_tools.pg_execute(**arguments)
|
||||||
|
elif name == "pg_tables":
|
||||||
|
result = await self.postgres_tools.pg_tables(**arguments)
|
||||||
|
elif name == "pg_columns":
|
||||||
|
result = await self.postgres_tools.pg_columns(**arguments)
|
||||||
|
elif name == "pg_schemas":
|
||||||
|
result = await self.postgres_tools.pg_schemas()
|
||||||
|
# PostGIS tools
|
||||||
|
elif name == "st_tables":
|
||||||
|
result = await self.postgres_tools.st_tables(**arguments)
|
||||||
|
elif name == "st_geometry_type":
|
||||||
|
result = await self.postgres_tools.st_geometry_type(**arguments)
|
||||||
|
elif name == "st_srid":
|
||||||
|
result = await self.postgres_tools.st_srid(**arguments)
|
||||||
|
elif name == "st_extent":
|
||||||
|
result = await self.postgres_tools.st_extent(**arguments)
|
||||||
|
# dbt tools
|
||||||
|
elif name == "dbt_parse":
|
||||||
|
result = await self.dbt_tools.dbt_parse()
|
||||||
|
elif name == "dbt_run":
|
||||||
|
result = await self.dbt_tools.dbt_run(**arguments)
|
||||||
|
elif name == "dbt_test":
|
||||||
|
result = await self.dbt_tools.dbt_test(**arguments)
|
||||||
|
elif name == "dbt_build":
|
||||||
|
result = await self.dbt_tools.dbt_build(**arguments)
|
||||||
|
elif name == "dbt_compile":
|
||||||
|
result = await self.dbt_tools.dbt_compile(**arguments)
|
||||||
|
elif name == "dbt_ls":
|
||||||
|
result = await self.dbt_tools.dbt_ls(**arguments)
|
||||||
|
elif name == "dbt_docs_generate":
|
||||||
|
result = await self.dbt_tools.dbt_docs_generate()
|
||||||
|
elif name == "dbt_lineage":
|
||||||
|
result = await self.dbt_tools.dbt_lineage(**arguments)
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Unknown tool: {name}")
|
||||||
|
|
||||||
|
return [TextContent(
|
||||||
|
type="text",
|
||||||
|
text=json.dumps(result, indent=2, default=str)
|
||||||
|
)]
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Tool {name} failed: {e}")
|
||||||
|
return [TextContent(
|
||||||
|
type="text",
|
||||||
|
text=json.dumps({"error": str(e)}, indent=2)
|
||||||
|
)]
|
||||||
|
|
||||||
|
async def run(self):
|
||||||
|
"""Run the MCP server"""
|
||||||
|
await self.initialize()
|
||||||
|
self.setup_tools()
|
||||||
|
|
||||||
|
async with stdio_server() as (read_stream, write_stream):
|
||||||
|
await self.server.run(
|
||||||
|
read_stream,
|
||||||
|
write_stream,
|
||||||
|
self.server.create_initialization_options()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
"""Main entry point"""
|
||||||
|
server = DataPlatformMCPServer()
|
||||||
|
await server.run()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
asyncio.run(main())
|
||||||
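A minimal sketch of the response contract used by `call_tool()` above: every tool handler returns a plain dict, and both the success and error paths serialize it into a single JSON `TextContent` block. The sample payload is hypothetical; only the serialization shape comes from the code.

```python
# Illustrative sketch, not part of the diff: how a tool result becomes an MCP response.
import json
from mcp.types import TextContent

def to_text_content(result: dict) -> TextContent:
    # Mirrors the dispatcher: json.dumps(..., indent=2, default=str) in a "text" block
    return TextContent(type="text", text=json.dumps(result, indent=2, default=str))

print(to_text_content({"data_ref": "df_1a2b3c4d", "rows": 3}).text)  # hypothetical payload
```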
49 mcp-servers/data-platform/pyproject.toml Normal file
@@ -0,0 +1,49 @@
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "data-platform-mcp"
version = "1.0.0"
description = "MCP Server for data engineering with pandas, PostgreSQL/PostGIS, and dbt"
readme = "README.md"
license = {text = "MIT"}
requires-python = ">=3.10"
authors = [
    {name = "Leo Miranda"}
]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
dependencies = [
    "mcp>=0.9.0",
    "pandas>=2.0.0",
    "pyarrow>=14.0.0",
    "asyncpg>=0.29.0",
    "geoalchemy2>=0.14.0",
    "shapely>=2.0.0",
    "dbt-core>=1.9.0",
    "dbt-postgres>=1.9.0",
    "python-dotenv>=1.0.0",
    "pydantic>=2.5.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4.3",
    "pytest-asyncio>=0.23.0",
]

[tool.setuptools.packages.find]
where = ["."]
include = ["mcp_server*"]

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
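Given the pytest settings above (`asyncio_mode = "auto"`, `testpaths = ["tests"]`) and the `dev` extra, a hedged sketch of running the suite after `pip install -e ".[dev]"`; `pytest.main()` is standard pytest API, the quiet flag is just a choice.

```python
# Sketch: run the data-platform test suite programmatically.
import sys
import pytest

sys.exit(pytest.main(["-q"]))  # testpaths/asyncio_mode come from pyproject.toml
```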
23 mcp-servers/data-platform/requirements.txt Normal file
@@ -0,0 +1,23 @@
# MCP SDK
mcp>=0.9.0

# Data Processing
pandas>=2.0.0
pyarrow>=14.0.0

# PostgreSQL/PostGIS
asyncpg>=0.29.0
geoalchemy2>=0.14.0
shapely>=2.0.0

# dbt
dbt-core>=1.9.0
dbt-postgres>=1.9.0

# Utilities
python-dotenv>=1.0.0
pydantic>=2.5.0

# Testing
pytest>=7.4.3
pytest-asyncio>=0.23.0
3 mcp-servers/data-platform/tests/__init__.py Normal file
@@ -0,0 +1,3 @@
"""
Tests for Data Platform MCP Server.
"""
239 mcp-servers/data-platform/tests/test_config.py Normal file
@@ -0,0 +1,239 @@
"""
Unit tests for configuration loader.
"""
import pytest
from pathlib import Path
import os


def test_load_system_config(tmp_path, monkeypatch):
    """Test loading system-level PostgreSQL configuration"""
    # Import here to avoid import errors before setup
    from mcp_server.config import DataPlatformConfig

    # Mock home directory
    config_dir = tmp_path / '.config' / 'claude'
    config_dir.mkdir(parents=True)

    config_file = config_dir / 'postgres.env'
    config_file.write_text(
        "POSTGRES_URL=postgresql://user:pass@localhost:5432/testdb\n"
    )

    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(tmp_path)

    config = DataPlatformConfig()
    result = config.load()

    assert result['postgres_url'] == 'postgresql://user:pass@localhost:5432/testdb'
    assert result['postgres_available'] is True


def test_postgres_optional(tmp_path, monkeypatch):
    """Test that PostgreSQL configuration is optional"""
    from mcp_server.config import DataPlatformConfig

    # No postgres.env file
    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(tmp_path)

    # Clear any existing env vars
    monkeypatch.delenv('POSTGRES_URL', raising=False)

    config = DataPlatformConfig()
    result = config.load()

    assert result['postgres_url'] is None
    assert result['postgres_available'] is False


def test_project_config_override(tmp_path, monkeypatch):
    """Test that project config overrides system config"""
    from mcp_server.config import DataPlatformConfig

    # Set up system config
    system_config_dir = tmp_path / '.config' / 'claude'
    system_config_dir.mkdir(parents=True)

    system_config = system_config_dir / 'postgres.env'
    system_config.write_text(
        "POSTGRES_URL=postgresql://system:pass@localhost:5432/systemdb\n"
    )

    # Set up project config
    project_dir = tmp_path / 'project'
    project_dir.mkdir()

    project_config = project_dir / '.env'
    project_config.write_text(
        "POSTGRES_URL=postgresql://project:pass@localhost:5432/projectdb\n"
        "DBT_PROJECT_DIR=/path/to/dbt\n"
    )

    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(project_dir)

    config = DataPlatformConfig()
    result = config.load()

    # Project config should override
    assert result['postgres_url'] == 'postgresql://project:pass@localhost:5432/projectdb'
    assert result['dbt_project_dir'] == '/path/to/dbt'


def test_max_rows_config(tmp_path, monkeypatch):
    """Test max rows configuration"""
    from mcp_server.config import DataPlatformConfig

    project_dir = tmp_path / 'project'
    project_dir.mkdir()

    project_config = project_dir / '.env'
    project_config.write_text("DATA_PLATFORM_MAX_ROWS=50000\n")

    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(project_dir)

    config = DataPlatformConfig()
    result = config.load()

    assert result['max_rows'] == 50000


def test_default_max_rows(tmp_path, monkeypatch):
    """Test default max rows value"""
    from mcp_server.config import DataPlatformConfig

    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(tmp_path)

    # Clear any existing env vars
    monkeypatch.delenv('DATA_PLATFORM_MAX_ROWS', raising=False)

    config = DataPlatformConfig()
    result = config.load()

    assert result['max_rows'] == 100_000  # Default value


def test_dbt_auto_detection(tmp_path, monkeypatch):
    """Test automatic dbt project detection"""
    from mcp_server.config import DataPlatformConfig

    # Create project with dbt_project.yml
    project_dir = tmp_path / 'project'
    project_dir.mkdir()
    (project_dir / 'dbt_project.yml').write_text("name: test_project\n")

    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(project_dir)
    # Clear PWD and DBT_PROJECT_DIR to ensure auto-detection
    monkeypatch.delenv('PWD', raising=False)
    monkeypatch.delenv('DBT_PROJECT_DIR', raising=False)
    monkeypatch.delenv('CLAUDE_PROJECT_DIR', raising=False)

    config = DataPlatformConfig()
    result = config.load()

    assert result['dbt_project_dir'] == str(project_dir)
    assert result['dbt_available'] is True


def test_dbt_subdirectory_detection(tmp_path, monkeypatch):
    """Test dbt project detection in subdirectory"""
    from mcp_server.config import DataPlatformConfig

    # Create project with dbt in subdirectory
    project_dir = tmp_path / 'project'
    project_dir.mkdir()
    # Need a marker file for _find_project_directory to find the project
    (project_dir / '.git').mkdir()
    dbt_dir = project_dir / 'transform'
    dbt_dir.mkdir()
    (dbt_dir / 'dbt_project.yml').write_text("name: test_project\n")

    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(project_dir)
    # Clear env vars to ensure auto-detection
    monkeypatch.delenv('PWD', raising=False)
    monkeypatch.delenv('DBT_PROJECT_DIR', raising=False)
    monkeypatch.delenv('CLAUDE_PROJECT_DIR', raising=False)

    config = DataPlatformConfig()
    result = config.load()

    assert result['dbt_project_dir'] == str(dbt_dir)
    assert result['dbt_available'] is True


def test_no_dbt_project(tmp_path, monkeypatch):
    """Test when no dbt project exists"""
    from mcp_server.config import DataPlatformConfig

    project_dir = tmp_path / 'project'
    project_dir.mkdir()

    monkeypatch.setenv('HOME', str(tmp_path))
    monkeypatch.chdir(project_dir)

    # Clear any existing env vars
    monkeypatch.delenv('DBT_PROJECT_DIR', raising=False)

    config = DataPlatformConfig()
    result = config.load()

    assert result['dbt_project_dir'] is None
    assert result['dbt_available'] is False


def test_find_project_directory_from_env(tmp_path, monkeypatch):
    """Test finding project directory from CLAUDE_PROJECT_DIR env var"""
    from mcp_server.config import DataPlatformConfig

    project_dir = tmp_path / 'my-project'
    project_dir.mkdir()
    (project_dir / '.git').mkdir()

    monkeypatch.setenv('CLAUDE_PROJECT_DIR', str(project_dir))

    config = DataPlatformConfig()
    result = config._find_project_directory()

    assert result == project_dir


def test_find_project_directory_from_cwd(tmp_path, monkeypatch):
    """Test finding project directory from cwd with .env file"""
    from mcp_server.config import DataPlatformConfig

    project_dir = tmp_path / 'project'
    project_dir.mkdir()
    (project_dir / '.env').write_text("TEST=value")

    monkeypatch.chdir(project_dir)
    monkeypatch.delenv('CLAUDE_PROJECT_DIR', raising=False)
    monkeypatch.delenv('PWD', raising=False)

    config = DataPlatformConfig()
    result = config._find_project_directory()

    assert result == project_dir


def test_find_project_directory_none_when_no_markers(tmp_path, monkeypatch):
    """Test returns None when no project markers found"""
    from mcp_server.config import DataPlatformConfig

    empty_dir = tmp_path / 'empty'
    empty_dir.mkdir()

    monkeypatch.chdir(empty_dir)
    monkeypatch.delenv('CLAUDE_PROJECT_DIR', raising=False)
    monkeypatch.delenv('PWD', raising=False)
    monkeypatch.delenv('DBT_PROJECT_DIR', raising=False)

    config = DataPlatformConfig()
    result = config._find_project_directory()

    assert result is None
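The layered lookup these config tests describe (a project-level `.env` winning over `~/.config/claude/postgres.env`) can be sketched with `python-dotenv`, which is a declared dependency; the file names come from the tests, while the exact merge order inside `DataPlatformConfig` is an assumption here.

```python
# Hedged sketch of the system -> project config layering; not the plugin's actual loader.
from pathlib import Path
from dotenv import dotenv_values

system_cfg = dotenv_values(Path.home() / '.config' / 'claude' / 'postgres.env')  # empty dict if missing
project_cfg = dotenv_values(Path.cwd() / '.env')
merged = {**system_cfg, **project_cfg}  # project values override system values
postgres_url = merged.get('POSTGRES_URL')
```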
240 mcp-servers/data-platform/tests/test_data_store.py Normal file
@@ -0,0 +1,240 @@
"""
Unit tests for Arrow IPC DataFrame registry.
"""
import pytest
import pandas as pd
import pyarrow as pa


def test_store_pandas_dataframe():
    """Test storing pandas DataFrame"""
    from mcp_server.data_store import DataStore

    # Create fresh instance for test
    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
    data_ref = store.store(df, name='test_df')

    assert data_ref == 'test_df'
    assert 'test_df' in store._dataframes
    assert store._metadata['test_df'].rows == 3
    assert store._metadata['test_df'].columns == 2


def test_store_arrow_table():
    """Test storing Arrow Table directly"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    table = pa.table({'x': [1, 2, 3], 'y': [4, 5, 6]})
    data_ref = store.store(table, name='arrow_test')

    assert data_ref == 'arrow_test'
    assert store._dataframes['arrow_test'].num_rows == 3


def test_store_auto_name():
    """Test auto-generated data_ref names"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    df = pd.DataFrame({'a': [1, 2]})
    data_ref = store.store(df)

    assert data_ref.startswith('df_')
    assert len(data_ref) == 11  # df_ + 8 hex chars


def test_get_dataframe():
    """Test retrieving stored DataFrame"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    df = pd.DataFrame({'a': [1, 2, 3]})
    store.store(df, name='get_test')

    result = store.get('get_test')
    assert result is not None
    assert result.num_rows == 3


def test_get_pandas():
    """Test retrieving as pandas DataFrame"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
    store.store(df, name='pandas_test')

    result = store.get_pandas('pandas_test')
    assert isinstance(result, pd.DataFrame)
    assert list(result.columns) == ['a', 'b']
    assert len(result) == 3


def test_get_nonexistent():
    """Test getting nonexistent data_ref returns None"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    assert store.get('nonexistent') is None
    assert store.get_pandas('nonexistent') is None


def test_list_refs():
    """Test listing all stored DataFrames"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    store.store(pd.DataFrame({'a': [1, 2]}), name='df1')
    store.store(pd.DataFrame({'b': [3, 4, 5]}), name='df2')

    refs = store.list_refs()

    assert len(refs) == 2
    ref_names = [r['ref'] for r in refs]
    assert 'df1' in ref_names
    assert 'df2' in ref_names


def test_drop_dataframe():
    """Test dropping a DataFrame"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    store.store(pd.DataFrame({'a': [1]}), name='drop_test')
    assert store.get('drop_test') is not None

    result = store.drop('drop_test')
    assert result is True
    assert store.get('drop_test') is None


def test_drop_nonexistent():
    """Test dropping nonexistent data_ref"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    result = store.drop('nonexistent')
    assert result is False


def test_clear():
    """Test clearing all DataFrames"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    store.store(pd.DataFrame({'a': [1]}), name='df1')
    store.store(pd.DataFrame({'b': [2]}), name='df2')

    store.clear()

    assert len(store.list_refs()) == 0


def test_get_info():
    """Test getting DataFrame metadata"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
    store.store(df, name='info_test', source='test source')

    info = store.get_info('info_test')

    assert info.ref == 'info_test'
    assert info.rows == 3
    assert info.columns == 2
    assert info.column_names == ['a', 'b']
    assert info.source == 'test source'
    assert info.memory_bytes > 0


def test_total_memory():
    """Test total memory calculation"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    store.store(pd.DataFrame({'a': range(100)}), name='df1')
    store.store(pd.DataFrame({'b': range(200)}), name='df2')

    total = store.total_memory_bytes()
    assert total > 0

    total_mb = store.total_memory_mb()
    assert total_mb >= 0


def test_check_row_limit():
    """Test row limit checking"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._max_rows = 100

    # Under limit
    result = store.check_row_limit(50)
    assert result['exceeded'] is False

    # Over limit
    result = store.check_row_limit(150)
    assert result['exceeded'] is True
    assert 'suggestion' in result


def test_metadata_dtypes():
    """Test that dtypes are correctly recorded"""
    from mcp_server.data_store import DataStore

    store = DataStore()
    store._dataframes = {}
    store._metadata = {}

    df = pd.DataFrame({
        'int_col': [1, 2, 3],
        'float_col': [1.1, 2.2, 3.3],
        'str_col': ['a', 'b', 'c']
    })
    store.store(df, name='dtype_test')

    info = store.get_info('dtype_test')

    assert 'int_col' in info.dtypes
    assert 'float_col' in info.dtypes
    assert 'str_col' in info.dtypes
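The pandas-to-Arrow round-trip these registry tests exercise can be shown with public pyarrow API alone; the DataStore's internal bookkeeping (`_dataframes`, `_metadata`) is the plugin's own and is not reproduced here.

```python
# Sketch of the round-trip behind store() / get_pandas(), assuming Arrow tables are
# what the registry keeps in memory (consistent with the tests above).
import pandas as pd
import pyarrow as pa

df = pd.DataFrame({'a': [1, 2, 3], 'b': ['x', 'y', 'z']})
table = pa.Table.from_pandas(df)   # roughly what a store() call would hold
assert table.num_rows == 3 and table.num_columns == 2
roundtrip = table.to_pandas()      # roughly what get_pandas() would hand back
assert list(roundtrip.columns) == ['a', 'b']
```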
318 mcp-servers/data-platform/tests/test_dbt_tools.py Normal file
@@ -0,0 +1,318 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for dbt MCP tools.
|
||||||
|
"""
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import Mock, patch, MagicMock
|
||||||
|
import subprocess
|
||||||
|
import json
|
||||||
|
import tempfile
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_config(tmp_path):
|
||||||
|
"""Mock configuration with dbt project"""
|
||||||
|
dbt_dir = tmp_path / 'dbt_project'
|
||||||
|
dbt_dir.mkdir()
|
||||||
|
(dbt_dir / 'dbt_project.yml').write_text('name: test_project\n')
|
||||||
|
|
||||||
|
return {
|
||||||
|
'dbt_project_dir': str(dbt_dir),
|
||||||
|
'dbt_profiles_dir': str(tmp_path / '.dbt')
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def dbt_tools(mock_config):
|
||||||
|
"""Create DbtTools instance with mocked config"""
|
||||||
|
with patch('mcp_server.dbt_tools.load_config', return_value=mock_config):
|
||||||
|
from mcp_server.dbt_tools import DbtTools
|
||||||
|
|
||||||
|
tools = DbtTools()
|
||||||
|
tools.project_dir = mock_config['dbt_project_dir']
|
||||||
|
tools.profiles_dir = mock_config['dbt_profiles_dir']
|
||||||
|
return tools
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_parse_success(dbt_tools):
|
||||||
|
"""Test successful dbt parse"""
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_result.returncode = 0
|
||||||
|
mock_result.stdout = 'Parsed successfully'
|
||||||
|
mock_result.stderr = ''
|
||||||
|
|
||||||
|
with patch('subprocess.run', return_value=mock_result):
|
||||||
|
result = await dbt_tools.dbt_parse()
|
||||||
|
|
||||||
|
assert result['valid'] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_parse_failure(dbt_tools):
|
||||||
|
"""Test dbt parse with errors"""
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_result.returncode = 1
|
||||||
|
mock_result.stdout = ''
|
||||||
|
mock_result.stderr = 'Compilation error: deprecated syntax'
|
||||||
|
|
||||||
|
with patch('subprocess.run', return_value=mock_result):
|
||||||
|
result = await dbt_tools.dbt_parse()
|
||||||
|
|
||||||
|
assert result['valid'] is False
|
||||||
|
assert 'deprecated' in str(result.get('details', '')).lower() or len(result.get('errors', [])) > 0
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_run_with_prevalidation(dbt_tools):
|
||||||
|
"""Test dbt run includes pre-validation"""
|
||||||
|
# First call is parse, second is run
|
||||||
|
mock_parse = MagicMock()
|
||||||
|
mock_parse.returncode = 0
|
||||||
|
mock_parse.stdout = 'OK'
|
||||||
|
mock_parse.stderr = ''
|
||||||
|
|
||||||
|
mock_run = MagicMock()
|
||||||
|
mock_run.returncode = 0
|
||||||
|
mock_run.stdout = 'Completed successfully'
|
||||||
|
mock_run.stderr = ''
|
||||||
|
|
||||||
|
with patch('subprocess.run', side_effect=[mock_parse, mock_run]):
|
||||||
|
result = await dbt_tools.dbt_run()
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_run_fails_validation(dbt_tools):
|
||||||
|
"""Test dbt run fails if validation fails"""
|
||||||
|
mock_parse = MagicMock()
|
||||||
|
mock_parse.returncode = 1
|
||||||
|
mock_parse.stdout = ''
|
||||||
|
mock_parse.stderr = 'Parse error'
|
||||||
|
|
||||||
|
with patch('subprocess.run', return_value=mock_parse):
|
||||||
|
result = await dbt_tools.dbt_run()
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
assert 'Pre-validation failed' in result['error']
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_run_with_selection(dbt_tools):
|
||||||
|
"""Test dbt run with model selection"""
|
||||||
|
mock_parse = MagicMock()
|
||||||
|
mock_parse.returncode = 0
|
||||||
|
mock_parse.stdout = 'OK'
|
||||||
|
mock_parse.stderr = ''
|
||||||
|
|
||||||
|
mock_run = MagicMock()
|
||||||
|
mock_run.returncode = 0
|
||||||
|
mock_run.stdout = 'Completed'
|
||||||
|
mock_run.stderr = ''
|
||||||
|
|
||||||
|
calls = []
|
||||||
|
|
||||||
|
def track_calls(*args, **kwargs):
|
||||||
|
calls.append(args[0] if args else kwargs.get('args', []))
|
||||||
|
if len(calls) == 1:
|
||||||
|
return mock_parse
|
||||||
|
return mock_run
|
||||||
|
|
||||||
|
with patch('subprocess.run', side_effect=track_calls):
|
||||||
|
result = await dbt_tools.dbt_run(select='dim_customers')
|
||||||
|
|
||||||
|
# Verify --select was passed
|
||||||
|
assert any('--select' in str(call) for call in calls)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_test(dbt_tools):
|
||||||
|
"""Test dbt test"""
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_result.returncode = 0
|
||||||
|
mock_result.stdout = 'All tests passed'
|
||||||
|
mock_result.stderr = ''
|
||||||
|
|
||||||
|
with patch('subprocess.run', return_value=mock_result):
|
||||||
|
result = await dbt_tools.dbt_test()
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_build(dbt_tools):
|
||||||
|
"""Test dbt build with pre-validation"""
|
||||||
|
mock_parse = MagicMock()
|
||||||
|
mock_parse.returncode = 0
|
||||||
|
mock_parse.stdout = 'OK'
|
||||||
|
mock_parse.stderr = ''
|
||||||
|
|
||||||
|
mock_build = MagicMock()
|
||||||
|
mock_build.returncode = 0
|
||||||
|
mock_build.stdout = 'Build complete'
|
||||||
|
mock_build.stderr = ''
|
||||||
|
|
||||||
|
with patch('subprocess.run', side_effect=[mock_parse, mock_build]):
|
||||||
|
result = await dbt_tools.dbt_build()
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_compile(dbt_tools):
|
||||||
|
"""Test dbt compile"""
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_result.returncode = 0
|
||||||
|
mock_result.stdout = 'Compiled'
|
||||||
|
mock_result.stderr = ''
|
||||||
|
|
||||||
|
with patch('subprocess.run', return_value=mock_result):
|
||||||
|
result = await dbt_tools.dbt_compile()
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_ls(dbt_tools):
|
||||||
|
"""Test dbt ls"""
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_result.returncode = 0
|
||||||
|
mock_result.stdout = 'dim_customers\ndim_products\nfct_orders\n'
|
||||||
|
mock_result.stderr = ''
|
||||||
|
|
||||||
|
with patch('subprocess.run', return_value=mock_result):
|
||||||
|
result = await dbt_tools.dbt_ls()
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
assert result['count'] == 3
|
||||||
|
assert 'dim_customers' in result['resources']
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_docs_generate(dbt_tools, tmp_path):
|
||||||
|
"""Test dbt docs generate"""
|
||||||
|
mock_result = MagicMock()
|
||||||
|
mock_result.returncode = 0
|
||||||
|
mock_result.stdout = 'Done'
|
||||||
|
mock_result.stderr = ''
|
||||||
|
|
||||||
|
# Create fake target directory
|
||||||
|
target_dir = tmp_path / 'dbt_project' / 'target'
|
||||||
|
target_dir.mkdir(parents=True)
|
||||||
|
(target_dir / 'catalog.json').write_text('{}')
|
||||||
|
(target_dir / 'manifest.json').write_text('{}')
|
||||||
|
|
||||||
|
dbt_tools.project_dir = str(tmp_path / 'dbt_project')
|
||||||
|
|
||||||
|
with patch('subprocess.run', return_value=mock_result):
|
||||||
|
result = await dbt_tools.dbt_docs_generate()
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
assert result['catalog_generated'] is True
|
||||||
|
assert result['manifest_generated'] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_lineage(dbt_tools, tmp_path):
|
||||||
|
"""Test dbt lineage"""
|
||||||
|
# Create manifest
|
||||||
|
target_dir = tmp_path / 'dbt_project' / 'target'
|
||||||
|
target_dir.mkdir(parents=True)
|
||||||
|
|
||||||
|
manifest = {
|
||||||
|
'nodes': {
|
||||||
|
'model.test.dim_customers': {
|
||||||
|
'name': 'dim_customers',
|
||||||
|
'resource_type': 'model',
|
||||||
|
'schema': 'public',
|
||||||
|
'database': 'testdb',
|
||||||
|
'description': 'Customer dimension',
|
||||||
|
'tags': ['daily'],
|
||||||
|
'config': {'materialized': 'table'},
|
||||||
|
'depends_on': {
|
||||||
|
'nodes': ['model.test.stg_customers']
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'model.test.stg_customers': {
|
||||||
|
'name': 'stg_customers',
|
||||||
|
'resource_type': 'model',
|
||||||
|
'depends_on': {'nodes': []}
|
||||||
|
},
|
||||||
|
'model.test.fct_orders': {
|
||||||
|
'name': 'fct_orders',
|
||||||
|
'resource_type': 'model',
|
||||||
|
'depends_on': {
|
||||||
|
'nodes': ['model.test.dim_customers']
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(target_dir / 'manifest.json').write_text(json.dumps(manifest))
|
||||||
|
|
||||||
|
dbt_tools.project_dir = str(tmp_path / 'dbt_project')
|
||||||
|
|
||||||
|
result = await dbt_tools.dbt_lineage('dim_customers')
|
||||||
|
|
||||||
|
assert result['model'] == 'dim_customers'
|
||||||
|
assert 'model.test.stg_customers' in result['upstream']
|
||||||
|
assert 'model.test.fct_orders' in result['downstream']
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_lineage_model_not_found(dbt_tools, tmp_path):
|
||||||
|
"""Test dbt lineage with nonexistent model"""
|
||||||
|
target_dir = tmp_path / 'dbt_project' / 'target'
|
||||||
|
target_dir.mkdir(parents=True)
|
||||||
|
|
||||||
|
manifest = {
|
||||||
|
'nodes': {
|
||||||
|
'model.test.dim_customers': {
|
||||||
|
'name': 'dim_customers',
|
||||||
|
'resource_type': 'model'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(target_dir / 'manifest.json').write_text(json.dumps(manifest))
|
||||||
|
|
||||||
|
dbt_tools.project_dir = str(tmp_path / 'dbt_project')
|
||||||
|
|
||||||
|
result = await dbt_tools.dbt_lineage('nonexistent_model')
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
assert 'not found' in result['error'].lower()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_no_project():
|
||||||
|
"""Test dbt tools when no project configured"""
|
||||||
|
with patch('mcp_server.dbt_tools.load_config', return_value={'dbt_project_dir': None}):
|
||||||
|
from mcp_server.dbt_tools import DbtTools
|
||||||
|
|
||||||
|
tools = DbtTools()
|
||||||
|
tools.project_dir = None
|
||||||
|
|
||||||
|
result = await tools.dbt_run()
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
assert 'not found' in result['error'].lower()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_timeout(dbt_tools):
|
||||||
|
"""Test dbt command timeout handling"""
|
||||||
|
with patch('subprocess.run', side_effect=subprocess.TimeoutExpired('dbt', 300)):
|
||||||
|
result = await dbt_tools.dbt_parse()
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
assert 'timed out' in result['error'].lower()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dbt_not_installed(dbt_tools):
|
||||||
|
"""Test handling when dbt is not installed"""
|
||||||
|
with patch('subprocess.run', side_effect=FileNotFoundError()):
|
||||||
|
result = await dbt_tools.dbt_parse()
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
assert 'not found' in result['error'].lower()
|
||||||
301 mcp-servers/data-platform/tests/test_pandas_tools.py Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for pandas MCP tools.
|
||||||
|
"""
|
||||||
|
import pytest
|
||||||
|
import pandas as pd
|
||||||
|
import tempfile
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def temp_csv(tmp_path):
|
||||||
|
"""Create a temporary CSV file for testing"""
|
||||||
|
csv_path = tmp_path / 'test.csv'
|
||||||
|
df = pd.DataFrame({
|
||||||
|
'id': [1, 2, 3, 4, 5],
|
||||||
|
'name': ['Alice', 'Bob', 'Charlie', 'Diana', 'Eve'],
|
||||||
|
'value': [10.5, 20.0, 30.5, 40.0, 50.5]
|
||||||
|
})
|
||||||
|
df.to_csv(csv_path, index=False)
|
||||||
|
return str(csv_path)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def temp_parquet(tmp_path):
|
||||||
|
"""Create a temporary Parquet file for testing"""
|
||||||
|
parquet_path = tmp_path / 'test.parquet'
|
||||||
|
df = pd.DataFrame({
|
||||||
|
'id': [1, 2, 3],
|
||||||
|
'data': ['a', 'b', 'c']
|
||||||
|
})
|
||||||
|
df.to_parquet(parquet_path)
|
||||||
|
return str(parquet_path)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def temp_json(tmp_path):
|
||||||
|
"""Create a temporary JSON file for testing"""
|
||||||
|
json_path = tmp_path / 'test.json'
|
||||||
|
df = pd.DataFrame({
|
||||||
|
'x': [1, 2],
|
||||||
|
'y': [3, 4]
|
||||||
|
})
|
||||||
|
df.to_json(json_path, orient='records')
|
||||||
|
return str(json_path)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def pandas_tools():
|
||||||
|
"""Create PandasTools instance with fresh store"""
|
||||||
|
from mcp_server.pandas_tools import PandasTools
|
||||||
|
from mcp_server.data_store import DataStore
|
||||||
|
|
||||||
|
# Reset store for test isolation
|
||||||
|
store = DataStore.get_instance()
|
||||||
|
store._dataframes = {}
|
||||||
|
store._metadata = {}
|
||||||
|
|
||||||
|
return PandasTools()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_read_csv(pandas_tools, temp_csv):
|
||||||
|
"""Test reading CSV file"""
|
||||||
|
result = await pandas_tools.read_csv(temp_csv, name='csv_test')
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['data_ref'] == 'csv_test'
|
||||||
|
assert result['rows'] == 5
|
||||||
|
assert 'id' in result['columns']
|
||||||
|
assert 'name' in result['columns']
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_read_csv_nonexistent(pandas_tools):
|
||||||
|
"""Test reading nonexistent CSV file"""
|
||||||
|
result = await pandas_tools.read_csv('/nonexistent/path.csv')
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
assert 'not found' in result['error'].lower()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_read_parquet(pandas_tools, temp_parquet):
|
||||||
|
"""Test reading Parquet file"""
|
||||||
|
result = await pandas_tools.read_parquet(temp_parquet, name='parquet_test')
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['rows'] == 3
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_read_json(pandas_tools, temp_json):
|
||||||
|
"""Test reading JSON file"""
|
||||||
|
result = await pandas_tools.read_json(temp_json, name='json_test')
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['rows'] == 2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_to_csv(pandas_tools, temp_csv, tmp_path):
|
||||||
|
"""Test exporting to CSV"""
|
||||||
|
# First load some data
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='export_test')
|
||||||
|
|
||||||
|
# Export to new file
|
||||||
|
output_path = str(tmp_path / 'output.csv')
|
||||||
|
result = await pandas_tools.to_csv('export_test', output_path)
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
assert os.path.exists(output_path)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_to_parquet(pandas_tools, temp_csv, tmp_path):
|
||||||
|
"""Test exporting to Parquet"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='parquet_export')
|
||||||
|
|
||||||
|
output_path = str(tmp_path / 'output.parquet')
|
||||||
|
result = await pandas_tools.to_parquet('parquet_export', output_path)
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
assert os.path.exists(output_path)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_describe(pandas_tools, temp_csv):
|
||||||
|
"""Test describe statistics"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='describe_test')
|
||||||
|
|
||||||
|
result = await pandas_tools.describe('describe_test')
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert 'shape' in result
|
||||||
|
assert result['shape']['rows'] == 5
|
||||||
|
assert 'statistics' in result
|
||||||
|
assert 'null_counts' in result
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_head(pandas_tools, temp_csv):
|
||||||
|
"""Test getting first N rows"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='head_test')
|
||||||
|
|
||||||
|
result = await pandas_tools.head('head_test', n=3)
|
||||||
|
|
||||||
|
assert result['returned_rows'] == 3
|
||||||
|
assert len(result['data']) == 3
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_tail(pandas_tools, temp_csv):
|
||||||
|
"""Test getting last N rows"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='tail_test')
|
||||||
|
|
||||||
|
result = await pandas_tools.tail('tail_test', n=2)
|
||||||
|
|
||||||
|
assert result['returned_rows'] == 2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_filter(pandas_tools, temp_csv):
|
||||||
|
"""Test filtering rows"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='filter_test')
|
||||||
|
|
||||||
|
result = await pandas_tools.filter('filter_test', 'value > 25')
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['rows'] == 3 # 30.5, 40.0, 50.5
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_filter_invalid_condition(pandas_tools, temp_csv):
|
||||||
|
"""Test filter with invalid condition"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='filter_error')
|
||||||
|
|
||||||
|
result = await pandas_tools.filter('filter_error', 'invalid_column > 0')
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_select(pandas_tools, temp_csv):
|
||||||
|
"""Test selecting columns"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='select_test')
|
||||||
|
|
||||||
|
result = await pandas_tools.select('select_test', ['id', 'name'])
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['columns'] == ['id', 'name']
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_select_invalid_column(pandas_tools, temp_csv):
|
||||||
|
"""Test select with invalid column"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='select_error')
|
||||||
|
|
||||||
|
result = await pandas_tools.select('select_error', ['id', 'nonexistent'])
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
assert 'available_columns' in result
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_groupby(pandas_tools, tmp_path):
|
||||||
|
"""Test groupby aggregation"""
|
||||||
|
# Create test data with groups
|
||||||
|
csv_path = tmp_path / 'groupby.csv'
|
||||||
|
df = pd.DataFrame({
|
||||||
|
'category': ['A', 'A', 'B', 'B'],
|
||||||
|
'value': [10, 20, 30, 40]
|
||||||
|
})
|
||||||
|
df.to_csv(csv_path, index=False)
|
||||||
|
|
||||||
|
await pandas_tools.read_csv(str(csv_path), name='groupby_test')
|
||||||
|
|
||||||
|
result = await pandas_tools.groupby(
|
||||||
|
'groupby_test',
|
||||||
|
by='category',
|
||||||
|
agg={'value': 'sum'}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['rows'] == 2 # Two groups: A, B
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_join(pandas_tools, tmp_path):
|
||||||
|
"""Test joining DataFrames"""
|
||||||
|
# Create left table
|
||||||
|
left_path = tmp_path / 'left.csv'
|
||||||
|
pd.DataFrame({
|
||||||
|
'id': [1, 2, 3],
|
||||||
|
'name': ['A', 'B', 'C']
|
||||||
|
}).to_csv(left_path, index=False)
|
||||||
|
|
||||||
|
# Create right table
|
||||||
|
right_path = tmp_path / 'right.csv'
|
||||||
|
pd.DataFrame({
|
||||||
|
'id': [1, 2, 4],
|
||||||
|
'value': [100, 200, 400]
|
||||||
|
}).to_csv(right_path, index=False)
|
||||||
|
|
||||||
|
await pandas_tools.read_csv(str(left_path), name='left')
|
||||||
|
await pandas_tools.read_csv(str(right_path), name='right')
|
||||||
|
|
||||||
|
result = await pandas_tools.join('left', 'right', on='id', how='inner')
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['rows'] == 2 # Only id 1 and 2 match
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_list_data(pandas_tools, temp_csv):
|
||||||
|
"""Test listing all DataFrames"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='list_test1')
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='list_test2')
|
||||||
|
|
||||||
|
result = await pandas_tools.list_data()
|
||||||
|
|
||||||
|
assert result['count'] == 2
|
||||||
|
refs = [df['ref'] for df in result['dataframes']]
|
||||||
|
assert 'list_test1' in refs
|
||||||
|
assert 'list_test2' in refs
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_drop_data(pandas_tools, temp_csv):
|
||||||
|
"""Test dropping DataFrame"""
|
||||||
|
await pandas_tools.read_csv(temp_csv, name='drop_test')
|
||||||
|
|
||||||
|
result = await pandas_tools.drop_data('drop_test')
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
|
||||||
|
# Verify it's gone
|
||||||
|
list_result = await pandas_tools.list_data()
|
||||||
|
refs = [df['ref'] for df in list_result['dataframes']]
|
||||||
|
assert 'drop_test' not in refs
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_drop_nonexistent(pandas_tools):
|
||||||
|
"""Test dropping nonexistent DataFrame"""
|
||||||
|
result = await pandas_tools.drop_data('nonexistent')
|
||||||
|
|
||||||
|
assert 'error' in result
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_operations_on_nonexistent(pandas_tools):
|
||||||
|
"""Test operations on nonexistent data_ref"""
|
||||||
|
result = await pandas_tools.describe('nonexistent')
|
||||||
|
assert 'error' in result
|
||||||
|
|
||||||
|
result = await pandas_tools.head('nonexistent')
|
||||||
|
assert 'error' in result
|
||||||
|
|
||||||
|
result = await pandas_tools.filter('nonexistent', 'x > 0')
|
||||||
|
assert 'error' in result
|
||||||
338 mcp-servers/data-platform/tests/test_postgres_tools.py Normal file
@@ -0,0 +1,338 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for PostgreSQL MCP tools.
|
||||||
|
"""
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import Mock, AsyncMock, patch, MagicMock
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_config():
|
||||||
|
"""Mock configuration"""
|
||||||
|
return {
|
||||||
|
'postgres_url': 'postgresql://test:test@localhost:5432/testdb',
|
||||||
|
'max_rows': 100000
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def postgres_tools(mock_config):
|
||||||
|
"""Create PostgresTools instance with mocked config"""
|
||||||
|
with patch('mcp_server.postgres_tools.load_config', return_value=mock_config):
|
||||||
|
from mcp_server.postgres_tools import PostgresTools
|
||||||
|
from mcp_server.data_store import DataStore
|
||||||
|
|
||||||
|
# Reset store
|
||||||
|
store = DataStore.get_instance()
|
||||||
|
store._dataframes = {}
|
||||||
|
store._metadata = {}
|
||||||
|
|
||||||
|
tools = PostgresTools()
|
||||||
|
tools.config = mock_config
|
||||||
|
return tools
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_pg_connect_no_config():
|
||||||
|
"""Test pg_connect when no PostgreSQL configured"""
|
||||||
|
with patch('mcp_server.postgres_tools.load_config', return_value={'postgres_url': None}):
|
||||||
|
from mcp_server.postgres_tools import PostgresTools
|
||||||
|
|
||||||
|
tools = PostgresTools()
|
||||||
|
tools.config = {'postgres_url': None}
|
||||||
|
|
||||||
|
result = await tools.pg_connect()
|
||||||
|
|
||||||
|
assert result['connected'] is False
|
||||||
|
assert 'not configured' in result['error'].lower()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_pg_connect_success(postgres_tools):
|
||||||
|
"""Test successful pg_connect"""
|
||||||
|
mock_conn = AsyncMock()
|
||||||
|
mock_conn.fetchval = AsyncMock(side_effect=[
|
||||||
|
'PostgreSQL 15.1', # version
|
||||||
|
'testdb', # database name
|
||||||
|
'testuser', # user
|
||||||
|
None # PostGIS check fails
|
||||||
|
])
|
||||||
|
mock_conn.close = AsyncMock()
|
||||||
|
|
||||||
|
# Create proper async context manager
|
||||||
|
mock_cm = AsyncMock()
|
||||||
|
mock_cm.__aenter__ = AsyncMock(return_value=mock_conn)
|
||||||
|
mock_cm.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
|
||||||
|
mock_pool = MagicMock()
|
||||||
|
mock_pool.acquire = MagicMock(return_value=mock_cm)
|
||||||
|
|
||||||
|
# Use AsyncMock for create_pool since it's awaited
|
||||||
|
with patch('asyncpg.create_pool', new=AsyncMock(return_value=mock_pool)):
|
||||||
|
postgres_tools.pool = None
|
||||||
|
result = await postgres_tools.pg_connect()
|
||||||
|
|
||||||
|
assert result['connected'] is True
|
||||||
|
assert result['database'] == 'testdb'
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_pg_query_success(postgres_tools):
|
||||||
|
"""Test successful pg_query"""
|
||||||
|
mock_rows = [
|
||||||
|
{'id': 1, 'name': 'Alice'},
|
||||||
|
{'id': 2, 'name': 'Bob'}
|
||||||
|
]
|
||||||
|
|
||||||
|
mock_conn = AsyncMock()
|
||||||
|
mock_conn.fetch = AsyncMock(return_value=mock_rows)
|
||||||
|
|
||||||
|
mock_pool = AsyncMock()
|
||||||
|
mock_pool.acquire = MagicMock(return_value=AsyncMock(
|
||||||
|
__aenter__=AsyncMock(return_value=mock_conn),
|
||||||
|
__aexit__=AsyncMock()
|
||||||
|
))
|
||||||
|
|
||||||
|
postgres_tools.pool = mock_pool
|
||||||
|
|
||||||
|
result = await postgres_tools.pg_query('SELECT * FROM users', name='users_data')
|
||||||
|
|
||||||
|
assert 'data_ref' in result
|
||||||
|
assert result['rows'] == 2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_pg_query_empty_result(postgres_tools):
|
||||||
|
"""Test pg_query with no results"""
|
||||||
|
mock_conn = AsyncMock()
|
||||||
|
mock_conn.fetch = AsyncMock(return_value=[])
|
||||||
|
|
||||||
|
mock_pool = AsyncMock()
|
||||||
|
mock_pool.acquire = MagicMock(return_value=AsyncMock(
|
||||||
|
__aenter__=AsyncMock(return_value=mock_conn),
|
||||||
|
__aexit__=AsyncMock()
|
||||||
|
))
|
||||||
|
|
||||||
|
postgres_tools.pool = mock_pool
|
||||||
|
|
||||||
|
result = await postgres_tools.pg_query('SELECT * FROM empty_table')
|
||||||
|
|
||||||
|
assert result['data_ref'] is None
|
||||||
|
assert result['rows'] == 0
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_pg_execute_success(postgres_tools):
|
||||||
|
"""Test successful pg_execute"""
|
||||||
|
mock_conn = AsyncMock()
|
||||||
|
mock_conn.execute = AsyncMock(return_value='INSERT 0 3')
|
||||||
|
|
||||||
|
mock_pool = AsyncMock()
|
||||||
|
mock_pool.acquire = MagicMock(return_value=AsyncMock(
|
||||||
|
__aenter__=AsyncMock(return_value=mock_conn),
|
||||||
|
__aexit__=AsyncMock()
|
||||||
|
))
|
||||||
|
|
||||||
|
postgres_tools.pool = mock_pool
|
||||||
|
|
||||||
|
result = await postgres_tools.pg_execute('INSERT INTO users VALUES (1, 2, 3)')
|
||||||
|
|
||||||
|
assert result['success'] is True
|
||||||
|
assert result['affected_rows'] == 3
|
||||||
|
assert result['command'] == 'INSERT'
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_pg_tables(postgres_tools):
    """Test listing tables"""
    mock_rows = [
        {'table_name': 'users', 'table_type': 'BASE TABLE', 'column_count': 5},
        {'table_name': 'orders', 'table_type': 'BASE TABLE', 'column_count': 8}
    ]

    mock_conn = AsyncMock()
    mock_conn.fetch = AsyncMock(return_value=mock_rows)

    mock_pool = AsyncMock()
    mock_pool.acquire = MagicMock(return_value=AsyncMock(
        __aenter__=AsyncMock(return_value=mock_conn),
        __aexit__=AsyncMock()
    ))

    postgres_tools.pool = mock_pool

    result = await postgres_tools.pg_tables(schema='public')

    assert result['schema'] == 'public'
    assert result['count'] == 2
    assert len(result['tables']) == 2


@pytest.mark.asyncio
async def test_pg_columns(postgres_tools):
    """Test getting column info"""
    mock_rows = [
        {
            'column_name': 'id',
            'data_type': 'integer',
            'udt_name': 'int4',
            'is_nullable': 'NO',
            'column_default': "nextval('users_id_seq'::regclass)",
            'character_maximum_length': None,
            'numeric_precision': 32
        },
        {
            'column_name': 'name',
            'data_type': 'character varying',
            'udt_name': 'varchar',
            'is_nullable': 'YES',
            'column_default': None,
            'character_maximum_length': 255,
            'numeric_precision': None
        }
    ]

    mock_conn = AsyncMock()
    mock_conn.fetch = AsyncMock(return_value=mock_rows)

    mock_pool = AsyncMock()
    mock_pool.acquire = MagicMock(return_value=AsyncMock(
        __aenter__=AsyncMock(return_value=mock_conn),
        __aexit__=AsyncMock()
    ))

    postgres_tools.pool = mock_pool

    result = await postgres_tools.pg_columns(table='users')

    assert result['table'] == 'public.users'
    assert result['column_count'] == 2
    assert result['columns'][0]['name'] == 'id'
    assert result['columns'][0]['nullable'] is False
@pytest.mark.asyncio
async def test_pg_schemas(postgres_tools):
    """Test listing schemas"""
    mock_rows = [
        {'schema_name': 'public'},
        {'schema_name': 'app'}
    ]

    mock_conn = AsyncMock()
    mock_conn.fetch = AsyncMock(return_value=mock_rows)

    mock_pool = AsyncMock()
    mock_pool.acquire = MagicMock(return_value=AsyncMock(
        __aenter__=AsyncMock(return_value=mock_conn),
        __aexit__=AsyncMock()
    ))

    postgres_tools.pool = mock_pool

    result = await postgres_tools.pg_schemas()

    assert result['count'] == 2
    assert 'public' in result['schemas']


@pytest.mark.asyncio
async def test_st_tables(postgres_tools):
    """Test listing PostGIS tables"""
    mock_rows = [
        {
            'table_name': 'locations',
            'geometry_column': 'geom',
            'geometry_type': 'POINT',
            'srid': 4326,
            'coord_dimension': 2
        }
    ]

    mock_conn = AsyncMock()
    mock_conn.fetch = AsyncMock(return_value=mock_rows)

    mock_pool = AsyncMock()
    mock_pool.acquire = MagicMock(return_value=AsyncMock(
        __aenter__=AsyncMock(return_value=mock_conn),
        __aexit__=AsyncMock()
    ))

    postgres_tools.pool = mock_pool

    result = await postgres_tools.st_tables()

    assert result['count'] == 1
    assert result['postgis_tables'][0]['table'] == 'locations'
    assert result['postgis_tables'][0]['srid'] == 4326
@pytest.mark.asyncio
async def test_st_tables_no_postgis(postgres_tools):
    """Test st_tables when PostGIS not installed"""
    mock_conn = AsyncMock()
    mock_conn.fetch = AsyncMock(side_effect=Exception("relation \"geometry_columns\" does not exist"))

    # Create proper async context manager
    mock_cm = AsyncMock()
    mock_cm.__aenter__ = AsyncMock(return_value=mock_conn)
    mock_cm.__aexit__ = AsyncMock(return_value=None)

    mock_pool = MagicMock()
    mock_pool.acquire = MagicMock(return_value=mock_cm)

    postgres_tools.pool = mock_pool

    result = await postgres_tools.st_tables()

    assert 'error' in result
    assert 'PostGIS' in result['error']


@pytest.mark.asyncio
async def test_st_extent(postgres_tools):
    """Test getting geometry bounding box"""
    mock_row = {
        'xmin': -122.5,
        'ymin': 37.5,
        'xmax': -122.0,
        'ymax': 38.0
    }

    mock_conn = AsyncMock()
    mock_conn.fetchrow = AsyncMock(return_value=mock_row)

    mock_pool = AsyncMock()
    mock_pool.acquire = MagicMock(return_value=AsyncMock(
        __aenter__=AsyncMock(return_value=mock_conn),
        __aexit__=AsyncMock()
    ))

    postgres_tools.pool = mock_pool

    result = await postgres_tools.st_extent(table='locations', column='geom')

    assert result['bbox']['xmin'] == -122.5
    assert result['bbox']['ymax'] == 38.0
@pytest.mark.asyncio
async def test_error_handling(postgres_tools):
    """Test error handling for database errors"""
    mock_conn = AsyncMock()
    mock_conn.fetch = AsyncMock(side_effect=Exception("Connection refused"))

    # Create proper async context manager
    mock_cm = AsyncMock()
    mock_cm.__aenter__ = AsyncMock(return_value=mock_conn)
    mock_cm.__aexit__ = AsyncMock(return_value=None)

    mock_pool = MagicMock()
    mock_pool.acquire = MagicMock(return_value=mock_cm)

    postgres_tools.pool = mock_pool

    result = await postgres_tools.pg_query('SELECT 1')

    assert 'error' in result
    assert 'Connection refused' in result['error']
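These tests rely on a `postgres_tools` fixture that is defined elsewhere in the suite and does not appear in this diff. A minimal sketch of what such a fixture might look like — the `PostgresTools` class name, import path, and constructor signature are assumptions, not taken from this changeset:

```python
import pytest

# Hypothetical fixture: the real class and its constructor live in the
# data-platform plugin and may differ from this sketch.
@pytest.fixture
def postgres_tools():
    from postgres_tools import PostgresTools  # assumed module path
    tools = PostgresTools(dsn='postgresql://test:test@localhost:5432/testdb')
    tools.pool = None  # each test installs its own mocked pool
    return tools
```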
@@ -621,6 +621,40 @@ class GiteaClient:
        response.raise_for_status()
        return response.json()

    def create_org_label(
        self,
        org: str,
        name: str,
        color: str,
        description: Optional[str] = None
    ) -> Dict:
        """
        Create a new label at the organization level.

        Organization labels are shared across all repositories in the org.
        Use this for workflow labels (Type, Priority, Complexity, Effort, etc.)

        Args:
            org: Organization name
            name: Label name (e.g., 'Type/Bug', 'Priority/High')
            color: Hex color code (with or without #)
            description: Optional label description

        Returns:
            Created label dictionary
        """
        url = f"{self.base_url}/orgs/{org}/labels"
        data = {
            'name': name,
            'color': color.lstrip('#')  # Remove # if present
        }
        if description:
            data['description'] = description
        logger.info(f"Creating organization label '{name}' in {org}")
        response = self.session.post(url, json=data)
        response.raise_for_status()
        return response.json()

    # ========================================
    # PULL REQUEST OPERATIONS
    # ========================================
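A rough usage sketch for the new client method; the `GiteaClient` constructor arguments and the organization name shown here are placeholders, not part of this diff:

```python
client = GiteaClient(base_url='https://gitea.example.com/api/v1', token='...')  # constructor args assumed
label = client.create_org_label('personal-projects', 'Type/Bug', '#ee0701', 'Something is broken')
print(label['name'], label['color'])  # the leading '#' is stripped before the API call
```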
@@ -622,13 +622,65 @@ class GiteaMCPServer:
            ),
            Tool(
                name="create_label",
                description="Create a new label in the repository",
                description="Create a new label in the repository (for repo-specific labels like Component/*, Tech/*)",
                inputSchema={
                    "type": "object",
                    "properties": {
                        "name": {
                            "type": "string",
                            "description": "Label name"
                            "description": "Label name (e.g., 'Component/Backend', 'Tech/Python')"
                        },
                        "color": {
                            "type": "string",
                            "description": "Label color (hex code)"
                        },
                        "description": {
                            "type": "string",
                            "description": "Label description"
                        },
                        "repo": {
                            "type": "string",
                            "description": "Repository name (owner/repo format)"
                        }
                    },
                    "required": ["name", "color"]
                }
            ),
            Tool(
                name="create_org_label",
                description="Create a new label at organization level (for workflow labels like Type/*, Priority/*, Complexity/*, Effort/*)",
                inputSchema={
                    "type": "object",
                    "properties": {
                        "org": {
                            "type": "string",
                            "description": "Organization name"
                        },
                        "name": {
                            "type": "string",
                            "description": "Label name (e.g., 'Type/Bug', 'Priority/High')"
                        },
                        "color": {
                            "type": "string",
                            "description": "Label color (hex code)"
                        },
                        "description": {
                            "type": "string",
                            "description": "Label description"
                        }
                    },
                    "required": ["org", "name", "color"]
                }
            ),
            Tool(
                name="create_label_smart",
                description="Create a label at the appropriate level (org or repo) based on category. Org: Type/*, Priority/*, Complexity/*, Effort/*, Risk/*, Source/*, Agent/*. Repo: Component/*, Tech/*",
                inputSchema={
                    "type": "object",
                    "properties": {
                        "name": {
                            "type": "string",
                            "description": "Label name (e.g., 'Type/Bug', 'Component/Backend')"
                        },
                        "color": {
                            "type": "string",
@@ -880,6 +932,20 @@ class GiteaMCPServer:
                    arguments.get('description'),
                    arguments.get('repo')
                )
            elif name == "create_org_label":
                result = self.client.create_org_label(
                    arguments['org'],
                    arguments['name'],
                    arguments['color'],
                    arguments.get('description')
                )
            elif name == "create_label_smart":
                result = await self.label_tools.create_label_smart(
                    arguments['name'],
                    arguments['color'],
                    arguments.get('description'),
                    arguments.get('repo')
                )
            # Pull Request tools
            elif name == "list_pull_requests":
                result = await self.pr_tools.list_pull_requests(**arguments)
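For reference, a tool call routed through the dispatcher above might carry arguments like these; the values are illustrative only:

```python
arguments = {
    'org': 'personal-projects',
    'name': 'Priority/High',
    'color': 'ff0000',
    'description': 'Needs attention soon'
}
# The "create_org_label" branch above would translate this into:
# self.client.create_org_label('personal-projects', 'Priority/High', 'ff0000', 'Needs attention soon')
```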
@@ -259,3 +259,119 @@ class LabelTools:
                return lookup[category_lower][value_lower]

        return None

    # Organization-level label categories (workflow labels shared across repos)
    ORG_LABEL_CATEGORIES = {'agent', 'complexity', 'effort', 'efforts', 'priority', 'risk', 'source', 'type'}

    # Repository-level label categories (project-specific labels)
    REPO_LABEL_CATEGORIES = {'component', 'tech'}

    async def create_label_smart(
        self,
        name: str,
        color: str,
        description: Optional[str] = None,
        repo: Optional[str] = None
    ) -> Dict:
        """
        Create a label at the appropriate level (org or repo) based on category.
        Skips if label already exists (checks both org and repo levels).

        Organization labels: Agent, Complexity, Effort, Priority, Risk, Source, Type
        Repository labels: Component, Tech

        Args:
            name: Label name (e.g., 'Type/Bug', 'Component/Backend')
            color: Hex color code
            description: Optional label description
            repo: Repository in 'owner/repo' format

        Returns:
            Created label dictionary with 'level' key, or 'skipped' if already exists
        """
        loop = asyncio.get_event_loop()

        target_repo = repo or self.gitea.repo
        if not target_repo or '/' not in target_repo:
            raise ValueError("Use 'owner/repo' format (e.g. 'org/repo-name')")

        owner = target_repo.split('/')[0]
        is_org = await loop.run_in_executor(
            None,
            lambda: self.gitea.is_org_repo(target_repo)
        )

        # Fetch existing labels to check for duplicates
        existing_labels = await self.get_labels(target_repo)
        all_existing = existing_labels.get('organization', []) + existing_labels.get('repository', [])
        existing_names = [label['name'].lower() for label in all_existing]

        # Normalize the new label name for comparison
        name_normalized = name.lower()

        # Also check for format variations (Type/Bug vs Type: Bug)
        name_variations = [name_normalized]
        if '/' in name:
            name_variations.append(name.replace('/', ': ').lower())
            name_variations.append(name.replace('/', ':').lower())
        elif ': ' in name:
            name_variations.append(name.replace(': ', '/').lower())
        elif ':' in name:
            name_variations.append(name.replace(':', '/').lower())

        # Check if label already exists in any format
        for variation in name_variations:
            if variation in existing_names:
                logger.info(f"Label '{name}' already exists (found as '{variation}'), skipping")
                return {
                    'name': name,
                    'skipped': True,
                    'reason': "Label already exists",
                    'level': 'existing'
                }

        # Parse category from label name
        category = None
        if '/' in name:
            category = name.split('/')[0].lower().rstrip('s')
        elif ':' in name:
            category = name.split(':')[0].strip().lower().rstrip('s')

        # If it's an org repo and the category is an org-level category, create at org level
        if is_org and category in self.ORG_LABEL_CATEGORIES:
            result = await loop.run_in_executor(
                None,
                lambda: self.gitea.create_org_label(owner, name, color, description)
            )
            # Handle unexpected response types (API may return list or non-dict)
            if not isinstance(result, dict):
                logger.error(f"Unexpected API response type for org label: {type(result)} - {result}")
                return {
                    'name': name,
                    'error': True,
                    'reason': f"API returned {type(result).__name__} instead of dict: {result}",
                    'level': 'organization'
                }
            result['level'] = 'organization'
            result['skipped'] = False
            logger.info(f"Created organization label '{name}' in {owner}")
        else:
            # Create at repo level
            result = await loop.run_in_executor(
                None,
                lambda: self.gitea.create_label(name, color, description, target_repo)
            )
            # Handle unexpected response types (API may return list or non-dict)
            if not isinstance(result, dict):
                logger.error(f"Unexpected API response type for repo label: {type(result)} - {result}")
                return {
                    'name': name,
                    'error': True,
                    'reason': f"API returned {type(result).__name__} instead of dict: {result}",
                    'level': 'repository'
                }
            result['level'] = 'repository'
            result['skipped'] = False
            logger.info(f"Created repository label '{name}' in {target_repo}")

        return result
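A sketch of how the smart routing is expected to behave; the repository and label names below are made up for illustration:

```python
# 'Type/*' is an org-level category, so this should land on the organization that
# owns the repository (assuming it is an org repo).
result = await label_tools.create_label_smart('Type/Bug', 'ee0701', repo='personal-projects/leo-claude-mktplace')
assert result['level'] in ('organization', 'existing')

# 'Component/*' is repo-level, so this one should be created on the repository itself.
result = await label_tools.create_label_smart('Component/Backend', '0052cc', repo='personal-projects/leo-claude-mktplace')
```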
@@ -4,6 +4,7 @@ NetBox API client for interacting with NetBox REST API.
Provides a generic HTTP client with methods for all standard REST operations.
Individual tool modules use this client for their specific endpoints.
"""
import json
import requests
import logging
from typing import List, Dict, Optional, Any, Union
@@ -83,7 +84,20 @@ class NetBoxClient:
        if response.status_code == 204 or not response.content:
            return None

        return response.json()
        # Parse JSON with diagnostic error handling
        try:
            return response.json()
        except json.JSONDecodeError as e:
            logger.error(
                f"JSON decode failed. Status: {response.status_code}, "
                f"Content-Length: {len(response.content)}, "
                f"Content preview: {response.content[:200]!r}"
            )
            raise ValueError(
                f"Invalid JSON response from NetBox: {e}. "
                f"Status code: {response.status_code}, "
                f"Content length: {len(response.content)} bytes"
            ) from e

    def list(
        self,
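With this change, a caller that receives an HTML error page instead of JSON gets a ValueError with the status code and body length. A hedged sketch of what that looks like from the calling side (the `list` call shown is assumed from the surrounding class, not this hunk):

```python
try:
    sites = client.list('dcim/sites')  # method and endpoint names assumed
except ValueError as e:
    # e.g. "Invalid JSON response from NetBox: Expecting value: line 1 column 1 (char 0). Status code: 502, ..."
    print(f"NetBox returned a non-JSON body: {e}")
```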
@@ -103,7 +103,19 @@ TOOL_DEFINITIONS = {
        'properties': {
            'id': {'type': 'integer', 'description': 'Site ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'status': {'type': 'string', 'description': 'New status'}
            'slug': {'type': 'string', 'description': 'New slug'},
            'status': {'type': 'string', 'description': 'Status'},
            'region': {'type': 'integer', 'description': 'Region ID'},
            'group': {'type': 'integer', 'description': 'Site group ID'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'facility': {'type': 'string', 'description': 'Facility name'},
            'time_zone': {'type': 'string', 'description': 'Time zone'},
            'description': {'type': 'string', 'description': 'Description'},
            'physical_address': {'type': 'string', 'description': 'Physical address'},
            'shipping_address': {'type': 'string', 'description': 'Shipping address'},
            'latitude': {'type': 'number', 'description': 'Latitude'},
            'longitude': {'type': 'number', 'description': 'Longitude'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
@@ -136,7 +148,14 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_location': {
        'description': 'Update an existing location',
        'properties': {'id': {'type': 'integer', 'description': 'Location ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Location ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'slug': {'type': 'string', 'description': 'New slug'},
            'site': {'type': 'integer', 'description': 'Site ID'},
            'parent': {'type': 'integer', 'description': 'Parent location ID'},
            'description': {'type': 'string', 'description': 'Description'}
        },
        'required': ['id']
    },
    'dcim_delete_location': {
@@ -171,7 +190,18 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_rack': {
        'description': 'Update an existing rack',
        'properties': {'id': {'type': 'integer', 'description': 'Rack ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Rack ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'site': {'type': 'integer', 'description': 'Site ID'},
            'location': {'type': 'integer', 'description': 'Location ID'},
            'status': {'type': 'string', 'description': 'Status'},
            'role': {'type': 'integer', 'description': 'Role ID'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'u_height': {'type': 'integer', 'description': 'Rack height in U'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
    'dcim_delete_rack': {
@@ -198,7 +228,12 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_manufacturer': {
        'description': 'Update an existing manufacturer',
        'properties': {'id': {'type': 'integer', 'description': 'Manufacturer ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Manufacturer ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'slug': {'type': 'string', 'description': 'New slug'},
            'description': {'type': 'string', 'description': 'Description'}
        },
        'required': ['id']
    },
    'dcim_delete_manufacturer': {
@@ -230,7 +265,16 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_device_type': {
        'description': 'Update an existing device type',
        'properties': {'id': {'type': 'integer', 'description': 'Device type ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Device type ID'},
            'manufacturer': {'type': 'integer', 'description': 'Manufacturer ID'},
            'model': {'type': 'string', 'description': 'Model name'},
            'slug': {'type': 'string', 'description': 'New slug'},
            'u_height': {'type': 'number', 'description': 'Height in rack units'},
            'is_full_depth': {'type': 'boolean', 'description': 'Is full depth'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
    'dcim_delete_device_type': {
@@ -259,7 +303,14 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_device_role': {
        'description': 'Update an existing device role',
        'properties': {'id': {'type': 'integer', 'description': 'Device role ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Device role ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'slug': {'type': 'string', 'description': 'New slug'},
            'color': {'type': 'string', 'description': 'Hex color code'},
            'vm_role': {'type': 'boolean', 'description': 'Can be assigned to VMs'},
            'description': {'type': 'string', 'description': 'Description'}
        },
        'required': ['id']
    },
    'dcim_delete_device_role': {
@@ -290,7 +341,13 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_platform': {
        'description': 'Update an existing platform',
        'properties': {'id': {'type': 'integer', 'description': 'Platform ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Platform ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'slug': {'type': 'string', 'description': 'New slug'},
            'manufacturer': {'type': 'integer', 'description': 'Manufacturer ID'},
            'description': {'type': 'string', 'description': 'Description'}
        },
        'required': ['id']
    },
    'dcim_delete_platform': {
@@ -326,7 +383,13 @@ TOOL_DEFINITIONS = {
            'status': {'type': 'string', 'description': 'Device status'},
            'rack': {'type': 'integer', 'description': 'Rack ID'},
            'position': {'type': 'number', 'description': 'Position in rack'},
            'serial': {'type': 'string', 'description': 'Serial number'}
            'serial': {'type': 'string', 'description': 'Serial number'},
            'platform': {'type': 'integer', 'description': 'Platform ID'},
            'primary_ip4': {'type': 'integer', 'description': 'Primary IPv4 address ID'},
            'primary_ip6': {'type': 'integer', 'description': 'Primary IPv6 address ID'},
            'asset_tag': {'type': 'string', 'description': 'Asset tag'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['name', 'device_type', 'role', 'site']
    },
@@ -335,7 +398,17 @@ TOOL_DEFINITIONS = {
        'properties': {
            'id': {'type': 'integer', 'description': 'Device ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'status': {'type': 'string', 'description': 'New status'}
            'status': {'type': 'string', 'description': 'New status'},
            'platform': {'type': 'integer', 'description': 'Platform ID'},
            'primary_ip4': {'type': 'integer', 'description': 'Primary IPv4 address ID'},
            'primary_ip6': {'type': 'integer', 'description': 'Primary IPv6 address ID'},
            'serial': {'type': 'string', 'description': 'Serial number'},
            'asset_tag': {'type': 'string', 'description': 'Asset tag'},
            'site': {'type': 'integer', 'description': 'Site ID'},
            'rack': {'type': 'integer', 'description': 'Rack ID'},
            'position': {'type': 'number', 'description': 'Position in rack'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
@@ -370,7 +443,18 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_interface': {
        'description': 'Update an existing interface',
        'properties': {'id': {'type': 'integer', 'description': 'Interface ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Interface ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'type': {'type': 'string', 'description': 'Interface type'},
            'enabled': {'type': 'boolean', 'description': 'Interface enabled'},
            'mtu': {'type': 'integer', 'description': 'MTU'},
            'mac_address': {'type': 'string', 'description': 'MAC address'},
            'description': {'type': 'string', 'description': 'Description'},
            'mode': {'type': 'string', 'description': 'VLAN mode'},
            'untagged_vlan': {'type': 'integer', 'description': 'Untagged VLAN ID'},
            'tagged_vlans': {'type': 'array', 'description': 'Tagged VLAN IDs'}
        },
        'required': ['id']
    },
    'dcim_delete_interface': {
@@ -404,7 +488,15 @@ TOOL_DEFINITIONS = {
    },
    'dcim_update_cable': {
        'description': 'Update an existing cable',
        'properties': {'id': {'type': 'integer', 'description': 'Cable ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Cable ID'},
            'type': {'type': 'string', 'description': 'Cable type'},
            'status': {'type': 'string', 'description': 'Cable status'},
            'label': {'type': 'string', 'description': 'Cable label'},
            'color': {'type': 'string', 'description': 'Cable color'},
            'length': {'type': 'number', 'description': 'Cable length'},
            'length_unit': {'type': 'string', 'description': 'Length unit'}
        },
        'required': ['id']
    },
    'dcim_delete_cable': {
@@ -492,7 +584,15 @@ TOOL_DEFINITIONS = {
    },
    'ipam_update_vrf': {
        'description': 'Update an existing VRF',
        'properties': {'id': {'type': 'integer', 'description': 'VRF ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'VRF ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'rd': {'type': 'string', 'description': 'Route distinguisher'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'enforce_unique': {'type': 'boolean', 'description': 'Enforce unique IPs'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
    'ipam_delete_vrf': {
@@ -531,7 +631,19 @@ TOOL_DEFINITIONS = {
    },
    'ipam_update_prefix': {
        'description': 'Update an existing prefix',
        'properties': {'id': {'type': 'integer', 'description': 'Prefix ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Prefix ID'},
            'prefix': {'type': 'string', 'description': 'Prefix in CIDR notation'},
            'status': {'type': 'string', 'description': 'Status'},
            'site': {'type': 'integer', 'description': 'Site ID'},
            'vrf': {'type': 'integer', 'description': 'VRF ID'},
            'vlan': {'type': 'integer', 'description': 'VLAN ID'},
            'role': {'type': 'integer', 'description': 'Role ID'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'is_pool': {'type': 'boolean', 'description': 'Is a pool'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
    'ipam_delete_prefix': {
@@ -582,7 +694,18 @@ TOOL_DEFINITIONS = {
    },
    'ipam_update_ip_address': {
        'description': 'Update an existing IP address',
        'properties': {'id': {'type': 'integer', 'description': 'IP address ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'IP address ID'},
            'address': {'type': 'string', 'description': 'IP address with prefix length'},
            'status': {'type': 'string', 'description': 'Status'},
            'vrf': {'type': 'integer', 'description': 'VRF ID'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'dns_name': {'type': 'string', 'description': 'DNS name'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'},
            'assigned_object_type': {'type': 'string', 'description': 'Object type to assign to'},
            'assigned_object_id': {'type': 'integer', 'description': 'Object ID to assign to'}
        },
        'required': ['id']
    },
    'ipam_delete_ip_address': {
@@ -647,7 +770,18 @@ TOOL_DEFINITIONS = {
    },
    'ipam_update_vlan': {
        'description': 'Update an existing VLAN',
        'properties': {'id': {'type': 'integer', 'description': 'VLAN ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'VLAN ID'},
            'vid': {'type': 'integer', 'description': 'VLAN ID number'},
            'name': {'type': 'string', 'description': 'VLAN name'},
            'status': {'type': 'string', 'description': 'Status'},
            'site': {'type': 'integer', 'description': 'Site ID'},
            'group': {'type': 'integer', 'description': 'VLAN group ID'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'role': {'type': 'integer', 'description': 'Role ID'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
    'ipam_delete_vlan': {
@@ -757,16 +891,17 @@ TOOL_DEFINITIONS = {
        'properties': {'id': {'type': 'integer', 'description': 'Provider ID'}},
        'required': ['id']
    },
    'circuits_list_circuit_types': {
    # NOTE: circuit_types tools shortened to meet 28-char limit
    'circ_list_types': {
        'description': 'List all circuit types in NetBox',
        'properties': {'name': {'type': 'string', 'description': 'Filter by name'}}
    },
    'circuits_get_circuit_type': {
    'circ_get_type': {
        'description': 'Get a specific circuit type by ID',
        'properties': {'id': {'type': 'integer', 'description': 'Circuit type ID'}},
        'required': ['id']
    },
    'circuits_create_circuit_type': {
    'circ_create_type': {
        'description': 'Create a new circuit type',
        'properties': {
            'name': {'type': 'string', 'description': 'Type name'},
@@ -809,19 +944,20 @@ TOOL_DEFINITIONS = {
        'properties': {'id': {'type': 'integer', 'description': 'Circuit ID'}},
        'required': ['id']
    },
    'circuits_list_circuit_terminations': {
    # NOTE: circuit_terminations tools shortened to meet 28-char limit
    'circ_list_terminations': {
        'description': 'List all circuit terminations in NetBox',
        'properties': {
            'circuit_id': {'type': 'integer', 'description': 'Filter by circuit ID'},
            'site_id': {'type': 'integer', 'description': 'Filter by site ID'}
        }
    },
    'circuits_get_circuit_termination': {
    'circ_get_termination': {
        'description': 'Get a specific circuit termination by ID',
        'properties': {'id': {'type': 'integer', 'description': 'Termination ID'}},
        'required': ['id']
    },
    'circuits_create_circuit_termination': {
    'circ_create_termination': {
        'description': 'Create a new circuit termination',
        'properties': {
            'circuit': {'type': 'integer', 'description': 'Circuit ID'},
@@ -832,16 +968,18 @@ TOOL_DEFINITIONS = {
    },

    # ==================== Virtualization Tools ====================
    'virtualization_list_cluster_types': {
    # NOTE: Tool names shortened from 'virtualization_' to 'virt_' to meet
    # 28-char limit (Claude API 64-char limit minus 36-char prefix)
    'virt_list_cluster_types': {
        'description': 'List all cluster types in NetBox',
        'properties': {'name': {'type': 'string', 'description': 'Filter by name'}}
    },
    'virtualization_get_cluster_type': {
    'virt_get_cluster_type': {
        'description': 'Get a specific cluster type by ID',
        'properties': {'id': {'type': 'integer', 'description': 'Cluster type ID'}},
        'required': ['id']
    },
    'virtualization_create_cluster_type': {
    'virt_create_cluster_type': {
        'description': 'Create a new cluster type',
        'properties': {
            'name': {'type': 'string', 'description': 'Type name'},
@@ -849,16 +987,16 @@ TOOL_DEFINITIONS = {
        },
        'required': ['name', 'slug']
    },
    'virtualization_list_cluster_groups': {
    'virt_list_cluster_groups': {
        'description': 'List all cluster groups in NetBox',
        'properties': {'name': {'type': 'string', 'description': 'Filter by name'}}
    },
    'virtualization_get_cluster_group': {
    'virt_get_cluster_group': {
        'description': 'Get a specific cluster group by ID',
        'properties': {'id': {'type': 'integer', 'description': 'Cluster group ID'}},
        'required': ['id']
    },
    'virtualization_create_cluster_group': {
    'virt_create_cluster_group': {
        'description': 'Create a new cluster group',
        'properties': {
            'name': {'type': 'string', 'description': 'Group name'},
@@ -866,7 +1004,7 @@ TOOL_DEFINITIONS = {
        },
        'required': ['name', 'slug']
    },
    'virtualization_list_clusters': {
    'virt_list_clusters': {
        'description': 'List all clusters in NetBox',
        'properties': {
            'name': {'type': 'string', 'description': 'Filter by name'},
@@ -875,12 +1013,12 @@ TOOL_DEFINITIONS = {
            'site_id': {'type': 'integer', 'description': 'Filter by site ID'}
        }
    },
    'virtualization_get_cluster': {
    'virt_get_cluster': {
        'description': 'Get a specific cluster by ID',
        'properties': {'id': {'type': 'integer', 'description': 'Cluster ID'}},
        'required': ['id']
    },
    'virtualization_create_cluster': {
    'virt_create_cluster': {
        'description': 'Create a new cluster',
        'properties': {
            'name': {'type': 'string', 'description': 'Cluster name'},
@@ -891,17 +1029,27 @@ TOOL_DEFINITIONS = {
        },
        'required': ['name', 'type']
    },
    'virtualization_update_cluster': {
    'virt_update_cluster': {
        'description': 'Update an existing cluster',
        'properties': {'id': {'type': 'integer', 'description': 'Cluster ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'Cluster ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'type': {'type': 'integer', 'description': 'Cluster type ID'},
            'group': {'type': 'integer', 'description': 'Cluster group ID'},
            'site': {'type': 'integer', 'description': 'Site ID'},
            'status': {'type': 'string', 'description': 'Status'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
    'virtualization_delete_cluster': {
    'virt_delete_cluster': {
        'description': 'Delete a cluster',
        'properties': {'id': {'type': 'integer', 'description': 'Cluster ID'}},
        'required': ['id']
    },
    'virtualization_list_virtual_machines': {
    'virt_list_vms': {
        'description': 'List all virtual machines in NetBox',
        'properties': {
            'name': {'type': 'string', 'description': 'Filter by name'},
@@ -910,12 +1058,12 @@ TOOL_DEFINITIONS = {
            'status': {'type': 'string', 'description': 'Filter by status'}
        }
    },
    'virtualization_get_virtual_machine': {
    'virt_get_vm': {
        'description': 'Get a specific virtual machine by ID',
        'properties': {'id': {'type': 'integer', 'description': 'VM ID'}},
        'required': ['id']
    },
    'virtualization_create_virtual_machine': {
    'virt_create_vm': {
        'description': 'Create a new virtual machine',
        'properties': {
            'name': {'type': 'string', 'description': 'VM name'},
@@ -928,29 +1076,45 @@ TOOL_DEFINITIONS = {
        },
        'required': ['name']
    },
    'virtualization_update_virtual_machine': {
    'virt_update_vm': {
        'description': 'Update an existing virtual machine',
        'properties': {'id': {'type': 'integer', 'description': 'VM ID'}},
        'properties': {
            'id': {'type': 'integer', 'description': 'VM ID'},
            'name': {'type': 'string', 'description': 'New name'},
            'status': {'type': 'string', 'description': 'Status'},
            'cluster': {'type': 'integer', 'description': 'Cluster ID'},
            'site': {'type': 'integer', 'description': 'Site ID'},
            'role': {'type': 'integer', 'description': 'Role ID'},
            'tenant': {'type': 'integer', 'description': 'Tenant ID'},
            'platform': {'type': 'integer', 'description': 'Platform ID'},
            'vcpus': {'type': 'number', 'description': 'Number of vCPUs'},
            'memory': {'type': 'integer', 'description': 'Memory in MB'},
            'disk': {'type': 'integer', 'description': 'Disk in GB'},
            'primary_ip4': {'type': 'integer', 'description': 'Primary IPv4 address ID'},
            'primary_ip6': {'type': 'integer', 'description': 'Primary IPv6 address ID'},
            'description': {'type': 'string', 'description': 'Description'},
            'comments': {'type': 'string', 'description': 'Comments'}
        },
        'required': ['id']
    },
    'virtualization_delete_virtual_machine': {
    'virt_delete_vm': {
        'description': 'Delete a virtual machine',
        'properties': {'id': {'type': 'integer', 'description': 'VM ID'}},
        'required': ['id']
    },
    'virtualization_list_vm_interfaces': {
    'virt_list_vm_ifaces': {
        'description': 'List all VM interfaces in NetBox',
        'properties': {
            'virtual_machine_id': {'type': 'integer', 'description': 'Filter by VM ID'},
            'name': {'type': 'string', 'description': 'Filter by name'}
        }
    },
    'virtualization_get_vm_interface': {
    'virt_get_vm_iface': {
        'description': 'Get a specific VM interface by ID',
        'properties': {'id': {'type': 'integer', 'description': 'Interface ID'}},
        'required': ['id']
    },
    'virtualization_create_vm_interface': {
    'virt_create_vm_iface': {
        'description': 'Create a new VM interface',
        'properties': {
            'virtual_machine': {'type': 'integer', 'description': 'VM ID'},
|
|||||||
},
|
},
|
||||||
|
|
||||||
# ==================== Wireless Tools ====================
|
# ==================== Wireless Tools ====================
|
||||||
'wireless_list_wireless_lan_groups': {
|
# NOTE: Tool names shortened from 'wireless_' to 'wlan_' to meet
|
||||||
|
# 28-char limit (Claude API 64-char limit minus 36-char prefix)
|
||||||
|
'wlan_list_groups': {
|
||||||
'description': 'List all wireless LAN groups in NetBox',
|
'description': 'List all wireless LAN groups in NetBox',
|
||||||
'properties': {'name': {'type': 'string', 'description': 'Filter by name'}}
|
'properties': {'name': {'type': 'string', 'description': 'Filter by name'}}
|
||||||
},
|
},
|
||||||
'wireless_get_wireless_lan_group': {
|
'wlan_get_group': {
|
||||||
'description': 'Get a specific wireless LAN group by ID',
|
'description': 'Get a specific wireless LAN group by ID',
|
||||||
'properties': {'id': {'type': 'integer', 'description': 'WLAN group ID'}},
|
'properties': {'id': {'type': 'integer', 'description': 'WLAN group ID'}},
|
||||||
'required': ['id']
|
'required': ['id']
|
||||||
},
|
},
|
||||||
'wireless_create_wireless_lan_group': {
|
'wlan_create_group': {
|
||||||
'description': 'Create a new wireless LAN group',
|
'description': 'Create a new wireless LAN group',
|
||||||
'properties': {
|
'properties': {
|
||||||
'name': {'type': 'string', 'description': 'Group name'},
|
'name': {'type': 'string', 'description': 'Group name'},
|
||||||
@@ -1105,7 +1271,7 @@ TOOL_DEFINITIONS = {
        },
        'required': ['name', 'slug']
    },
    'wireless_list_wireless_lans': {
    'wlan_list_lans': {
        'description': 'List all wireless LANs in NetBox',
        'properties': {
            'ssid': {'type': 'string', 'description': 'Filter by SSID'},
@@ -1113,12 +1279,12 @@ TOOL_DEFINITIONS = {
            'status': {'type': 'string', 'description': 'Filter by status'}
        }
    },
    'wireless_get_wireless_lan': {
    'wlan_get_lan': {
        'description': 'Get a specific wireless LAN by ID',
        'properties': {'id': {'type': 'integer', 'description': 'WLAN ID'}},
        'required': ['id']
    },
    'wireless_create_wireless_lan': {
    'wlan_create_lan': {
        'description': 'Create a new wireless LAN',
        'properties': {
            'ssid': {'type': 'string', 'description': 'SSID'},
@@ -1128,14 +1294,14 @@ TOOL_DEFINITIONS = {
        },
        'required': ['ssid']
    },
    'wireless_list_wireless_links': {
    'wlan_list_links': {
        'description': 'List all wireless links in NetBox',
        'properties': {
            'ssid': {'type': 'string', 'description': 'Filter by SSID'},
            'status': {'type': 'string', 'description': 'Filter by status'}
        }
    },
    'wireless_get_wireless_link': {
    'wlan_get_link': {
        'description': 'Get a specific wireless link by ID',
        'properties': {'id': {'type': 'integer', 'description': 'Link ID'}},
        'required': ['id']
@@ -1241,6 +1407,52 @@ TOOL_DEFINITIONS = {
}


# Map shortened tool names to (category, method_name) for routing.
# This is necessary because tool names were shortened to meet the 28-character
# limit imposed by Claude API's 64-character tool name limit minus the
# 36-character prefix used by Claude Code for MCP tools.
TOOL_NAME_MAP = {
    # Virtualization tools (virt_ -> virtualization category)
    'virt_list_cluster_types': ('virtualization', 'list_cluster_types'),
    'virt_get_cluster_type': ('virtualization', 'get_cluster_type'),
    'virt_create_cluster_type': ('virtualization', 'create_cluster_type'),
    'virt_list_cluster_groups': ('virtualization', 'list_cluster_groups'),
    'virt_get_cluster_group': ('virtualization', 'get_cluster_group'),
    'virt_create_cluster_group': ('virtualization', 'create_cluster_group'),
    'virt_list_clusters': ('virtualization', 'list_clusters'),
    'virt_get_cluster': ('virtualization', 'get_cluster'),
    'virt_create_cluster': ('virtualization', 'create_cluster'),
    'virt_update_cluster': ('virtualization', 'update_cluster'),
    'virt_delete_cluster': ('virtualization', 'delete_cluster'),
    'virt_list_vms': ('virtualization', 'list_virtual_machines'),
    'virt_get_vm': ('virtualization', 'get_virtual_machine'),
    'virt_create_vm': ('virtualization', 'create_virtual_machine'),
    'virt_update_vm': ('virtualization', 'update_virtual_machine'),
    'virt_delete_vm': ('virtualization', 'delete_virtual_machine'),
    'virt_list_vm_ifaces': ('virtualization', 'list_vm_interfaces'),
    'virt_get_vm_iface': ('virtualization', 'get_vm_interface'),
    'virt_create_vm_iface': ('virtualization', 'create_vm_interface'),

    # Circuits tools (circ_ -> circuits category, for shortened names only)
    'circ_list_types': ('circuits', 'list_circuit_types'),
    'circ_get_type': ('circuits', 'get_circuit_type'),
    'circ_create_type': ('circuits', 'create_circuit_type'),
    'circ_list_terminations': ('circuits', 'list_circuit_terminations'),
    'circ_get_termination': ('circuits', 'get_circuit_termination'),
    'circ_create_termination': ('circuits', 'create_circuit_termination'),

    # Wireless tools (wlan_ -> wireless category)
    'wlan_list_groups': ('wireless', 'list_wireless_lan_groups'),
    'wlan_get_group': ('wireless', 'get_wireless_lan_group'),
    'wlan_create_group': ('wireless', 'create_wireless_lan_group'),
    'wlan_list_lans': ('wireless', 'list_wireless_lans'),
    'wlan_get_lan': ('wireless', 'get_wireless_lan'),
    'wlan_create_lan': ('wireless', 'create_wireless_lan'),
    'wlan_list_links': ('wireless', 'list_wireless_links'),
    'wlan_get_link': ('wireless', 'get_wireless_link'),
}


class NetBoxMCPServer:
    """MCP Server for NetBox integration"""

@@ -1314,12 +1526,21 @@ class NetBoxMCPServer:
            )]

    async def _route_tool(self, name: str, arguments: dict):
        """Route tool call to appropriate handler."""
        """Route tool call to appropriate handler.

        Tool names may be shortened (e.g., 'virt_list_vms' instead of
        'virtualization_list_virtual_machines') to meet the 28-character
        limit. TOOL_NAME_MAP handles the translation to actual method names.
        """
        # Check if this is a mapped short name
        if name in TOOL_NAME_MAP:
            category, method_name = TOOL_NAME_MAP[name]
        else:
            # Fall back to original logic for unchanged tools
            parts = name.split('_', 1)
            if len(parts) != 2:
                raise ValueError(f"Invalid tool name format: {name}")
            category, method_name = parts[0], parts[1]

        # Map category to tool class
        tool_map = {
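A small sketch of how the routing resolves a shortened name against the map above (the final handler call is paraphrased; the actual tool-class wiring lives in `tool_map`):

```python
name = 'virt_list_vms'
if name in TOOL_NAME_MAP:
    category, method_name = TOOL_NAME_MAP[name]  # ('virtualization', 'list_virtual_machines')
else:
    category, method_name = name.split('_', 1)

# _route_tool then looks up the 'virtualization' handler and effectively calls
# getattr(handler, 'list_virtual_machines')(**arguments) - the exposed name stays
# under 28 characters while the full method name is preserved internally.
```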
plugins/claude-config-maintainer/hooks/enforce-rules.sh (Executable file, 68 lines added)
@@ -0,0 +1,68 @@
#!/bin/bash
# claude-config-maintainer: enforce mandatory behavior rules
# Checks if CLAUDE.md has the rules, adds them if missing

PREFIX="[claude-config-maintainer]"

# Find CLAUDE.md in current directory or parent
CLAUDE_MD=""
if [ -f "./CLAUDE.md" ]; then
    CLAUDE_MD="./CLAUDE.md"
elif [ -f "../CLAUDE.md" ]; then
    CLAUDE_MD="../CLAUDE.md"
fi

# If no CLAUDE.md found, exit silently
if [ -z "$CLAUDE_MD" ]; then
    exit 0
fi

# Check if mandatory rules exist
if grep -q "MANDATORY BEHAVIOR RULES" "$CLAUDE_MD" 2>/dev/null; then
    # Rules exist, all good
    exit 0
fi

# Rules missing - add them
RULES='## ⛔ MANDATORY BEHAVIOR RULES - READ FIRST

**These rules are NON-NEGOTIABLE. Violating them wastes the user'\''s time and money.**

### 1. WHEN USER ASKS YOU TO CHECK SOMETHING - CHECK EVERYTHING
- Search ALL locations, not just where you think it is
- Check cache directories: `~/.claude/plugins/cache/`
- Check installed: `~/.claude/plugins/marketplaces/`
- Check source directories
- **NEVER say "no" or "that'\''s not the issue" without exhaustive verification**

### 2. WHEN USER SAYS SOMETHING IS WRONG - BELIEVE THEM
- The user knows their system better than you
- Investigate thoroughly before disagreeing
- **Your confidence is often wrong. User'\''s instincts are often right.**

### 3. NEVER SAY "DONE" WITHOUT VERIFICATION
- Run the actual command/script to verify
- Show the output to the user
- **"Done" means VERIFIED WORKING, not "I made changes"**

### 4. SHOW EXACTLY WHAT USER ASKS FOR
- If user asks for messages, show the MESSAGES
- If user asks for code, show the CODE
- **Do not interpret or summarize unless asked**

**FAILURE TO FOLLOW THESE RULES = WASTED USER TIME = UNACCEPTABLE**

---

'

# Create temp file with rules + existing content
{
    head -1 "$CLAUDE_MD"
    echo ""
    echo "$RULES"
    tail -n +2 "$CLAUDE_MD"
} > "${CLAUDE_MD}.tmp"

mv "${CLAUDE_MD}.tmp" "$CLAUDE_MD"
echo "$PREFIX Added mandatory behavior rules to CLAUDE.md"
10
plugins/claude-config-maintainer/hooks/hooks.json
Normal file
10
plugins/claude-config-maintainer/hooks/hooks.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"hooks": {
|
||||||
|
"SessionStart": [
|
||||||
|
{
|
||||||
|
"type": "command",
|
||||||
|
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/enforce-rules.sh"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -111,6 +111,7 @@ cmdb-assistant/
|
|||||||
│ └── plugin.json # Plugin manifest
|
│ └── plugin.json # Plugin manifest
|
||||||
├── .mcp.json # MCP server configuration
|
├── .mcp.json # MCP server configuration
|
||||||
├── commands/
|
├── commands/
|
||||||
|
│ ├── initial-setup.md # Setup wizard
|
||||||
│ ├── cmdb-search.md # Search command
|
│ ├── cmdb-search.md # Search command
|
||||||
│ ├── cmdb-device.md # Device management
|
│ ├── cmdb-device.md # Device management
|
||||||
│ ├── cmdb-ip.md # IP management
|
│ ├── cmdb-ip.md # IP management
|
||||||
|
|||||||
@@ -70,13 +70,15 @@ cat ~/.config/claude/netbox.env 2>/dev/null || echo "FILE_NOT_FOUND"
|
|||||||
### Step 3.3: Gather NetBox Information

Use AskUserQuestion:
- Question: "What is your NetBox server URL? (e.g., https://netbox.company.com)"
- Question: "What is your NetBox API URL? (e.g., https://netbox.company.com/api)"
- Header: "NetBox URL"
- Options:
  - "Other (I'll provide the URL)"

Ask user to provide the URL.

**Important:** The URL must include `/api` at the end. If the user provides a URL without `/api`, append it automatically.
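A minimal sketch of that normalization (illustrative only; the helper name below is not part of the plugin):

```python
def normalize_netbox_url(url: str) -> str:
    """Ensure a user-provided NetBox URL ends with /api."""
    url = url.rstrip("/")
    if not url.endswith("/api"):
        url += "/api"
    return url

# normalize_netbox_url("https://netbox.company.com")      -> "https://netbox.company.com/api"
# normalize_netbox_url("https://netbox.company.com/api/") -> "https://netbox.company.com/api"
```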

### Step 3.4: Create Configuration File

```bash
@@ -120,9 +122,11 @@ Use AskUserQuestion:
### Step 4.1: Test Configuration (if token was added)

```bash
source ~/.config/claude/netbox.env && curl -s -o /dev/null -w "%{http_code}" -H "Authorization: Token $NETBOX_API_TOKEN" "$NETBOX_API_URL/api/"
source ~/.config/claude/netbox.env && curl -s -o /dev/null -w "%{http_code}" -H "Authorization: Token $NETBOX_API_TOKEN" "$NETBOX_API_URL/"
```

**Note:** The URL already includes `/api`, so we just append `/` for the root API endpoint.

Report result:
- 200: Success
- 403: Invalid token
|||||||
@@ -9,5 +9,6 @@
|
|||||||
"homepage": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace/src/branch/main/plugins/code-sentinel/README.md",
|
"homepage": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace/src/branch/main/plugins/code-sentinel/README.md",
|
||||||
"repository": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace.git",
|
"repository": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace.git",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"keywords": ["security", "refactoring", "code-quality", "static-analysis", "hooks"]
|
"keywords": ["security", "refactoring", "code-quality", "static-analysis", "hooks"],
|
||||||
|
"commands": ["./commands/"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,8 +5,8 @@
|
|||||||
"matcher": "Write|Edit|MultiEdit",
|
"matcher": "Write|Edit|MultiEdit",
|
||||||
"hooks": [
|
"hooks": [
|
||||||
{
|
{
|
||||||
"type": "prompt",
|
"type": "command",
|
||||||
"prompt": "[code-sentinel] SECURITY CHECK - Before writing this code, scan for these patterns:\n\n**Critical (BLOCK if found):**\n- eval(), exec() with user input\n- SQL string concatenation (SQL injection)\n- shell=True with user input (command injection)\n- Hardcoded secrets (API keys, passwords, tokens)\n- Pickle/marshal deserialization of untrusted data\n- innerHTML/dangerouslySetInnerHTML with user content (XSS)\n\n**Warning (WARN but allow):**\n- subprocess without input validation\n- File operations without path sanitization\n- HTTP requests without timeout\n- Broad exception catches (except:)\n- Debug/print statements with sensitive data\n\n**Response:**\n- If CRITICAL found: STOP with '[code-sentinel] BLOCKED:', explain the issue, suggest safe alternative\n- If WARNING found: Note briefly with '[code-sentinel] WARNING:', proceed with suggestion\n- If clean: Proceed silently (say nothing)\n\nDo NOT announce clean scans. Only speak if issues found."
|
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/security-check.sh"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
62
plugins/code-sentinel/hooks/security-check.sh
Executable file
62
plugins/code-sentinel/hooks/security-check.sh
Executable file
@@ -0,0 +1,62 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# code-sentinel security check hook
|
||||||
|
# Checks for obvious security issues in code files, skips config/docs
|
||||||
|
# Command hook - guaranteed predictable behavior
|
||||||
|
|
||||||
|
# Read tool input from stdin
|
||||||
|
INPUT=$(cat)
|
||||||
|
|
||||||
|
# Extract file_path from JSON input
|
||||||
|
FILE_PATH=$(echo "$INPUT" | grep -o '"file_path"[[:space:]]*:[[:space:]]*"[^"]*"' | head -1 | sed 's/.*"file_path"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/')
|
||||||
|
|
||||||
|
# If no file_path, exit silently
|
||||||
|
if [ -z "$FILE_PATH" ]; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# SKIP config/doc files entirely - exit silently
|
||||||
|
case "$FILE_PATH" in
|
||||||
|
*.md|*.json|*.yml|*.yaml|*.txt|*.toml|*.ini|*.cfg|*.conf)
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*/docs/*|*/README*|*/CHANGELOG*|*/LICENSE*)
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
*/.claude/*|*/.github/*|*/.vscode/*)
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# For code files, extract content to check
|
||||||
|
# For Edit tool: check new_string
|
||||||
|
# For Write tool: check content
|
||||||
|
CONTENT=$(echo "$INPUT" | grep -o '"new_string"[[:space:]]*:[[:space:]]*"[^"]*"' | head -1 | sed 's/.*"new_string"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/')
|
||||||
|
if [ -z "$CONTENT" ]; then
|
||||||
|
CONTENT=$(echo "$INPUT" | grep -o '"content"[[:space:]]*:[[:space:]]*"[^"]*"' | head -1 | sed 's/.*"content"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/')
|
||||||
|
fi
|
||||||
|
|
||||||
|
# If no content to check, exit silently
|
||||||
|
if [ -z "$CONTENT" ]; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for hardcoded secrets patterns (obvious cases only)
|
||||||
|
if echo "$CONTENT" | grep -qiE '(api[_-]?key|api[_-]?secret|password|passwd|secret[_-]?key|auth[_-]?token)[[:space:]]*[=:][[:space:]]*["\x27][A-Za-z0-9+/=_-]{20,}["\x27]'; then
|
||||||
|
echo "[code-sentinel] BLOCKED: Hardcoded secret detected"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for AWS keys pattern
|
||||||
|
if echo "$CONTENT" | grep -qE 'AKIA[A-Z0-9]{16}'; then
|
||||||
|
echo "[code-sentinel] BLOCKED: AWS access key detected"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check for private key headers
|
||||||
|
if echo "$CONTENT" | grep -qE '\-\-\-\-\-BEGIN (RSA |DSA |EC |OPENSSH )?PRIVATE KEY\-\-\-\-\-'; then
|
||||||
|
echo "[code-sentinel] BLOCKED: Private key detected"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# All other cases: exit silently (allow the edit)
|
||||||
|
exit 0
|
||||||
25
plugins/data-platform/.claude-plugin/plugin.json
Normal file
25
plugins/data-platform/.claude-plugin/plugin.json
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"name": "data-platform",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Data engineering tools with pandas, PostgreSQL/PostGIS, and dbt integration",
|
||||||
|
"author": {
|
||||||
|
"name": "Leo Miranda",
|
||||||
|
"email": "leobmiranda@gmail.com"
|
||||||
|
},
|
||||||
|
"homepage": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace/src/branch/main/plugins/data-platform/README.md",
|
||||||
|
"repository": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace.git",
|
||||||
|
"license": "MIT",
|
||||||
|
"keywords": [
|
||||||
|
"pandas",
|
||||||
|
"postgresql",
|
||||||
|
"postgis",
|
||||||
|
"dbt",
|
||||||
|
"data-engineering",
|
||||||
|
"etl",
|
||||||
|
"dataframe"
|
||||||
|
],
|
||||||
|
"hooks": "hooks/hooks.json",
|
||||||
|
"commands": ["./commands/"],
|
||||||
|
"agents": ["./agents/"],
|
||||||
|
"mcpServers": ["./.mcp.json"]
|
||||||
|
}
|
||||||
10
plugins/data-platform/.mcp.json
Normal file
10
plugins/data-platform/.mcp.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"data-platform": {
|
||||||
|
"type": "stdio",
|
||||||
|
"command": "${CLAUDE_PLUGIN_ROOT}/mcp-servers/data-platform/.venv/bin/python",
|
||||||
|
"args": ["-m", "mcp_server.server"],
|
||||||
|
"cwd": "${CLAUDE_PLUGIN_ROOT}/mcp-servers/data-platform"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
119
plugins/data-platform/README.md
Normal file
119
plugins/data-platform/README.md
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
# data-platform Plugin
|
||||||
|
|
||||||
|
Data engineering tools with pandas, PostgreSQL/PostGIS, and dbt integration for Claude Code.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **pandas Operations**: Load, transform, and export DataFrames with persistent data_ref system
|
||||||
|
- **PostgreSQL/PostGIS**: Database queries with connection pooling and spatial data support
|
||||||
|
- **dbt Integration**: Build tool wrapper with pre-execution validation
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This plugin is part of the leo-claude-mktplace. Install via:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# From marketplace
|
||||||
|
claude plugins install leo-claude-mktplace/data-platform
|
||||||
|
|
||||||
|
# Setup MCP server venv
|
||||||
|
cd ~/.claude/plugins/marketplaces/leo-claude-mktplace/mcp-servers/data-platform
|
||||||
|
python -m venv .venv
|
||||||
|
source .venv/bin/activate
|
||||||
|
pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### PostgreSQL (Optional)
|
||||||
|
|
||||||
|
Create `~/.config/claude/postgres.env`:
|
||||||
|
|
||||||
|
```env
|
||||||
|
POSTGRES_URL=postgresql://user:password@host:5432/database
|
||||||
|
```
|
||||||
|
|
||||||
|
### dbt (Optional)
|
||||||
|
|
||||||
|
Add to project `.env`:
|
||||||
|
|
||||||
|
```env
|
||||||
|
DBT_PROJECT_DIR=/path/to/dbt/project
|
||||||
|
DBT_PROFILES_DIR=~/.dbt
|
||||||
|
```
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `/initial-setup` | Interactive setup wizard for PostgreSQL and dbt configuration |
|
||||||
|
| `/ingest` | Load data from files or database |
|
||||||
|
| `/profile` | Generate data profile and statistics |
|
||||||
|
| `/schema` | Show database/DataFrame schema |
|
||||||
|
| `/explain` | Explain dbt model lineage |
|
||||||
|
| `/lineage` | Visualize data dependencies |
|
||||||
|
| `/run` | Execute dbt models |
|
||||||
|
|
||||||
|
## Agents
|
||||||
|
|
||||||
|
| Agent | Description |
|
||||||
|
|-------|-------------|
|
||||||
|
| `data-ingestion` | Data loading and transformation specialist |
|
||||||
|
| `data-analysis` | Exploration and profiling specialist |
|
||||||
|
|
||||||
|
## data_ref System

All DataFrame operations use a `data_ref` system for persistence:

```
# Load returns a reference
read_csv("data.csv") → {"data_ref": "sales_data"}

# Use reference in subsequent operations
filter("sales_data", "amount > 100") → {"data_ref": "sales_data_filtered"}
describe("sales_data_filtered") → {statistics}
```
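A minimal Python sketch of how such a store could work (illustrative only; the actual MCP server persists DataFrames via Arrow IPC and may differ):

```python
import pandas as pd

# Illustrative in-memory data_ref store; the names here are assumptions,
# not the plugin's real API.
_store: dict[str, pd.DataFrame] = {}

def read_csv(path: str, data_ref: str) -> str:
    _store[data_ref] = pd.read_csv(path)
    return data_ref

def filter_rows(data_ref: str, condition: str) -> str:
    new_ref = f"{data_ref}_filtered"
    _store[new_ref] = _store[data_ref].query(condition)
    return new_ref

def describe(data_ref: str) -> pd.DataFrame:
    return _store[data_ref].describe()

# ref = read_csv("data.csv", "sales_data")
# stats = describe(filter_rows(ref, "amount > 100"))
```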
## Example Workflow
|
||||||
|
|
||||||
|
```
|
||||||
|
/ingest data/sales.csv
|
||||||
|
# → Loaded 50,000 rows as "sales_data"
|
||||||
|
|
||||||
|
/profile sales_data
|
||||||
|
# → Statistical summary, null counts, quality assessment
|
||||||
|
|
||||||
|
/schema orders
|
||||||
|
# → Column names, types, constraints
|
||||||
|
|
||||||
|
/lineage fct_orders
|
||||||
|
# → Dependency graph showing upstream/downstream models
|
||||||
|
|
||||||
|
/run dim_customers
|
||||||
|
# → Pre-validates then executes dbt model
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tools Summary
|
||||||
|
|
||||||
|
### pandas (14 tools)
|
||||||
|
`read_csv`, `read_parquet`, `read_json`, `to_csv`, `to_parquet`, `describe`, `head`, `tail`, `filter`, `select`, `groupby`, `join`, `list_data`, `drop_data`
|
||||||
|
|
||||||
|
### PostgreSQL (6 tools)
|
||||||
|
`pg_connect`, `pg_query`, `pg_execute`, `pg_tables`, `pg_columns`, `pg_schemas`
|
||||||
|
|
||||||
|
### PostGIS (4 tools)
|
||||||
|
`st_tables`, `st_geometry_type`, `st_srid`, `st_extent`
|
||||||
|
|
||||||
|
### dbt (8 tools)
|
||||||
|
`dbt_parse`, `dbt_run`, `dbt_test`, `dbt_build`, `dbt_compile`, `dbt_ls`, `dbt_docs_generate`, `dbt_lineage`
|
||||||
|
|
||||||
|
## Memory Management

- Default limit: 100,000 rows per DataFrame
- Configure via `DATA_PLATFORM_MAX_ROWS` environment variable
- Use `chunk_size` parameter for large files
- Monitor with `list_data` tool
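A minimal sketch of how the limit and `chunk_size` might interact (assumed behavior, not the server's actual implementation):

```python
import os
import pandas as pd

MAX_ROWS = int(os.environ.get("DATA_PLATFORM_MAX_ROWS", "100000"))

def load_csv_limited(path: str, chunk_size: int | None = None) -> pd.DataFrame:
    """Load a CSV without keeping more than MAX_ROWS rows in memory."""
    if chunk_size:
        chunks, rows = [], 0
        for chunk in pd.read_csv(path, chunksize=chunk_size):
            chunks.append(chunk)
            rows += len(chunk)
            if rows >= MAX_ROWS:
                break
        return pd.concat(chunks).head(MAX_ROWS)
    return pd.read_csv(path, nrows=MAX_ROWS)
```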

## SessionStart Hook

On session start, the plugin checks PostgreSQL connectivity and displays a warning if it is unavailable. The check is non-blocking: pandas and dbt tools remain available even when PostgreSQL is unreachable.
|
||||||
98
plugins/data-platform/agents/data-analysis.md
Normal file
98
plugins/data-platform/agents/data-analysis.md
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
# Data Analysis Agent
|
||||||
|
|
||||||
|
You are a data analysis specialist. Your role is to help users explore, profile, and understand their data.
|
||||||
|
|
||||||
|
## Capabilities
|
||||||
|
|
||||||
|
- Profile datasets with statistical summaries
|
||||||
|
- Explore database schemas and structures
|
||||||
|
- Analyze dbt model lineage and dependencies
|
||||||
|
- Provide data quality assessments
|
||||||
|
- Generate insights and recommendations
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
### Data Exploration
|
||||||
|
- `describe` - Statistical summary
|
||||||
|
- `head` - Preview first rows
|
||||||
|
- `tail` - Preview last rows
|
||||||
|
- `list_data` - List available DataFrames
|
||||||
|
|
||||||
|
### Database Exploration
|
||||||
|
- `pg_connect` - Check database connection
|
||||||
|
- `pg_tables` - List all tables
|
||||||
|
- `pg_columns` - Get column details
|
||||||
|
- `pg_schemas` - List schemas
|
||||||
|
|
||||||
|
### PostGIS Exploration
|
||||||
|
- `st_tables` - List spatial tables
|
||||||
|
- `st_geometry_type` - Get geometry type
|
||||||
|
- `st_srid` - Get coordinate system
|
||||||
|
- `st_extent` - Get bounding box
|
||||||
|
|
||||||
|
### dbt Analysis
|
||||||
|
- `dbt_lineage` - Model dependencies
|
||||||
|
- `dbt_ls` - List resources
|
||||||
|
- `dbt_compile` - View compiled SQL
|
||||||
|
- `dbt_docs_generate` - Generate docs
|
||||||
|
|
||||||
|
## Workflow Guidelines
|
||||||
|
|
||||||
|
1. **Understand the question**:
|
||||||
|
- What does the user want to know?
|
||||||
|
- What data is available?
|
||||||
|
- What level of detail is needed?
|
||||||
|
|
||||||
|
2. **Explore the data**:
|
||||||
|
- Start with `list_data` or `pg_tables`
|
||||||
|
- Get schema info with `describe` or `pg_columns`
|
||||||
|
- Preview with `head` to understand content
|
||||||
|
|
||||||
|
3. **Profile thoroughly**:
|
||||||
|
- Use `describe` for statistics
|
||||||
|
- Check for nulls, outliers, patterns
|
||||||
|
- Note data quality issues
|
||||||
|
|
||||||
|
4. **Analyze dependencies** (for dbt):
|
||||||
|
- Use `dbt_lineage` to trace data flow
|
||||||
|
- Understand transformations
|
||||||
|
- Identify critical paths
|
||||||
|
|
||||||
|
5. **Provide insights**:
|
||||||
|
- Summarize findings clearly
|
||||||
|
- Highlight potential issues
|
||||||
|
- Recommend next steps
|
||||||
|
|
||||||
|
## Analysis Patterns
|
||||||
|
|
||||||
|
### Data Quality Check
|
||||||
|
1. `describe` - Get statistics
|
||||||
|
2. Check null percentages
|
||||||
|
3. Identify outliers (min/max vs mean)
|
||||||
|
4. Flag suspicious patterns
|
||||||
|
|
||||||
|
### Schema Comparison
|
||||||
|
1. `pg_columns` - Get table A schema
|
||||||
|
2. `pg_columns` - Get table B schema
|
||||||
|
3. Compare column names, types
|
||||||
|
4. Identify mismatches
|
||||||
|
|
||||||
|
### Lineage Analysis
|
||||||
|
1. `dbt_lineage` - Get model graph
|
||||||
|
2. Trace upstream sources
|
||||||
|
3. Identify downstream impact
|
||||||
|
4. Document critical path
|
||||||
|
|
||||||
|
## Example Interactions
|
||||||
|
|
||||||
|
**User**: What's in the sales_data DataFrame?
|
||||||
|
**Agent**: Uses `describe`, `head`, explains columns, statistics, patterns
|
||||||
|
|
||||||
|
**User**: What tables are in the database?
|
||||||
|
**Agent**: Uses `pg_tables`, shows list with column counts
|
||||||
|
|
||||||
|
**User**: How does the dim_customers model work?
|
||||||
|
**Agent**: Uses `dbt_lineage`, `dbt_compile`, explains dependencies and SQL
|
||||||
|
|
||||||
|
**User**: Is there any spatial data?
|
||||||
|
**Agent**: Uses `st_tables`, shows PostGIS tables with geometry types
|
||||||
81
plugins/data-platform/agents/data-ingestion.md
Normal file
81
plugins/data-platform/agents/data-ingestion.md
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
# Data Ingestion Agent
|
||||||
|
|
||||||
|
You are a data ingestion specialist. Your role is to help users load, transform, and prepare data for analysis.
|
||||||
|
|
||||||
|
## Capabilities
|
||||||
|
|
||||||
|
- Load data from CSV, Parquet, JSON files
|
||||||
|
- Query PostgreSQL databases
|
||||||
|
- Transform data using filter, select, groupby, join operations
|
||||||
|
- Export data to various formats
|
||||||
|
- Handle large datasets with chunking
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
### File Operations
|
||||||
|
- `read_csv` - Load CSV files with optional chunking
|
||||||
|
- `read_parquet` - Load Parquet files
|
||||||
|
- `read_json` - Load JSON/JSONL files
|
||||||
|
- `to_csv` - Export to CSV
|
||||||
|
- `to_parquet` - Export to Parquet
|
||||||
|
|
||||||
|
### Data Transformation
|
||||||
|
- `filter` - Filter rows by condition
|
||||||
|
- `select` - Select specific columns
|
||||||
|
- `groupby` - Group and aggregate
|
||||||
|
- `join` - Join two DataFrames
|
||||||
|
|
||||||
|
### Database Operations
|
||||||
|
- `pg_query` - Execute SELECT queries
|
||||||
|
- `pg_execute` - Execute INSERT/UPDATE/DELETE
|
||||||
|
- `pg_tables` - List available tables
|
||||||
|
|
||||||
|
### Management
|
||||||
|
- `list_data` - List all stored DataFrames
|
||||||
|
- `drop_data` - Remove DataFrame from store
|
||||||
|
|
||||||
|
## Workflow Guidelines
|
||||||
|
|
||||||
|
1. **Understand the data source**:
|
||||||
|
- Ask about file location/format
|
||||||
|
- For database, understand table structure
|
||||||
|
- Clarify any filters or transformations needed
|
||||||
|
|
||||||
|
2. **Load data efficiently**:
|
||||||
|
- Use appropriate reader for file format
|
||||||
|
- For large files (>100k rows), use chunking
|
||||||
|
- Name DataFrames meaningfully
|
||||||
|
|
||||||
|
3. **Transform as needed**:
|
||||||
|
- Apply filters early to reduce data size
|
||||||
|
- Select only needed columns
|
||||||
|
- Join related datasets
|
||||||
|
|
||||||
|
4. **Validate results**:
|
||||||
|
- Check row counts after transformations
|
||||||
|
- Verify data types are correct
|
||||||
|
- Preview results with `head`
|
||||||
|
|
||||||
|
5. **Store with meaningful names**:
|
||||||
|
- Use descriptive data_ref names
|
||||||
|
- Document the source and transformations
|
||||||
|
|
||||||
|
## Memory Management
|
||||||
|
|
||||||
|
- Default row limit: 100,000 rows
|
||||||
|
- For larger datasets, suggest:
|
||||||
|
- Filtering before loading
|
||||||
|
- Using chunk_size parameter
|
||||||
|
- Aggregating to reduce size
|
||||||
|
- Storing to Parquet for efficient retrieval
|
||||||
|
|
||||||
|
## Example Interactions
|
||||||
|
|
||||||
|
**User**: Load the sales data from data/sales.csv
|
||||||
|
**Agent**: Uses `read_csv` to load, reports data_ref, row count, columns
|
||||||
|
|
||||||
|
**User**: Filter to only Q4 2024 sales
|
||||||
|
**Agent**: Uses `filter` with date condition, stores filtered result
|
||||||
|
|
||||||
|
**User**: Join with customer data
|
||||||
|
**Agent**: Uses `join` to combine, validates result counts
|
||||||
90
plugins/data-platform/claude-md-integration.md
Normal file
90
plugins/data-platform/claude-md-integration.md
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# data-platform Plugin - CLAUDE.md Integration
|
||||||
|
|
||||||
|
Add this section to your project's CLAUDE.md to enable data-platform plugin features.
|
||||||
|
|
||||||
|
## Suggested CLAUDE.md Section
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
## Data Platform Integration
|
||||||
|
|
||||||
|
This project uses the data-platform plugin for data engineering workflows.
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
**PostgreSQL**: Credentials in `~/.config/claude/postgres.env`
|
||||||
|
**dbt**: Project path auto-detected from `dbt_project.yml`
|
||||||
|
|
||||||
|
### Available Commands
|
||||||
|
|
||||||
|
| Command | Purpose |
|
||||||
|
|---------|---------|
|
||||||
|
| `/ingest` | Load data from files or database |
|
||||||
|
| `/profile` | Generate statistical profile |
|
||||||
|
| `/schema` | Show schema information |
|
||||||
|
| `/explain` | Explain dbt model |
|
||||||
|
| `/lineage` | Show data lineage |
|
||||||
|
| `/run` | Execute dbt models |
|
||||||
|
|
||||||
|
### data_ref Convention
|
||||||
|
|
||||||
|
DataFrames are stored with references. Use meaningful names:
|
||||||
|
- `raw_*` for source data
|
||||||
|
- `stg_*` for staged/cleaned data
|
||||||
|
- `dim_*` for dimension tables
|
||||||
|
- `fct_*` for fact tables
|
||||||
|
- `rpt_*` for reports
|
||||||
|
|
||||||
|
### dbt Workflow
|
||||||
|
|
||||||
|
1. Always validate before running: `/run` includes automatic `dbt_parse`
|
||||||
|
2. For dbt 1.9+, check for deprecated syntax before commits
|
||||||
|
3. Use `/lineage` to understand impact of changes
|
||||||
|
|
||||||
|
### Database Access
|
||||||
|
|
||||||
|
PostgreSQL tools require POSTGRES_URL configuration:
|
||||||
|
- Read-only queries: `pg_query`
|
||||||
|
- Write operations: `pg_execute`
|
||||||
|
- Schema exploration: `pg_tables`, `pg_columns`
|
||||||
|
|
||||||
|
PostGIS spatial data:
|
||||||
|
- List spatial tables: `st_tables`
|
||||||
|
- Check geometry: `st_geometry_type`, `st_srid`, `st_extent`
|
||||||
|
```
|
||||||
|
|
||||||
|
## Environment Variables
|
||||||
|
|
||||||
|
Add to project `.env` if needed:
|
||||||
|
|
||||||
|
```env
|
||||||
|
# dbt configuration
|
||||||
|
DBT_PROJECT_DIR=./transform
|
||||||
|
DBT_PROFILES_DIR=~/.dbt
|
||||||
|
|
||||||
|
# Memory limits
|
||||||
|
DATA_PLATFORM_MAX_ROWS=100000
|
||||||
|
```
|
||||||
|
|
||||||
|
## Typical Workflows
|
||||||
|
|
||||||
|
### Data Exploration
|
||||||
|
```
|
||||||
|
/ingest data/raw_customers.csv
|
||||||
|
/profile raw_customers
|
||||||
|
/schema
|
||||||
|
```
|
||||||
|
|
||||||
|
### ETL Development
|
||||||
|
```
|
||||||
|
/schema orders # Understand source
|
||||||
|
/explain stg_orders # Understand transformation
|
||||||
|
/run stg_orders # Test the model
|
||||||
|
/lineage fct_orders # Check downstream impact
|
||||||
|
```
|
||||||
|
|
||||||
|
### Database Analysis
|
||||||
|
```
|
||||||
|
/schema # List all tables
|
||||||
|
pg_columns orders # Detailed schema
|
||||||
|
st_tables # Find spatial data
|
||||||
|
```
|
||||||
44
plugins/data-platform/commands/explain.md
Normal file
44
plugins/data-platform/commands/explain.md
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
# /explain - dbt Model Explanation
|
||||||
|
|
||||||
|
Explain a dbt model's purpose, dependencies, and SQL logic.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```
|
||||||
|
/explain <model_name>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. **Get model info**:
|
||||||
|
- Use `dbt_lineage` to get model metadata
|
||||||
|
- Extract description, tags, materialization
|
||||||
|
|
||||||
|
2. **Analyze dependencies**:
|
||||||
|
- Show upstream models (what this depends on)
|
||||||
|
- Show downstream models (what depends on this)
|
||||||
|
- Visualize as dependency tree
|
||||||
|
|
||||||
|
3. **Compile SQL**:
|
||||||
|
- Use `dbt_compile` to get rendered SQL
|
||||||
|
- Explain key transformations
|
||||||
|
|
||||||
|
4. **Report**:
|
||||||
|
- Model purpose (from description)
|
||||||
|
- Materialization strategy
|
||||||
|
- Dependency graph
|
||||||
|
- Key SQL logic explained
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```
|
||||||
|
/explain dim_customers
|
||||||
|
/explain fct_orders
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
Use these MCP tools:
|
||||||
|
- `dbt_lineage` - Get model dependencies
|
||||||
|
- `dbt_compile` - Get compiled SQL
|
||||||
|
- `dbt_ls` - List related resources
|
||||||
44
plugins/data-platform/commands/ingest.md
Normal file
44
plugins/data-platform/commands/ingest.md
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
# /ingest - Data Ingestion
|
||||||
|
|
||||||
|
Load data from files or database into the data platform.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```
|
||||||
|
/ingest [source]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. **Identify data source**:
|
||||||
|
- If source is a file path, determine format (CSV, Parquet, JSON); see the sketch after this list
|
||||||
|
- If source is "db" or a table name, query PostgreSQL
|
||||||
|
|
||||||
|
2. **Load data**:
|
||||||
|
- For files: Use `read_csv`, `read_parquet`, or `read_json`
|
||||||
|
- For database: Use `pg_query` with appropriate SELECT
|
||||||
|
|
||||||
|
3. **Validate**:
|
||||||
|
- Check row count against limits
|
||||||
|
- If exceeds 100k rows, suggest chunking or filtering
|
||||||
|
|
||||||
|
4. **Report**:
|
||||||
|
- Show data_ref, row count, columns, and memory usage
|
||||||
|
- Preview first few rows
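A minimal sketch of the format dispatch in step 1 (illustrative; it assumes plain pandas readers behind the MCP tools):

```python
from pathlib import Path
import pandas as pd

def load_file(path: str) -> pd.DataFrame:
    """Pick a pandas reader based on the file extension."""
    suffix = Path(path).suffix.lower()
    if suffix == ".csv":
        return pd.read_csv(path)
    if suffix == ".parquet":
        return pd.read_parquet(path)
    if suffix in (".json", ".jsonl"):
        return pd.read_json(path, lines=(suffix == ".jsonl"))
    raise ValueError(f"Unsupported file format: {suffix}")
```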
## Examples
|
||||||
|
|
||||||
|
```
|
||||||
|
/ingest data/sales.csv
|
||||||
|
/ingest data/customers.parquet
|
||||||
|
/ingest "SELECT * FROM orders WHERE created_at > '2024-01-01'"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
Use these MCP tools:
|
||||||
|
- `read_csv` - Load CSV files
|
||||||
|
- `read_parquet` - Load Parquet files
|
||||||
|
- `read_json` - Load JSON/JSONL files
|
||||||
|
- `pg_query` - Query PostgreSQL database
|
||||||
|
- `list_data` - List loaded DataFrames
|
||||||
231
plugins/data-platform/commands/initial-setup.md
Normal file
231
plugins/data-platform/commands/initial-setup.md
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
---
|
||||||
|
description: Interactive setup wizard for data-platform plugin - configures MCP server and optional PostgreSQL/dbt
|
||||||
|
---
|
||||||
|
|
||||||
|
# Data Platform Setup Wizard
|
||||||
|
|
||||||
|
This command sets up the data-platform plugin with pandas, PostgreSQL, and dbt integration.
|
||||||
|
|
||||||
|
## Important Context
|
||||||
|
|
||||||
|
- **This command uses Bash, Read, Write, and AskUserQuestion tools** - NOT MCP tools
|
||||||
|
- **MCP tools won't work until after setup + session restart**
|
||||||
|
- **PostgreSQL and dbt are optional** - pandas tools work without them
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 1: Environment Validation
|
||||||
|
|
||||||
|
### Step 1.1: Check Python Version
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 --version
|
||||||
|
```
|
||||||
|
|
||||||
|
Requires Python 3.10+. If below, stop setup and inform user.
|
||||||
|
|
||||||
|
### Step 1.2: Check for Required Libraries
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python3 -c "import sys; print(f'Python {sys.version_info.major}.{sys.version_info.minor}')"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 2: MCP Server Setup
|
||||||
|
|
||||||
|
### Step 2.1: Locate Data Platform MCP Server
|
||||||
|
|
||||||
|
The MCP server should be at the marketplace root:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# If running from installed marketplace
|
||||||
|
ls -la ~/.claude/plugins/marketplaces/leo-claude-mktplace/mcp-servers/data-platform/ 2>/dev/null || echo "NOT_FOUND_INSTALLED"
|
||||||
|
|
||||||
|
# If running from source
|
||||||
|
ls -la ~/claude-plugins-work/mcp-servers/data-platform/ 2>/dev/null || echo "NOT_FOUND_SOURCE"
|
||||||
|
```
|
||||||
|
|
||||||
|
Determine the correct path based on which exists.
|
||||||
|
|
||||||
|
### Step 2.2: Check Virtual Environment
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ls -la /path/to/mcp-servers/data-platform/.venv/bin/python 2>/dev/null && echo "VENV_EXISTS" || echo "VENV_MISSING"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 2.3: Create Virtual Environment (if missing)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd /path/to/mcp-servers/data-platform && python3 -m venv .venv && source .venv/bin/activate && pip install --upgrade pip && pip install -r requirements.txt && deactivate
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** This may take a few minutes due to pandas, pyarrow, and dbt dependencies.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 3: PostgreSQL Configuration (Optional)
|
||||||
|
|
||||||
|
### Step 3.1: Ask About PostgreSQL
|
||||||
|
|
||||||
|
Use AskUserQuestion:
|
||||||
|
- Question: "Do you want to configure PostgreSQL database access?"
|
||||||
|
- Header: "PostgreSQL"
|
||||||
|
- Options:
|
||||||
|
- "Yes, I have a PostgreSQL database"
|
||||||
|
- "No, I'll only use pandas/dbt tools"
|
||||||
|
|
||||||
|
**If user chooses "No":** Skip to Phase 4.
|
||||||
|
|
||||||
|
### Step 3.2: Create Config Directory
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir -p ~/.config/claude
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 3.3: Check PostgreSQL Configuration
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cat ~/.config/claude/postgres.env 2>/dev/null || echo "FILE_NOT_FOUND"
|
||||||
|
```
|
||||||
|
|
||||||
|
**If file exists with valid URL:** Skip to Step 3.6.
|
||||||
|
**If missing or has placeholders:** Continue.
|
||||||
|
|
||||||
|
### Step 3.4: Gather PostgreSQL Information
|
||||||
|
|
||||||
|
Use AskUserQuestion:
|
||||||
|
- Question: "What is your PostgreSQL connection URL format?"
|
||||||
|
- Header: "DB Format"
|
||||||
|
- Options:
|
||||||
|
- "Standard: postgresql://user:pass@host:5432/db"
|
||||||
|
- "PostGIS: postgresql://user:pass@host:5432/db (with PostGIS extension)"
|
||||||
|
- "Other (I'll provide the full URL)"
|
||||||
|
|
||||||
|
Ask user to provide the connection URL.
|
||||||
|
|
||||||
|
### Step 3.5: Create Configuration File
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cat > ~/.config/claude/postgres.env << 'EOF'
|
||||||
|
# PostgreSQL Configuration
|
||||||
|
# Generated by data-platform /initial-setup
|
||||||
|
|
||||||
|
POSTGRES_URL=<USER_PROVIDED_URL>
|
||||||
|
EOF
|
||||||
|
chmod 600 ~/.config/claude/postgres.env
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 3.6: Test PostgreSQL Connection (if configured)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
source ~/.config/claude/postgres.env && python3 -c "
|
||||||
|
import asyncio
|
||||||
|
import asyncpg
|
||||||
|
async def test():
|
||||||
|
try:
|
||||||
|
conn = await asyncpg.connect('$POSTGRES_URL', timeout=5)
|
||||||
|
ver = await conn.fetchval('SELECT version()')
|
||||||
|
await conn.close()
|
||||||
|
print(f'SUCCESS: {ver.split(\",\")[0]}')
|
||||||
|
except Exception as e:
|
||||||
|
print(f'FAILED: {e}')
|
||||||
|
asyncio.run(test())
|
||||||
|
"
|
||||||
|
```
|
||||||
|
|
||||||
|
Report result:
|
||||||
|
- SUCCESS: Connection works
|
||||||
|
- FAILED: Show error and suggest fixes
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 4: dbt Configuration (Optional)
|
||||||
|
|
||||||
|
### Step 4.1: Ask About dbt
|
||||||
|
|
||||||
|
Use AskUserQuestion:
|
||||||
|
- Question: "Do you use dbt for data transformations in your projects?"
|
||||||
|
- Header: "dbt"
|
||||||
|
- Options:
|
||||||
|
- "Yes, I have dbt projects"
|
||||||
|
- "No, I don't use dbt"
|
||||||
|
|
||||||
|
**If user chooses "No":** Skip to Phase 5.
|
||||||
|
|
||||||
|
### Step 4.2: dbt Discovery
|
||||||
|
|
||||||
|
dbt configuration is **project-level** (not system-level). The plugin auto-detects dbt projects by looking for `dbt_project.yml`.
|
||||||
|
|
||||||
|
Inform user:
|
||||||
|
```
|
||||||
|
dbt projects are detected automatically when you work in a directory
|
||||||
|
containing dbt_project.yml.
|
||||||
|
|
||||||
|
If your dbt project is in a subdirectory, you can set DBT_PROJECT_DIR
|
||||||
|
in your project's .env file:
|
||||||
|
|
||||||
|
DBT_PROJECT_DIR=./transform
|
||||||
|
DBT_PROFILES_DIR=~/.dbt
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 4.3: Check dbt Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
dbt --version 2>/dev/null || echo "DBT_NOT_FOUND"
|
||||||
|
```
|
||||||
|
|
||||||
|
**If not found:** Inform user that dbt CLI tools require dbt-core to be installed globally or in the project.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 5: Validation
|
||||||
|
|
||||||
|
### Step 5.1: Verify MCP Server
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd /path/to/mcp-servers/data-platform && .venv/bin/python -c "from mcp_server.server import DataPlatformMCPServer; print('MCP Server OK')"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 5.2: Summary
|
||||||
|
|
||||||
|
```
|
||||||
|
╔════════════════════════════════════════════════════════════╗
|
||||||
|
║ DATA-PLATFORM SETUP COMPLETE ║
|
||||||
|
╠════════════════════════════════════════════════════════════╣
|
||||||
|
║ MCP Server: ✓ Ready ║
|
||||||
|
║ pandas Tools: ✓ Available (14 tools) ║
|
||||||
|
║ PostgreSQL Tools: [✓/✗] [Status based on config] ║
|
||||||
|
║ PostGIS Tools: [✓/✗] [Status based on PostGIS] ║
|
||||||
|
║ dbt Tools: [✓/✗] [Status based on discovery] ║
|
||||||
|
╚════════════════════════════════════════════════════════════╝
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 5.3: Session Restart Notice
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**⚠️ Session Restart Required**
|
||||||
|
|
||||||
|
Restart your Claude Code session for MCP tools to become available.
|
||||||
|
|
||||||
|
**After restart, you can:**
|
||||||
|
- Run `/ingest` to load data from files or database
|
||||||
|
- Run `/profile` to analyze DataFrame statistics
|
||||||
|
- Run `/schema` to explore database/DataFrame schema
|
||||||
|
- Run `/run` to execute dbt models (if configured)
|
||||||
|
- Run `/lineage` to view dbt model dependencies
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Memory Limits
|
||||||
|
|
||||||
|
The data-platform plugin has a default row limit of 100,000 rows per DataFrame. For larger datasets:
|
||||||
|
- Use chunked processing (`chunk_size` parameter)
|
||||||
|
- Filter data before loading
|
||||||
|
- Store to Parquet for efficient re-loading
|
||||||
|
|
||||||
|
You can override the limit by setting in your project `.env`:
|
||||||
|
```
|
||||||
|
DATA_PLATFORM_MAX_ROWS=500000
|
||||||
|
```
|
||||||
60
plugins/data-platform/commands/lineage.md
Normal file
60
plugins/data-platform/commands/lineage.md
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
# /lineage - Data Lineage Visualization
|
||||||
|
|
||||||
|
Show data lineage for dbt models or database tables.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```
|
||||||
|
/lineage <model_name> [--depth N]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. **Get lineage data**:
|
||||||
|
- Use `dbt_lineage` for dbt models
|
||||||
|
- For database tables, trace through dbt manifest
|
||||||
|
|
||||||
|
2. **Build lineage graph**:
|
||||||
|
- Identify all upstream sources
|
||||||
|
- Identify all downstream consumers
|
||||||
|
- Note materialization at each node
|
||||||
|
|
||||||
|
3. **Visualize**:
|
||||||
|
- ASCII art dependency tree
|
||||||
|
- List format with indentation
|
||||||
|
- Show depth levels
|
||||||
|
|
||||||
|
4. **Report**:
|
||||||
|
- Full dependency chain
|
||||||
|
- Critical path identification
|
||||||
|
- Refresh implications
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```
|
||||||
|
/lineage dim_customers
|
||||||
|
/lineage fct_orders --depth 3
|
||||||
|
```
|
||||||
|
|
||||||
|
## Output Format
|
||||||
|
|
||||||
|
```
|
||||||
|
Sources:
|
||||||
|
└── raw_customers (source)
|
||||||
|
└── raw_orders (source)
|
||||||
|
|
||||||
|
dim_customers (table)
|
||||||
|
├── upstream:
|
||||||
|
│ └── stg_customers (view)
|
||||||
|
│ └── raw_customers (source)
|
||||||
|
└── downstream:
|
||||||
|
└── fct_orders (incremental)
|
||||||
|
└── rpt_customer_lifetime (table)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
Use these MCP tools:
|
||||||
|
- `dbt_lineage` - Get model dependencies
|
||||||
|
- `dbt_ls` - List dbt resources
|
||||||
|
- `dbt_docs_generate` - Generate full manifest
|
||||||
44
plugins/data-platform/commands/profile.md
Normal file
44
plugins/data-platform/commands/profile.md
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
# /profile - Data Profiling
|
||||||
|
|
||||||
|
Generate statistical profile and quality report for a DataFrame.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```
|
||||||
|
/profile <data_ref>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. **Get data reference**:
|
||||||
|
- If no data_ref provided, use `list_data` to show available options
|
||||||
|
- Validate the data_ref exists
|
||||||
|
|
||||||
|
2. **Generate profile**:
|
||||||
|
- Use `describe` for statistical summary
|
||||||
|
- Analyze null counts, unique values, data types
|
||||||
|
|
||||||
|
3. **Quality assessment**:
|
||||||
|
- Identify columns with high null percentage
|
||||||
|
- Flag potential data quality issues
|
||||||
|
- Suggest cleaning operations if needed
|
||||||
|
|
||||||
|
4. **Report**:
|
||||||
|
- Summary statistics per column
|
||||||
|
- Data type distribution
|
||||||
|
- Memory usage
|
||||||
|
- Quality score
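A minimal pandas sketch of the profiling steps above (the quality heuristic is illustrative, not the plugin's actual scoring):

```python
import pandas as pd

def profile(df: pd.DataFrame) -> dict:
    """Summary statistics plus a naive null-based quality score."""
    null_pct = df.isna().mean() * 100  # null percentage per column
    return {
        "stats": df.describe(include="all"),
        "dtypes": df.dtypes.value_counts().to_dict(),
        "memory_mb": df.memory_usage(deep=True).sum() / 1e6,
        "high_null_columns": null_pct[null_pct > 50].to_dict(),
        "quality_score": round(100 - null_pct.mean(), 1),
    }
```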
## Examples
|
||||||
|
|
||||||
|
```
|
||||||
|
/profile sales_data
|
||||||
|
/profile df_a1b2c3d4
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
Use these MCP tools:
|
||||||
|
- `describe` - Get statistical summary
|
||||||
|
- `head` - Preview first rows
|
||||||
|
- `list_data` - List available DataFrames
|
||||||
55
plugins/data-platform/commands/run.md
Normal file
55
plugins/data-platform/commands/run.md
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
# /run - Execute dbt Models
|
||||||
|
|
||||||
|
Run dbt models with automatic pre-validation.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```
|
||||||
|
/run [model_selection] [--full-refresh]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. **Pre-validation** (MANDATORY):
|
||||||
|
- Use `dbt_parse` to validate project
|
||||||
|
- Check for deprecated syntax (dbt 1.9+)
|
||||||
|
- If validation fails, show errors and STOP (see the sketch after this list)
|
||||||
|
|
||||||
|
2. **Execute models**:
|
||||||
|
- Use `dbt_run` with provided selection
|
||||||
|
- Monitor progress and capture output
|
||||||
|
|
||||||
|
3. **Report results**:
|
||||||
|
- Success/failure status per model
|
||||||
|
- Execution time
|
||||||
|
- Row counts where available
|
||||||
|
- Any warnings or errors
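A minimal sketch of this parse-then-run flow using the dbt CLI directly (an assumption; the MCP tools wrap dbt, but not necessarily like this):

```python
import subprocess

def run_models(selection: str | None = None) -> None:
    """Validate the project with `dbt parse`, then run the selected models."""
    parse = subprocess.run(["dbt", "parse"], capture_output=True, text=True)
    if parse.returncode != 0:
        print(parse.stdout or parse.stderr)
        raise SystemExit("dbt parse failed - fix errors before running models")
    cmd = ["dbt", "run"]
    if selection:
        cmd += ["--select", selection]
    subprocess.run(cmd, check=True)
```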
## Examples
|
||||||
|
|
||||||
|
```
|
||||||
|
/run # Run all models
|
||||||
|
/run dim_customers # Run specific model
|
||||||
|
/run +fct_orders # Run model and its upstream
|
||||||
|
/run tag:daily # Run models with tag
|
||||||
|
/run --full-refresh # Rebuild incremental models
|
||||||
|
```
|
||||||
|
|
||||||
|
## Selection Syntax
|
||||||
|
|
||||||
|
| Pattern | Meaning |
|
||||||
|
|---------|---------|
|
||||||
|
| `model_name` | Run single model |
|
||||||
|
| `+model_name` | Run model and upstream |
|
||||||
|
| `model_name+` | Run model and downstream |
|
||||||
|
| `+model_name+` | Run model with all deps |
|
||||||
|
| `tag:name` | Run by tag |
|
||||||
|
| `path:models/staging` | Run by path |
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
Use these MCP tools:
|
||||||
|
- `dbt_parse` - Pre-validation (ALWAYS RUN FIRST)
|
||||||
|
- `dbt_run` - Execute models
|
||||||
|
- `dbt_build` - Run + test
|
||||||
|
- `dbt_test` - Run tests only
|
||||||
48
plugins/data-platform/commands/schema.md
Normal file
48
plugins/data-platform/commands/schema.md
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# /schema - Schema Exploration
|
||||||
|
|
||||||
|
Display schema information for database tables or DataFrames.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```
|
||||||
|
/schema [table_name | data_ref]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. **Determine target**:
|
||||||
|
- If argument is a loaded data_ref, show DataFrame schema
|
||||||
|
- If argument is a table name, query database schema
|
||||||
|
- If no argument, list all available tables and DataFrames
|
||||||
|
|
||||||
|
2. **For DataFrames**:
|
||||||
|
- Use `describe` to get column info
|
||||||
|
- Show dtypes, null counts, sample values
|
||||||
|
|
||||||
|
3. **For database tables**:
|
||||||
|
- Use `pg_columns` for column details
|
||||||
|
- Use `st_tables` to check for PostGIS columns
|
||||||
|
- Show constraints and indexes if available
|
||||||
|
|
||||||
|
4. **Report**:
|
||||||
|
- Column name, type, nullable, default
|
||||||
|
- For PostGIS: geometry type, SRID
|
||||||
|
- For DataFrames: pandas dtype, null percentage
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```
|
||||||
|
/schema # List all tables and DataFrames
|
||||||
|
/schema customers # Show table schema
|
||||||
|
/schema sales_data # Show DataFrame schema
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Tools
|
||||||
|
|
||||||
|
Use these MCP tools:
|
||||||
|
- `pg_tables` - List database tables
|
||||||
|
- `pg_columns` - Get column info
|
||||||
|
- `pg_schemas` - List schemas
|
||||||
|
- `st_tables` - List PostGIS tables
|
||||||
|
- `describe` - Get DataFrame info
|
||||||
|
- `list_data` - List DataFrames
|
||||||
10
plugins/data-platform/hooks/hooks.json
Normal file
10
plugins/data-platform/hooks/hooks.json
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"hooks": {
|
||||||
|
"SessionStart": [
|
||||||
|
{
|
||||||
|
"type": "command",
|
||||||
|
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/startup-check.sh"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
54
plugins/data-platform/hooks/startup-check.sh
Executable file
54
plugins/data-platform/hooks/startup-check.sh
Executable file
@@ -0,0 +1,54 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# data-platform startup check hook
|
||||||
|
# Checks for common issues at session start
|
||||||
|
# All output MUST have [data-platform] prefix
|
||||||
|
|
||||||
|
PREFIX="[data-platform]"
|
||||||
|
|
||||||
|
# Check if MCP venv exists
|
||||||
|
PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-$(dirname "$(dirname "$(realpath "$0")")")}"
|
||||||
|
VENV_PATH="$PLUGIN_ROOT/mcp-servers/data-platform/.venv/bin/python"
|
||||||
|
|
||||||
|
if [[ ! -f "$VENV_PATH" ]]; then
|
||||||
|
echo "$PREFIX MCP venv missing - run /initial-setup or setup.sh"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check PostgreSQL configuration (optional - just warn if configured but failing)
|
||||||
|
POSTGRES_CONFIG="$HOME/.config/claude/postgres.env"
|
||||||
|
if [[ -f "$POSTGRES_CONFIG" ]]; then
|
||||||
|
source "$POSTGRES_CONFIG"
|
||||||
|
if [[ -n "${POSTGRES_URL:-}" ]]; then
|
||||||
|
# Quick connection test (5 second timeout)
|
||||||
|
RESULT=$("$VENV_PATH" -c "
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
async def test():
|
||||||
|
try:
|
||||||
|
import asyncpg
|
||||||
|
conn = await asyncpg.connect('$POSTGRES_URL', timeout=5)
|
||||||
|
await conn.close()
|
||||||
|
return 'OK'
|
||||||
|
except Exception as e:
|
||||||
|
return f'FAIL: {e}'
|
||||||
|
print(asyncio.run(test()))
|
||||||
|
" 2>/dev/null || echo "FAIL: asyncpg not installed")
|
||||||
|
|
||||||
|
if [[ "$RESULT" == "OK" ]]; then
|
||||||
|
# PostgreSQL OK - say nothing
|
||||||
|
:
|
||||||
|
elif [[ "$RESULT" == *"FAIL"* ]]; then
|
||||||
|
echo "$PREFIX PostgreSQL connection failed - check POSTGRES_URL"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check dbt project (if in a project with dbt_project.yml)
|
||||||
|
if [[ -f "dbt_project.yml" ]] || [[ -f "transform/dbt_project.yml" ]]; then
|
||||||
|
if ! command -v dbt &> /dev/null; then
|
||||||
|
echo "$PREFIX dbt CLI not found - dbt tools unavailable"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# All checks passed - say nothing
|
||||||
|
exit 0
|
||||||
1
plugins/data-platform/mcp-servers/data-platform
Symbolic link
1
plugins/data-platform/mcp-servers/data-platform
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
../../../mcp-servers/data-platform
|
||||||
@@ -9,5 +9,6 @@
|
|||||||
"homepage": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace/src/branch/main/plugins/doc-guardian/README.md",
|
"homepage": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace/src/branch/main/plugins/doc-guardian/README.md",
|
||||||
"repository": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace.git",
|
"repository": "https://gitea.hotserv.cloud/personal-projects/leo-claude-mktplace.git",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"keywords": ["documentation", "sync", "drift-detection", "automation", "hooks"]
|
"keywords": ["documentation", "sync", "drift-detection", "automation", "hooks"],
|
||||||
|
"commands": ["./commands/"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,8 +5,8 @@
|
|||||||
"matcher": "Write|Edit|MultiEdit",
|
"matcher": "Write|Edit|MultiEdit",
|
||||||
"hooks": [
|
"hooks": [
|
||||||
{
|
{
|
||||||
"type": "prompt",
|
"type": "command",
|
||||||
"prompt": "[doc-guardian] QUICK drift check (DO NOT block workflow):\n\n1. ONLY check if the modified file is referenced in README.md, CLAUDE.md, or API docs in the SAME directory\n2. Do NOT read files or perform deep analysis - just note potential drift based on file name/path\n3. If potential drift: output a single line like '[doc-guardian] Note: {filename} changed - may affect {doc}. Run /doc-sync to verify.'\n4. If no obvious drift: say nothing\n\nIMPORTANT: This is notification-only. Do NOT read documentation files, do NOT make changes, do NOT use any tools. Just a quick mental check based on the file path."
|
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/notify.sh"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
23
plugins/doc-guardian/hooks/notify.sh
Executable file
23
plugins/doc-guardian/hooks/notify.sh
Executable file
@@ -0,0 +1,23 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# doc-guardian notification hook
|
||||||
|
# Outputs a single notification for config file changes, nothing otherwise
|
||||||
|
# This is a command hook - guaranteed not to block workflow
|
||||||
|
|
||||||
|
# Read tool input from stdin (JSON with file_path)
|
||||||
|
INPUT=$(cat)
|
||||||
|
|
||||||
|
# Extract file_path from JSON input
|
||||||
|
FILE_PATH=$(echo "$INPUT" | grep -o '"file_path"[[:space:]]*:[[:space:]]*"[^"]*"' | head -1 | sed 's/.*"file_path"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/')
|
||||||
|
|
||||||
|
# If no file_path found, exit silently
|
||||||
|
if [ -z "$FILE_PATH" ]; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if file is in a config directory (commands/, agents/, skills/, hooks/)
|
||||||
|
if echo "$FILE_PATH" | grep -qE '/(commands|agents|skills|hooks)/'; then
|
||||||
|
echo "[doc-guardian] Config file modified. Run /doc-sync when ready."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Exit silently for all other files (no output = no blocking)
|
||||||
|
exit 0
|
||||||
@@ -10,7 +10,7 @@ git-flow streamlines common git operations with smart defaults, conventional com
|
|||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/commit` | Create commit with auto-generated conventional message |
|
| `/commit` | Create commit with auto-generated conventional message (with protected branch detection) |
|
||||||
| `/commit-push` | Commit and push in one operation |
|
| `/commit-push` | Commit and push in one operation |
|
||||||
| `/commit-merge` | Commit and merge into target branch |
|
| `/commit-merge` | Commit and merge into target branch |
|
||||||
| `/commit-sync` | Full sync: commit, push, and rebase on base branch |
|
| `/commit-sync` | Full sync: commit, push, and rebase on base branch |
|
||||||
@@ -79,7 +79,7 @@ chore/update-dependencies
|
|||||||
|
|
||||||
### Safety Checks
|
### Safety Checks
|
||||||
|
|
||||||
- Warns before commits to protected branches
|
- **Protected branch detection**: Before committing, checks if you're on a protected branch (main, master, development, staging, production by default). Offers to create a feature branch automatically instead of committing directly to protected branches.
|
||||||
- Confirms force push operations
|
- Confirms force push operations
|
||||||
- Prevents accidental branch deletion
|
- Prevents accidental branch deletion
|
||||||
|
|
||||||
|
|||||||
@@ -6,13 +6,44 @@ Create a git commit with an auto-generated conventional commit message based on
|
|||||||
|
|
||||||
## Behavior

### Step 1: Analyze Changes
### Step 1: Check for Protected Branch

Before any commit operation, check if the current branch is protected:

1. Get current branch: `git branch --show-current`
2. Check against `GIT_PROTECTED_BRANCHES` (default: `main,master,development,staging,production`)
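A minimal sketch of this check (illustrative; the command performs these steps with git directly rather than through this exact code):

```python
import os
import subprocess

DEFAULT_PROTECTED = "main,master,development,staging,production"

def on_protected_branch() -> bool:
    """Return True when the current branch is listed in GIT_PROTECTED_BRANCHES."""
    branch = subprocess.run(
        ["git", "branch", "--show-current"],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    protected = os.environ.get("GIT_PROTECTED_BRANCHES", DEFAULT_PROTECTED)
    return branch in {b.strip() for b in protected.split(",")}
```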
If on a protected branch, warn the user:
|
||||||
|
|
||||||
|
```
|
||||||
|
⚠️ You are on a protected branch: development
|
||||||
|
|
||||||
|
Protected branches typically have push restrictions that will prevent
|
||||||
|
direct commits from being pushed to the remote.
|
||||||
|
|
||||||
|
Options:
|
||||||
|
1. Create a feature branch and continue (Recommended)
|
||||||
|
2. Continue on this branch anyway (may fail on push)
|
||||||
|
3. Cancel
|
||||||
|
```
|
||||||
|
|
||||||
|
**If option 1 (create feature branch):**
|
||||||
|
- Prompt for branch type (feat/fix/chore/docs/refactor)
|
||||||
|
- Prompt for brief description
|
||||||
|
- Create branch using `/branch-start` naming conventions
|
||||||
|
- Continue with commit on the new branch
|
||||||
|
|
||||||
|
**If option 2 (continue anyway):**
|
||||||
|
- Proceed with commit (user accepts risk of push rejection)
|
||||||
|
- Display reminder: "Remember: push may be rejected by remote protection rules"
|
||||||
|
|
||||||
|
### Step 2: Analyze Changes
|
||||||
|
|
||||||
1. Run `git status` to see staged and unstaged changes
|
1. Run `git status` to see staged and unstaged changes
|
||||||
2. Run `git diff --staged` to examine staged changes
|
2. Run `git diff --staged` to examine staged changes
|
||||||
3. If nothing staged, prompt user to stage changes
|
3. If nothing staged, prompt user to stage changes
|
||||||
|
|
||||||
### Step 2: Generate Commit Message
|
### Step 3: Generate Commit Message
|
||||||
|
|
||||||
Analyze the changes and generate a conventional commit message:
|
Analyze the changes and generate a conventional commit message:
|
||||||
|
|
||||||
@@ -38,7 +69,7 @@ Analyze the changes and generate a conventional commit message:
|
|||||||
|
|
||||||
**Scope:** Determined from changed files (e.g., `auth`, `api`, `ui`)
|
**Scope:** Determined from changed files (e.g., `auth`, `api`, `ui`)
|
||||||
|
|
||||||
### Step 3: Confirm or Edit
|
### Step 4: Confirm or Edit
|
||||||
|
|
||||||
Present the generated message:
|
Present the generated message:
|
||||||
|
|
||||||
@@ -58,7 +89,7 @@ Options:
|
|||||||
4. Cancel
|
4. Cancel
|
||||||
```
|
```
|
||||||
|
|
||||||
### Step 4: Execute Commit
|
### Step 5: Execute Commit
|
||||||
|
|
||||||
If confirmed, run:
|
If confirmed, run:
|
||||||
|
|
||||||
@@ -75,6 +106,7 @@ EOF
|
|||||||
|
|
||||||
| Variable | Default | Description |
|
| Variable | Default | Description |
|
||||||
|----------|---------|-------------|
|
|----------|---------|-------------|
|
||||||
|
| `GIT_PROTECTED_BRANCHES` | `main,master,development,staging,production` | Branches that trigger protection warning |
|
||||||
| `GIT_COMMIT_STYLE` | `conventional` | Message style (conventional, simple, detailed) |
|
| `GIT_COMMIT_STYLE` | `conventional` | Message style (conventional, simple, detailed) |
|
||||||
| `GIT_SIGN_COMMITS` | `false` | Use GPG signing |
|
| `GIT_SIGN_COMMITS` | `false` | Use GPG signing |
|
||||||
| `GIT_CO_AUTHOR` | `true` | Include Claude co-author footer |
|
| `GIT_CO_AUTHOR` | `true` | Include Claude co-author footer |
|
||||||

@@ -13,6 +13,9 @@ pr-review conducts comprehensive code reviews using specialized agents for secur
| `/pr-review <pr#>` | Full multi-agent review |
| `/pr-summary <pr#>` | Quick summary without full review |
| `/pr-findings <pr#>` | Filter findings by category/confidence |
| `/initial-setup` | Full interactive setup wizard |
| `/project-init` | Quick project setup (system already configured) |
| `/project-sync` | Sync configuration with current git remote |

## Review Agents

@@ -2,8 +2,8 @@
"hooks": {
  "SessionStart": [
    {
      "type": "prompt",
      "type": "command",
      "prompt": "[pr-review] Perform silent startup checks:\n\n1. Check if MCP venvs exist at the installed marketplace location. If the marketplace is installed but venvs are missing, warn the user: '[pr-review] MCP venvs missing - run setup.sh from installed marketplace location'.\n\n2. Check if the project git remote matches .env configuration (GITEA_ORG/GITEA_REPO). If mismatch, warn: '[pr-review] Git remote mismatch - run /project-sync'.\n\nStay silent if all checks pass or not applicable. Be quick and non-blocking."
      "command": "${CLAUDE_PLUGIN_ROOT}/hooks/startup-check.sh"
    }
  ]
}
30
plugins/pr-review/hooks/startup-check.sh
Executable file
@@ -0,0 +1,30 @@
#!/bin/bash
# pr-review startup check hook
# Checks for common issues at session start
# All output MUST have [pr-review] prefix

PREFIX="[pr-review]"

# Check if MCP venv exists
PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-$(dirname "$(dirname "$(realpath "$0")")")}"
VENV_PATH="$PLUGIN_ROOT/mcp-servers/gitea/.venv/bin/python"

if [[ ! -f "$VENV_PATH" ]]; then
  echo "$PREFIX MCP venvs missing - run setup.sh from installed marketplace"
  exit 0
fi

# Check git remote vs .env config (only if .env exists)
if [[ -f ".env" ]]; then
  CONFIGURED_REPO=$(grep -E "^GITEA_REPO=" .env 2>/dev/null | cut -d'=' -f2 | tr -d '"' || true)
  if [[ -n "$CONFIGURED_REPO" ]]; then
    CURRENT_REMOTE=$(git remote get-url origin 2>/dev/null | sed 's/.*[:/]\([^/]*\/[^.]*\).*/\1/' || true)
    if [[ -n "$CURRENT_REMOTE" && "$CONFIGURED_REPO" != "$CURRENT_REMOTE" ]]; then
      echo "$PREFIX Git remote mismatch - run /pr-review:project-sync"
      exit 0
    fi
  fi
fi

# All checks passed - say nothing
exit 0
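To see what this SessionStart hook will print for a given project, it can be run by hand. The install path below is only an example; point `CLAUDE_PLUGIN_ROOT` at wherever the plugin is actually installed, or omit it to use the script's own fallback.

```bash
# Manual dry run of the hook (path is illustrative). Without CLAUDE_PLUGIN_ROOT,
# the script falls back to the directory above hooks/, as coded above.
CLAUDE_PLUGIN_ROOT="$HOME/.claude/plugins/leo-claude-mktplace/plugins/pr-review" \
  bash plugins/pr-review/hooks/startup-check.sh
```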
@@ -20,7 +20,7 @@ claude plugin install project-hygiene

## How It Works

The plugin registers a `task-completed` hook that runs after Claude completes any task. It:
The plugin registers a `PostToolUse` hook (on Write and Edit tools) that runs after Claude modifies files. It:

1. Scans for and deletes known temporary file patterns
2. Removes temporary directories (`__pycache__`, `.pytest_cache`, etc.)

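Because the hook now fires on PostToolUse rather than on task completion, it receives the tool payload on stdin and stays silent unless it actually deletes something. A quick local exercise of that behavior could look like this; the script path is an assumption based on the plugin layout, not confirmed by the diff.

```bash
# Simulate a PostToolUse invocation: the hook discards the JSON payload,
# so an empty object is enough. The path below is assumed, not authoritative.
echo '{}' | PROJECT_ROOT=. bash plugins/project-hygiene/hooks/cleanup.sh
# Expected output only when files were removed, e.g.:
# [project-hygiene] Cleaned 3 temp files
```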
@@ -1,365 +1,28 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
# project-hygiene cleanup hook
|
# project-hygiene cleanup hook
|
||||||
# Runs after task completion to clean up temp files and manage orphans
|
# Runs after file edits to clean up temp files
|
||||||
|
# All output MUST have [project-hygiene] prefix
|
||||||
|
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
# Configuration
|
PREFIX="[project-hygiene]"
|
||||||
|
|
||||||
|
# Read tool input from stdin (discard - we don't need it for cleanup)
|
||||||
|
cat > /dev/null
|
||||||
|
|
||||||
PROJECT_ROOT="${PROJECT_ROOT:-.}"
|
PROJECT_ROOT="${PROJECT_ROOT:-.}"
|
||||||
PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-$(dirname "$(dirname "$(realpath "$0")")")}"
|
|
||||||
CONFIG_FILE="${PROJECT_ROOT}/.hygiene.json"
|
|
||||||
LOG_DIR="${PROJECT_ROOT}/.dev/logs"
|
|
||||||
SCRATCH_DIR="${PROJECT_ROOT}/.dev/scratch"
|
|
||||||
LOG_FILE="${LOG_DIR}/hygiene-$(date +%Y%m%d-%H%M%S).log"
|
|
||||||
|
|
||||||
# Default allowed root files (can be overridden by .hygiene.json)
|
|
||||||
DEFAULT_ALLOWED_ROOT=(
|
|
||||||
".git"
|
|
||||||
".gitignore"
|
|
||||||
".gitattributes"
|
|
||||||
".editorconfig"
|
|
||||||
".env"
|
|
||||||
".env.example"
|
|
||||||
".env.local"
|
|
||||||
".nvmrc"
|
|
||||||
".node-version"
|
|
||||||
".python-version"
|
|
||||||
".ruby-version"
|
|
||||||
".tool-versions"
|
|
||||||
"README.md"
|
|
||||||
"LICENSE"
|
|
||||||
"CHANGELOG.md"
|
|
||||||
"CONTRIBUTING.md"
|
|
||||||
"CLAUDE.md"
|
|
||||||
"package.json"
|
|
||||||
"package-lock.json"
|
|
||||||
"yarn.lock"
|
|
||||||
"pnpm-lock.yaml"
|
|
||||||
"Makefile"
|
|
||||||
"Dockerfile"
|
|
||||||
"docker-compose.yml"
|
|
||||||
"docker-compose.yaml"
|
|
||||||
"Cargo.toml"
|
|
||||||
"Cargo.lock"
|
|
||||||
"go.mod"
|
|
||||||
"go.sum"
|
|
||||||
"requirements.txt"
|
|
||||||
"setup.py"
|
|
||||||
"pyproject.toml"
|
|
||||||
"poetry.lock"
|
|
||||||
"Gemfile"
|
|
||||||
"Gemfile.lock"
|
|
||||||
"tsconfig.json"
|
|
||||||
"jsconfig.json"
|
|
||||||
".eslintrc*"
|
|
||||||
".prettierrc*"
|
|
||||||
"vite.config.*"
|
|
||||||
"webpack.config.*"
|
|
||||||
"rollup.config.*"
|
|
||||||
".hygiene.json"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Temp file patterns to delete
|
|
||||||
TEMP_PATTERNS=(
|
|
||||||
"*.tmp"
|
|
||||||
"*.bak"
|
|
||||||
"*.swp"
|
|
||||||
"*.swo"
|
|
||||||
"*~"
|
|
||||||
".DS_Store"
|
|
||||||
"Thumbs.db"
|
|
||||||
"*.log"
|
|
||||||
"*.orig"
|
|
||||||
"*.pyc"
|
|
||||||
"*.pyo"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Directory patterns to delete
|
|
||||||
TEMP_DIRS=(
|
|
||||||
"__pycache__"
|
|
||||||
".pytest_cache"
|
|
||||||
".mypy_cache"
|
|
||||||
".ruff_cache"
|
|
||||||
"node_modules/.cache"
|
|
||||||
".next/cache"
|
|
||||||
".nuxt/.cache"
|
|
||||||
".turbo"
|
|
||||||
"*.egg-info"
|
|
||||||
".eggs"
|
|
||||||
"dist"
|
|
||||||
"build"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Orphan patterns to identify
|
|
||||||
ORPHAN_PATTERNS=(
|
|
||||||
"test_*.py"
|
|
||||||
"debug_*"
|
|
||||||
"*_backup.*"
|
|
||||||
"*_old.*"
|
|
||||||
"*_bak.*"
|
|
||||||
"*.backup"
|
|
||||||
"temp_*"
|
|
||||||
"tmp_*"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Initialize
|
|
||||||
DELETED_COUNT=0
|
DELETED_COUNT=0
|
||||||
WARNED_COUNT=0
|
|
||||||
ORPHAN_COUNT=0
|
|
||||||
MOVE_ORPHANS=false
|
|
||||||
|
|
||||||
# Logging function
|
|
||||||
log() {
|
|
||||||
local msg="[$(date +%H:%M:%S)] $1"
|
|
||||||
echo "$msg"
|
|
||||||
if [[ -f "$LOG_FILE" ]]; then
|
|
||||||
echo "$msg" >> "$LOG_FILE"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
log_action() {
|
|
||||||
local action="$1"
|
|
||||||
local target="$2"
|
|
||||||
log " $action: $target"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Load project-local config if exists
|
|
||||||
load_config() {
|
|
||||||
if [[ -f "$CONFIG_FILE" ]]; then
|
|
||||||
log "Loading config from $CONFIG_FILE"
|
|
||||||
|
|
||||||
# Check if move_orphans is enabled
|
|
||||||
if command -v jq &>/dev/null; then
|
|
||||||
MOVE_ORPHANS=$(jq -r '.move_orphans // false' "$CONFIG_FILE" 2>/dev/null || echo "false")
|
|
||||||
|
|
||||||
# Load additional allowed root files
|
|
||||||
local extra_allowed
|
|
||||||
extra_allowed=$(jq -r '.allowed_root_files // [] | .[]' "$CONFIG_FILE" 2>/dev/null || true)
|
|
||||||
if [[ -n "$extra_allowed" ]]; then
|
|
||||||
while IFS= read -r file; do
|
|
||||||
DEFAULT_ALLOWED_ROOT+=("$file")
|
|
||||||
done <<< "$extra_allowed"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Load additional temp patterns
|
|
||||||
local extra_temp
|
|
||||||
extra_temp=$(jq -r '.temp_patterns // [] | .[]' "$CONFIG_FILE" 2>/dev/null || true)
|
|
||||||
if [[ -n "$extra_temp" ]]; then
|
|
||||||
while IFS= read -r pattern; do
|
|
||||||
TEMP_PATTERNS+=("$pattern")
|
|
||||||
done <<< "$extra_temp"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Load ignore patterns (files to never touch)
|
|
||||||
IGNORE_PATTERNS=()
|
|
||||||
local ignore
|
|
||||||
ignore=$(jq -r '.ignore_patterns // [] | .[]' "$CONFIG_FILE" 2>/dev/null || true)
|
|
||||||
if [[ -n "$ignore" ]]; then
|
|
||||||
while IFS= read -r pattern; do
|
|
||||||
IGNORE_PATTERNS+=("$pattern")
|
|
||||||
done <<< "$ignore"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
log "Warning: jq not installed, using default config"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Check if file should be ignored
|
|
||||||
should_ignore() {
|
|
||||||
local file="$1"
|
|
||||||
local basename
|
|
||||||
basename=$(basename "$file")
|
|
||||||
|
|
||||||
for pattern in "${IGNORE_PATTERNS[@]:-}"; do
|
|
||||||
if [[ "$basename" == $pattern ]] || [[ "$file" == $pattern ]]; then
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
# Check if file is in allowed root list
|
|
||||||
is_allowed_root() {
|
|
||||||
local file="$1"
|
|
||||||
local basename
|
|
||||||
basename=$(basename "$file")
|
|
||||||
|
|
||||||
for allowed in "${DEFAULT_ALLOWED_ROOT[@]}"; do
|
|
||||||
# Support wildcards in allowed patterns
|
|
||||||
if [[ "$basename" == $allowed ]]; then
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
# Check if file matches orphan pattern
|
|
||||||
is_orphan() {
|
|
||||||
local file="$1"
|
|
||||||
local basename
|
|
||||||
basename=$(basename "$file")
|
|
||||||
|
|
||||||
for pattern in "${ORPHAN_PATTERNS[@]}"; do
|
|
||||||
if [[ "$basename" == $pattern ]]; then
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
# Setup directories
|
|
||||||
setup_dirs() {
|
|
||||||
mkdir -p "$LOG_DIR"
|
|
||||||
if [[ "$MOVE_ORPHANS" == "true" ]]; then
|
|
||||||
mkdir -p "$SCRATCH_DIR"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Start log file
|
|
||||||
echo "=== Project Hygiene Cleanup ===" > "$LOG_FILE"
|
|
||||||
echo "Started: $(date)" >> "$LOG_FILE"
|
|
||||||
echo "Project: $PROJECT_ROOT" >> "$LOG_FILE"
|
|
||||||
echo "" >> "$LOG_FILE"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Delete temp files
|
|
||||||
cleanup_temp_files() {
|
|
||||||
log "Cleaning temp files..."
|
|
||||||
|
|
||||||
for pattern in "${TEMP_PATTERNS[@]}"; do
|
|
||||||
while IFS= read -r -d '' file; do
|
|
||||||
if should_ignore "$file"; then
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
rm -f "$file"
|
|
||||||
log_action "DELETED" "$file"
|
|
||||||
((DELETED_COUNT++))
|
|
||||||
done < <(find "$PROJECT_ROOT" -name "$pattern" -type f -print0 2>/dev/null || true)
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
# Delete temp directories
|
|
||||||
cleanup_temp_dirs() {
|
|
||||||
log "Cleaning temp directories..."
|
|
||||||
|
|
||||||
for pattern in "${TEMP_DIRS[@]}"; do
|
|
||||||
while IFS= read -r -d '' dir; do
|
|
||||||
if should_ignore "$dir"; then
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
rm -rf "$dir"
|
|
||||||
log_action "DELETED DIR" "$dir"
|
|
||||||
((DELETED_COUNT++))
|
|
||||||
done < <(find "$PROJECT_ROOT" -name "$pattern" -type d -print0 2>/dev/null || true)
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
# Warn about unexpected root files
|
|
||||||
check_root_files() {
|
|
||||||
log "Checking root files..."
|
|
||||||
|
|
||||||
local unexpected_files=()
|
|
||||||
|
|
||||||
|
# Silently delete temp files
|
||||||
|
for pattern in "*.tmp" "*.bak" "*.swp" "*~" ".DS_Store"; do
|
||||||
while IFS= read -r -d '' file; do
|
while IFS= read -r -d '' file; do
|
||||||
local basename
|
rm -f "$file" 2>/dev/null && ((DELETED_COUNT++)) || true
|
||||||
basename=$(basename "$file")
|
done < <(find "$PROJECT_ROOT" -name "$pattern" -type f -print0 2>/dev/null || true)
|
||||||
|
done
|
||||||
|
|
||||||
# Skip directories
|
# Only output if we deleted something
|
||||||
[[ -d "$file" ]] && continue
|
if [[ $DELETED_COUNT -gt 0 ]]; then
|
||||||
|
echo "$PREFIX Cleaned $DELETED_COUNT temp files"
|
||||||
|
fi
|
||||||
|
|
||||||
# Skip if in allowed list
|
exit 0
|
||||||
is_allowed_root "$basename" && continue
|
|
||||||
|
|
||||||
# Skip if should be ignored
|
|
||||||
should_ignore "$basename" && continue
|
|
||||||
|
|
||||||
unexpected_files+=("$basename")
|
|
||||||
log_action "WARNING" "Unexpected root file: $basename"
|
|
||||||
((WARNED_COUNT++))
|
|
||||||
done < <(find "$PROJECT_ROOT" -maxdepth 1 -print0 2>/dev/null || true)
|
|
||||||
|
|
||||||
if [[ ${#unexpected_files[@]} -gt 0 ]]; then
|
|
||||||
log ""
|
|
||||||
log "⚠️ Unexpected files in project root:"
|
|
||||||
for f in "${unexpected_files[@]}"; do
|
|
||||||
log " - $f"
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Identify and handle orphaned files
|
|
||||||
handle_orphans() {
|
|
||||||
log "Checking for orphaned files..."
|
|
||||||
|
|
||||||
local orphan_files=()
|
|
||||||
|
|
||||||
for pattern in "${ORPHAN_PATTERNS[@]}"; do
|
|
||||||
while IFS= read -r -d '' file; do
|
|
||||||
if should_ignore "$file"; then
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
orphan_files+=("$file")
|
|
||||||
|
|
||||||
if [[ "$MOVE_ORPHANS" == "true" ]]; then
|
|
||||||
local dest="${SCRATCH_DIR}/$(basename "$file")"
|
|
||||||
# Handle duplicates
|
|
||||||
if [[ -f "$dest" ]]; then
|
|
||||||
dest="${SCRATCH_DIR}/$(date +%Y%m%d%H%M%S)_$(basename "$file")"
|
|
||||||
fi
|
|
||||||
mv "$file" "$dest"
|
|
||||||
log_action "MOVED" "$file -> $dest"
|
|
||||||
else
|
|
||||||
log_action "ORPHAN" "$file"
|
|
||||||
fi
|
|
||||||
((ORPHAN_COUNT++))
|
|
||||||
done < <(find "$PROJECT_ROOT" -name "$pattern" -type f -print0 2>/dev/null || true)
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ ${#orphan_files[@]} -gt 0 && "$MOVE_ORPHANS" != "true" ]]; then
|
|
||||||
log ""
|
|
||||||
log "📦 Orphaned files found (enable move_orphans in .hygiene.json to auto-move):"
|
|
||||||
for f in "${orphan_files[@]}"; do
|
|
||||||
log " - $f"
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Summary
|
|
||||||
print_summary() {
|
|
||||||
log ""
|
|
||||||
log "=== Cleanup Summary ==="
|
|
||||||
log " Deleted: $DELETED_COUNT items"
|
|
||||||
log " Warnings: $WARNED_COUNT unexpected root files"
|
|
||||||
log " Orphans: $ORPHAN_COUNT files"
|
|
||||||
if [[ "$MOVE_ORPHANS" == "true" ]]; then
|
|
||||||
log " Orphans moved to: $SCRATCH_DIR"
|
|
||||||
fi
|
|
||||||
log " Log file: $LOG_FILE"
|
|
||||||
log ""
|
|
||||||
}
|
|
||||||
|
|
||||||
# Main
|
|
||||||
main() {
|
|
||||||
cd "$PROJECT_ROOT" || exit 1
|
|
||||||
|
|
||||||
load_config
|
|
||||||
setup_dirs
|
|
||||||
|
|
||||||
log "Starting project hygiene cleanup..."
|
|
||||||
log ""
|
|
||||||
|
|
||||||
cleanup_temp_files
|
|
||||||
cleanup_temp_dirs
|
|
||||||
check_root_files
|
|
||||||
handle_orphans
|
|
||||||
|
|
||||||
print_summary
|
|
||||||
|
|
||||||
# Exit with warning code if issues found
|
|
||||||
if [[ $WARNED_COUNT -gt 0 || $ORPHAN_COUNT -gt 0 ]]; then
|
|
||||||
exit 0 # Still success, but logged warnings
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
main "$@"
|
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ Projman transforms a proven 15-sprint workflow into a distributable Claude Code
- **Milestones** - Sprint milestone management and tracking
- **Lessons Learned** - Systematic capture and search via Gitea Wiki
- **Branch-Aware Security** - Prevents accidental changes on production branches
- **Three-Agent Model** - Planner, Orchestrator, and Executor agents
- **Four-Agent Model** - Planner, Orchestrator, Executor, and Code Reviewer agents
- **CLI Tools Blocked** - All operations via MCP tools only (no `tea` or `gh`)

## Quick Start

@@ -461,20 +461,8 @@ projman/
├── .claude-plugin/
│   └── plugin.json            # Plugin manifest
├── .mcp.json                  # MCP server configuration
├── mcp-servers/               # Bundled MCP server
├── mcp-servers/
│   └── gitea/
│   └── gitea -> ../../../mcp-servers/gitea   # SYMLINK to shared MCP server
│       ├── .venv/
│       ├── requirements.txt
│       ├── mcp_server/
│       │   ├── server.py
│       │   ├── gitea_client.py
│       │   └── tools/
│       │       ├── issues.py
│       │       ├── labels.py
│       │       ├── wiki.py
│       │       ├── milestones.py
│       │       └── dependencies.py
│       └── tests/
├── commands/                  # Slash commands
│   ├── sprint-plan.md
│   ├── sprint-start.md
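With the bundled server replaced by a symlink to the shared MCP server, a quick sanity check from the repository root is to resolve the link and confirm it lands inside the shared `mcp-servers/` tree (a minimal sketch, assuming you run it from the marketplace repo root):

```bash
# Confirm the plugin's gitea entry is a symlink that resolves to the shared server.
link="plugins/projman/mcp-servers/gitea"
if [[ -L "$link" ]]; then
  readlink -f "$link"
else
  echo "Not a symlink - expected $link -> ../../../mcp-servers/gitea"
fi
```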
@@ -4,7 +4,7 @@ description: Run diagnostics and create structured issue in marketplace reposito

# Debug Report

Run diagnostic checks on projman MCP tools and create a structured issue in the marketplace repository for investigation.
Create structured issues in the marketplace repository - either from automated diagnostic tests OR from user-reported problems.

## Prerequisites

@@ -20,6 +20,101 @@ If not configured, ask the user for the marketplace repository path.

You MUST follow these steps in order. Do NOT skip any step.

### Step 0: Select Report Mode

Use AskUserQuestion to determine what the user wants to report:

```
What would you like to report?

[ ] Run automated diagnostics - Test MCP tools and report failures
[ ] Report an issue I experienced - Describe a problem with any plugin command
```

Store the selection as `REPORT_MODE`:
- "automated" → Continue to Step 1
- "user-reported" → Skip to Step 0.1

---
|
||||||
|
### Step 0.1: Gather User Feedback (User-Reported Mode Only)
|
||||||
|
|
||||||
|
If `REPORT_MODE` is "user-reported", gather structured feedback.
|
||||||
|
|
||||||
|
**Question 1: What were you trying to do?**
|
||||||
|
|
||||||
|
Use AskUserQuestion:
|
||||||
|
```
|
||||||
|
Which plugin/command were you using?
|
||||||
|
|
||||||
|
[ ] projman (sprint planning, issues, labels)
|
||||||
|
[ ] git-flow (commits, branches)
|
||||||
|
[ ] pr-review (pull request review)
|
||||||
|
[ ] cmdb-assistant (NetBox integration)
|
||||||
|
[ ] doc-guardian (documentation)
|
||||||
|
[ ] code-sentinel (security, refactoring)
|
||||||
|
[ ] Other - I'll describe it
|
||||||
|
```
|
||||||
|
|
||||||
|
Store as `AFFECTED_PLUGIN`.
|
||||||
|
|
||||||
|
Then ask for the specific command (free text):
|
||||||
|
```
|
||||||
|
What command or tool were you using? (e.g., /sprint-plan, virt_update_vm)
|
||||||
|
```
|
||||||
|
|
||||||
|
Store as `AFFECTED_COMMAND`.
|
||||||
|
|
||||||
|
**Question 2: What was your goal?**
|
||||||
|
|
||||||
|
```
|
||||||
|
Briefly describe what you were trying to accomplish:
|
||||||
|
```
|
||||||
|
|
||||||
|
Store as `USER_GOAL`.
|
||||||
|
|
||||||
|
**Question 3: What went wrong?**
|
||||||
|
|
||||||
|
Use AskUserQuestion:
|
||||||
|
```
|
||||||
|
What type of problem did you encounter?
|
||||||
|
|
||||||
|
[ ] Error message - Command failed with an error
|
||||||
|
[ ] Missing feature - Tool doesn't support what I need
|
||||||
|
[ ] Unexpected behavior - It worked but did the wrong thing
|
||||||
|
[ ] Documentation issue - Instructions were unclear or wrong
|
||||||
|
[ ] Other - I'll describe it
|
||||||
|
```
|
||||||
|
|
||||||
|
Store as `PROBLEM_TYPE`.
|
||||||
|
|
||||||
|
Then ask for details (free text):
|
||||||
|
```
|
||||||
|
Describe what happened. Include any error messages if applicable:
|
||||||
|
```
|
||||||
|
|
||||||
|
Store as `PROBLEM_DESCRIPTION`.
|
||||||
|
|
||||||
|
**Question 4: Expected vs Actual**
|
||||||
|
|
||||||
|
```
|
||||||
|
What did you expect to happen?
|
||||||
|
```
|
||||||
|
|
||||||
|
Store as `EXPECTED_BEHAVIOR`.
|
||||||
|
|
||||||
|
**Question 5: Workaround (optional)**
|
||||||
|
|
||||||
|
```
|
||||||
|
Did you find a workaround? If so, describe it (or skip):
|
||||||
|
```
|
||||||
|
|
||||||
|
Store as `WORKAROUND` (may be empty).
|
||||||
|
|
||||||
|
After gathering feedback, continue to Step 1 for context gathering, then skip to Step 5.1.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
### Step 1: Gather Project Context

Run these Bash commands to capture project information:

@@ -43,6 +138,46 @@ Store all values:
- `CURRENT_BRANCH`: Current branch name
- `WORKING_DIR`: Current working directory

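The exact commands are elided by the hunk above; a minimal sketch that yields the three stored values, assuming the `org/repo` form used elsewhere in this command (the sed pattern mirrors the one in the startup-check hooks), is:

```bash
# Sketch only - the command file's own list of Bash commands is authoritative.
PROJECT_REPO=$(git remote get-url origin 2>/dev/null | sed 's/.*[:/]\([^/]*\/[^.]*\).*/\1/')
CURRENT_BRANCH=$(git branch --show-current)
WORKING_DIR=$(pwd)
echo "PROJECT_REPO=$PROJECT_REPO CURRENT_BRANCH=$CURRENT_BRANCH WORKING_DIR=$WORKING_DIR"
```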
### Step 1.5: Detect Sprint Context

Determine if this debug issue should be associated with an active sprint.

**1. Check for active sprint milestone:**

```
mcp__plugin_projman_gitea__list_milestones(repo=PROJECT_REPO, state="open")
```

Store the first open milestone as `ACTIVE_SPRINT` (if any).

**2. Analyze branch context:**

| Branch Pattern | Context |
|----------------|---------|
| `feat/*`, `fix/*`, `issue-*` | Sprint work - likely related to current sprint |
| `main`, `master`, `development` | Production/standalone - not sprint-related |
| Other | Unknown - ask user |

**3. Determine sprint association:**

```
IF ACTIVE_SPRINT exists AND CURRENT_BRANCH matches sprint pattern (feat/*, fix/*, issue-*):
  → SPRINT_CONTEXT = "detected"
  → Ask user: "Active sprint detected: [SPRINT_NAME]. Is this bug related to sprint work?"
    Options:
    - Yes, add to sprint (will associate with milestone)
    - No, standalone fix (no milestone)
  → Store choice as ASSOCIATE_WITH_SPRINT (true/false)

ELSE IF ACTIVE_SPRINT exists AND CURRENT_BRANCH is main/development:
  → SPRINT_CONTEXT = "production"
  → ASSOCIATE_WITH_SPRINT = false (standalone fix, no question needed)

ELSE:
  → SPRINT_CONTEXT = "none"
  → ASSOCIATE_WITH_SPRINT = false
```
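The decision logic above is pseudocode; as a shell sketch, the branch-pattern half of it reduces to a single case statement (the milestone lookup still comes from the MCP call above).

```bash
# Branch classification only; ACTIVE_SPRINT still comes from list_milestones.
CURRENT_BRANCH=$(git branch --show-current)
case "$CURRENT_BRANCH" in
  feat/*|fix/*|issue-*)    SPRINT_CONTEXT="detected" ;;    # sprint work - ask the user
  main|master|development) SPRINT_CONTEXT="production" ;;  # standalone fix, no question
  *)                       SPRINT_CONTEXT="none" ;;
esac
echo "SPRINT_CONTEXT=$SPRINT_CONTEXT"
```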

### Step 2: Read Marketplace Configuration

```bash
@@ -51,7 +186,9 @@ grep PROJMAN_MARKETPLACE_REPO .env

Store as `MARKETPLACE_REPO`. If not found, ask the user.

### Step 3: Run Diagnostic Suite
### Step 3: Run Diagnostic Suite (Automated Mode Only)

**Skip this step if `REPORT_MODE` is "user-reported"** → Go to Step 5.1

Run each MCP tool with explicit `repo` parameter. Record success/failure and full response.

@@ -91,7 +228,9 @@ For each test, record:
- Status: PASS or FAIL
- Response or error message

### Step 4: Analyze Results
### Step 4: Analyze Results (Automated Mode Only)

**Skip this step if `REPORT_MODE` is "user-reported"** → Go to Step 5.1

Count failures and categorize errors:

@@ -105,7 +244,90 @@ Count failures and categorize errors:

For each failure, write a hypothesis about the likely cause.
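Step 2's grep is visible only as hunk context above; a complete, hedged version of that lookup, assuming a plain `KEY=value` line in `.env`, is:

```bash
# Minimal sketch of Step 2. Falls back to asking the user when the key is absent.
MARKETPLACE_REPO=$(grep -E '^PROJMAN_MARKETPLACE_REPO=' .env 2>/dev/null | cut -d'=' -f2- | tr -d '"')
if [[ -z "$MARKETPLACE_REPO" ]]; then
  echo "PROJMAN_MARKETPLACE_REPO not set in .env - ask the user for the marketplace repo"
fi
```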
### Step 5: Generate Issue Content
### Step 5: Generate Smart Labels (Automated Mode Only)

**Skip this step if `REPORT_MODE` is "user-reported"** → Go to Step 5.1

Generate appropriate labels based on the diagnostic results.

**1. Build context string for label suggestion:**

```
LABEL_CONTEXT = "Bug fix: " + [summary of main failure] + ". " +
                "Failed tools: " + [list of failed tool names] + ". " +
                "Error category: " + [detected error category from Step 4]
```

**2. Get suggested labels:**

```
mcp__plugin_projman_gitea__suggest_labels(
  repo=PROJECT_REPO,
  context=LABEL_CONTEXT
)
```

**3. Merge with base labels:**

```
BASE_LABELS = ["Type: Bug", "Source: Diagnostic", "Agent: Claude"]
SUGGESTED_LABELS = [result from suggest_labels]

# Combine, avoiding duplicates
FINAL_LABELS = BASE_LABELS + [label for label in SUGGESTED_LABELS if label not in BASE_LABELS]
```

The final label set should include:
- **Always**: `Type: Bug`, `Source: Diagnostic`, `Agent: Claude`
- **If detected**: `Component: *`, `Complexity: *`, `Risk: *`, `Priority: *`

After generating labels, continue to Step 6.

---
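The merge above is Python-style pseudocode; the same deduplication can be done in shell if a script ever needs it. The suggested labels below are made-up examples, not real output of `suggest_labels`.

```bash
# Runnable equivalent of the merge logic; SUGGESTED_LABELS values are illustrative.
BASE_LABELS=("Type: Bug" "Source: Diagnostic" "Agent: Claude")
SUGGESTED_LABELS=("Component: MCP Server" "Priority: High" "Type: Bug")

FINAL_LABELS=("${BASE_LABELS[@]}")
for label in "${SUGGESTED_LABELS[@]}"; do
  dup=false
  for existing in "${FINAL_LABELS[@]}"; do
    [[ "$existing" == "$label" ]] && dup=true && break
  done
  $dup || FINAL_LABELS+=("$label")
done
printf '%s\n' "${FINAL_LABELS[@]}"
```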
|
|
||||||
|
### Step 5.1: Generate Labels (User-Reported Mode Only)
|
||||||
|
|
||||||
|
**Only execute this step if `REPORT_MODE` is "user-reported"**
|
||||||
|
|
||||||
|
**1. Map problem type to labels:**
|
||||||
|
|
||||||
|
| PROBLEM_TYPE | Labels |
|
||||||
|
|--------------|--------|
|
||||||
|
| Error message | `Type: Bug` |
|
||||||
|
| Missing feature | `Type: Enhancement` |
|
||||||
|
| Unexpected behavior | `Type: Bug` |
|
||||||
|
| Documentation issue | `Type: Documentation` |
|
||||||
|
| Other | `Type: Bug` (default) |
|
||||||
|
|
||||||
|
**2. Map plugin to component:**
|
||||||
|
|
||||||
|
| AFFECTED_PLUGIN | Component Label |
|
||||||
|
|-----------------|-----------------|
|
||||||
|
| projman | `Component: Commands` |
|
||||||
|
| git-flow | `Component: Commands` |
|
||||||
|
| pr-review | `Component: Commands` |
|
||||||
|
| cmdb-assistant | `Component: API` |
|
||||||
|
| doc-guardian | `Component: Commands` |
|
||||||
|
| code-sentinel | `Component: Commands` |
|
||||||
|
| Other | *(no component label)* |
|
||||||
|
|
||||||
|
**3. Build final labels:**
|
||||||
|
|
||||||
|
```
|
||||||
|
BASE_LABELS = ["Source: User-Reported", "Agent: Claude"]
|
||||||
|
TYPE_LABEL = [mapped from PROBLEM_TYPE]
|
||||||
|
COMPONENT_LABEL = [mapped from AFFECTED_PLUGIN, if any]
|
||||||
|
|
||||||
|
FINAL_LABELS = BASE_LABELS + TYPE_LABEL + COMPONENT_LABEL
|
||||||
|
```
|
||||||
|
|
||||||
|
After generating labels, continue to Step 6.1.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Step 6: Generate Issue Content (Automated Mode Only)
|
||||||
|
|
||||||
|
**Skip this step if `REPORT_MODE` is "user-reported"** → Go to Step 6.1
|
||||||
|
|
||||||
Use this exact template:
|
Use this exact template:
|
||||||
|
|
||||||
@@ -179,85 +401,182 @@ Use this exact template:
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
*Generated by /debug-report - Labels: Type: Bug, Source: Diagnostic, Agent: Claude*
|
*Generated by /debug-report (automated) - Labels: Type: Bug, Source: Diagnostic, Agent: Claude*
|
||||||
```
|
```
|
||||||
|
|
||||||
### Step 6: Create Issue in Marketplace
|
After generating content, continue to Step 7.
|
||||||
|
|
||||||
**First, check if MCP tools are available.** Attempt to use an MCP tool. If you receive "tool not found", "not in function list", or similar error, the MCP server is not accessible in this session - use the curl fallback.
|
---
|
||||||
|
|
||||||
#### Option A: MCP Available (preferred)
|
### Step 6.1: Generate Issue Content (User-Reported Mode Only)
|
||||||
|
|
||||||
```
|
**Only execute this step if `REPORT_MODE` is "user-reported"**
|
||||||
mcp__plugin_projman_gitea__create_issue(
|
|
||||||
repo=MARKETPLACE_REPO,
|
Use this template for user-reported issues:
|
||||||
title="[Diagnostic] [summary of main failure]",
|
|
||||||
body=[generated content from Step 5],
|
```markdown
|
||||||
labels=["Type: Bug", "Source: Diagnostic", "Agent: Claude"]
|
## User-Reported Issue
|
||||||
)
|
|
||||||
|
**Reported**: [ISO timestamp]
|
||||||
|
**Reporter**: Claude Code via /debug-report (user feedback)
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
| Field | Value |
|
||||||
|
|-------|-------|
|
||||||
|
| Plugin | `[AFFECTED_PLUGIN]` |
|
||||||
|
| Command/Tool | `[AFFECTED_COMMAND]` |
|
||||||
|
| Repository | `[PROJECT_REPO]` |
|
||||||
|
| Working Directory | `[WORKING_DIR]` |
|
||||||
|
| Branch | `[CURRENT_BRANCH]` |
|
||||||
|
|
||||||
|
## Problem Description
|
||||||
|
|
||||||
|
### Goal
|
||||||
|
[USER_GOAL]
|
||||||
|
|
||||||
|
### What Happened
|
||||||
|
**Problem Type**: [PROBLEM_TYPE]
|
||||||
|
|
||||||
|
[PROBLEM_DESCRIPTION]
|
||||||
|
|
||||||
|
### Expected Behavior
|
||||||
|
[EXPECTED_BEHAVIOR]
|
||||||
|
|
||||||
|
## Workaround
|
||||||
|
[WORKAROUND if provided, otherwise "None identified"]
|
||||||
|
|
||||||
|
## Investigation Hints
|
||||||
|
|
||||||
|
Based on the affected plugin/command, relevant files to check:
|
||||||
|
|
||||||
|
[Generate based on AFFECTED_PLUGIN:]
|
||||||
|
|
||||||
|
**projman:**
|
||||||
|
- `plugins/projman/commands/[AFFECTED_COMMAND].md`
|
||||||
|
- `mcp-servers/gitea/mcp_server/tools/*.py`
|
||||||
|
|
||||||
|
**git-flow:**
|
||||||
|
- `plugins/git-flow/commands/[AFFECTED_COMMAND].md`
|
||||||
|
|
||||||
|
**pr-review:**
|
||||||
|
- `plugins/pr-review/commands/[AFFECTED_COMMAND].md`
|
||||||
|
- `mcp-servers/gitea/mcp_server/tools/pull_requests.py`
|
||||||
|
|
||||||
|
**cmdb-assistant:**
|
||||||
|
- `plugins/cmdb-assistant/commands/[AFFECTED_COMMAND].md`
|
||||||
|
- `mcp-servers/netbox/mcp_server/tools/*.py`
|
||||||
|
- `mcp-servers/netbox/mcp_server/server.py` (tool schemas)
|
||||||
|
|
||||||
|
**doc-guardian / code-sentinel:**
|
||||||
|
- `plugins/[plugin]/commands/[AFFECTED_COMMAND].md`
|
||||||
|
- `plugins/[plugin]/hooks/*.md`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Generated by /debug-report (user feedback) - Labels: [FINAL_LABELS]*
|
||||||
```
|
```
|
||||||
|
|
||||||
If labels don't exist, create issue without labels.
|
After generating content, continue to Step 7.
|
||||||
|
|
||||||
#### Option B: MCP Unavailable - Use curl Fallback
|
---
|
||||||
|
|
||||||
If MCP tools are not available (the very issue you may be diagnosing), use this fallback:
|
### Step 7: Create Issue in Marketplace
|
||||||
|
|
||||||
**1. Check for Gitea credentials:**
|
**IMPORTANT:** Always use curl to create issues in the marketplace repo. This avoids branch protection restrictions and MCP context issues that can block issue creation when working on protected branches.
|
||||||
|
|
||||||
|
**1. Load Gitea credentials:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
if [[ -f ~/.config/claude/gitea.env ]]; then
|
if [[ -f ~/.config/claude/gitea.env ]]; then
|
||||||
source ~/.config/claude/gitea.env
|
source ~/.config/claude/gitea.env
|
||||||
echo "Credentials found. API URL: $GITEA_API_URL"
|
echo "Credentials loaded. API URL: $GITEA_API_URL"
|
||||||
else
|
else
|
||||||
echo "No credentials at ~/.config/claude/gitea.env"
|
echo "ERROR: No credentials at ~/.config/claude/gitea.env"
|
||||||
fi
|
fi
|
||||||
```
|
```
|
||||||
|
|
||||||
**2. If credentials exist, create issue via curl with proper JSON escaping:**
|
**2. Fetch label IDs from marketplace repo:**
|
||||||
|
|
||||||
Create secure temp files and save content:
|
Labels depend on `REPORT_MODE`:
|
||||||
|
|
||||||
|
**Automated mode:**
|
||||||
|
- `Source/Diagnostic` (always)
|
||||||
|
- `Type/Bug` (always)
|
||||||
|
|
||||||
|
**User-reported mode:**
|
||||||
|
- `Source/User-Reported` (always)
|
||||||
|
- Type label from Step 5.1 (Bug, Enhancement, or Documentation)
|
||||||
|
- Component label from Step 5.1 (if applicable)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Fetch all labels from marketplace repo
|
||||||
|
LABELS_JSON=$(curl -s "${GITEA_API_URL}/repos/${MARKETPLACE_REPO}/labels" \
|
||||||
|
-H "Authorization: token ${GITEA_API_TOKEN}")
|
||||||
|
|
||||||
|
# Extract label IDs based on FINAL_LABELS from Step 5 or 5.1
|
||||||
|
# Build LABEL_IDS array with IDs of labels that exist in the repo
|
||||||
|
# Example for automated mode:
|
||||||
|
SOURCE_ID=$(echo "$LABELS_JSON" | jq -r '.[] | select(.name == "Source/Diagnostic") | .id')
|
||||||
|
TYPE_ID=$(echo "$LABELS_JSON" | jq -r '.[] | select(.name == "Type/Bug") | .id')
|
||||||
|
|
||||||
|
# Example for user-reported mode (adjust based on FINAL_LABELS):
|
||||||
|
# SOURCE_ID=$(echo "$LABELS_JSON" | jq -r '.[] | select(.name == "Source/User-Reported") | .id')
|
||||||
|
# TYPE_ID=$(echo "$LABELS_JSON" | jq -r '.[] | select(.name == "[TYPE_LABEL]") | .id')
|
||||||
|
|
||||||
|
# Build label array from found IDs
|
||||||
|
LABEL_IDS="[$(echo "$SOURCE_ID,$TYPE_ID" | sed 's/,,*/,/g; s/^,//; s/,$//')]"
|
||||||
|
echo "Label IDs to apply: $LABEL_IDS"
|
||||||
|
```
|
||||||
|
|
||||||
|
**3. Create issue with labels via curl:**
|
||||||
|
|
||||||
|
**Title format depends on `REPORT_MODE`:**
|
||||||
|
- Automated: `[Diagnostic] [summary of main failure]`
|
||||||
|
- User-reported: `[AFFECTED_PLUGIN] [brief summary of PROBLEM_DESCRIPTION]`
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Create temp files with restrictive permissions
|
# Create temp files with restrictive permissions
|
||||||
DIAG_TITLE=$(mktemp -p /tmp -m 600 diag-title.XXXXXX)
|
DIAG_TITLE=$(mktemp -t diag-title.XXXXXX)
|
||||||
DIAG_BODY=$(mktemp -p /tmp -m 600 diag-body.XXXXXX)
|
DIAG_BODY=$(mktemp -t diag-body.XXXXXX)
|
||||||
DIAG_PAYLOAD=$(mktemp -p /tmp -m 600 diag-payload.XXXXXX)
|
DIAG_PAYLOAD=$(mktemp -t diag-payload.XXXXXX)
|
||||||
|
|
||||||
# Save title
|
# Save title (format depends on REPORT_MODE)
|
||||||
echo "[Diagnostic] [summary of main failure]" > "$DIAG_TITLE"
|
# Automated: "[Diagnostic] [summary of main failure]"
|
||||||
|
# User-reported: "[AFFECTED_PLUGIN] [brief summary]"
|
||||||
|
echo "[Title based on REPORT_MODE]" > "$DIAG_TITLE"
|
||||||
|
|
||||||
# Save body (paste Step 5 content) - heredoc delimiter prevents shell expansion
|
# Save body (paste Step 6 or 6.1 content) - heredoc delimiter prevents shell expansion
|
||||||
cat > "$DIAG_BODY" << 'DIAGNOSTIC_EOF'
|
cat > "$DIAG_BODY" << 'DIAGNOSTIC_EOF'
|
||||||
[Paste the full issue content from Step 5 here]
|
[Paste the full issue content from Step 6 or 6.1 here]
|
||||||
DIAGNOSTIC_EOF
|
DIAGNOSTIC_EOF
|
||||||
```
|
|
||||||
|
|
||||||
Construct JSON safely using jq's --rawfile (avoids command substitution):
|
# Build JSON payload with labels using jq
|
||||||
|
|
||||||
```bash
|
|
||||||
# Build JSON payload using jq with --rawfile for safe content handling
|
|
||||||
jq -n \
|
jq -n \
|
||||||
--rawfile title "$DIAG_TITLE" \
|
--rawfile title "$DIAG_TITLE" \
|
||||||
--rawfile body "$DIAG_BODY" \
|
--rawfile body "$DIAG_BODY" \
|
||||||
'{title: ($title | rtrimstr("\n")), body: $body}' > "$DIAG_PAYLOAD"
|
--argjson labels "$LABEL_IDS" \
|
||||||
|
'{title: ($title | rtrimstr("\n")), body: $body, labels: $labels}' > "$DIAG_PAYLOAD"
|
||||||
|
|
||||||
# Create issue using the JSON file
|
# Create issue using the JSON file
|
||||||
curl -s -X POST "${GITEA_API_URL}/repos/${MARKETPLACE_REPO}/issues" \
|
RESULT=$(curl -s -X POST "${GITEA_API_URL}/repos/${MARKETPLACE_REPO}/issues" \
|
||||||
-H "Authorization: token ${GITEA_API_TOKEN}" \
|
-H "Authorization: token ${GITEA_API_TOKEN}" \
|
||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
-d @"$DIAG_PAYLOAD" | jq '.html_url // .'
|
-d @"$DIAG_PAYLOAD")
|
||||||
|
|
||||||
|
# Extract and display the issue URL
|
||||||
|
echo "$RESULT" | jq -r '.html_url // "Error: " + (.message // "Unknown error")'
|
||||||
|
|
||||||
# Secure cleanup
|
# Secure cleanup
|
||||||
rm -f "$DIAG_TITLE" "$DIAG_BODY" "$DIAG_PAYLOAD"
|
rm -f "$DIAG_TITLE" "$DIAG_BODY" "$DIAG_PAYLOAD"
|
||||||
```
|
```
|
||||||
|
|
||||||
**3. If no credentials found, save report locally:**
|
**4. If no credentials found, save report locally:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
REPORT_FILE=$(mktemp -p /tmp -m 600 diagnostic-report-XXXXXX.md)
|
REPORT_FILE=$(mktemp -t diagnostic-report-XXXXXX.md)
|
||||||
cat > "$REPORT_FILE" << 'DIAGNOSTIC_EOF'
|
cat > "$REPORT_FILE" << 'DIAGNOSTIC_EOF'
|
||||||
[Paste the full issue content from Step 5 here]
|
[Paste the full issue content from Step 6 here]
|
||||||
DIAGNOSTIC_EOF
|
DIAGNOSTIC_EOF
|
||||||
echo "Report saved to: $REPORT_FILE"
|
echo "Report saved to: $REPORT_FILE"
|
||||||
```
|
```
|
||||||
@@ -265,7 +584,7 @@ echo "Report saved to: $REPORT_FILE"
|
|||||||
Then inform the user:
|
Then inform the user:
|
||||||
|
|
||||||
```
|
```
|
||||||
MCP tools are unavailable and no Gitea credentials found at ~/.config/claude/gitea.env.
|
No Gitea credentials found at ~/.config/claude/gitea.env.
|
||||||
|
|
||||||
Diagnostic report saved to: [REPORT_FILE]
|
Diagnostic report saved to: [REPORT_FILE]
|
||||||
|
|
||||||
@@ -274,10 +593,11 @@ To create the issue manually:
|
|||||||
2. Or create issue directly at: http://gitea.hotserv.cloud/[MARKETPLACE_REPO]/issues/new
|
2. Or create issue directly at: http://gitea.hotserv.cloud/[MARKETPLACE_REPO]/issues/new
|
||||||
```
|
```
|
||||||
|
|
||||||
### Step 7: Report to User
|
### Step 8: Report to User
|
||||||
|
|
||||||
Display summary:
|
Display summary based on `REPORT_MODE`:
|
||||||
|
|
||||||
|
**Automated Mode:**
|
||||||
```
|
```
|
||||||
Debug Report Complete
|
Debug Report Complete
|
||||||
=====================
|
=====================
|
||||||
@@ -299,17 +619,38 @@ Next Steps:
|
|||||||
3. Select issue #[N] to investigate
|
3. Select issue #[N] to investigate
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**User-Reported Mode:**
|
||||||
|
```
|
||||||
|
Issue Report Complete
|
||||||
|
=====================
|
||||||
|
|
||||||
|
Plugin: [AFFECTED_PLUGIN]
|
||||||
|
Command: [AFFECTED_COMMAND]
|
||||||
|
Problem: [PROBLEM_TYPE]
|
||||||
|
|
||||||
|
Issue Created: [issue URL]
|
||||||
|
|
||||||
|
Your feedback has been captured. The development team will
|
||||||
|
investigate and may follow up with questions.
|
||||||
|
|
||||||
|
Next Steps:
|
||||||
|
1. Switch to marketplace repo: cd [marketplace path]
|
||||||
|
2. Run: /debug-review
|
||||||
|
3. Select issue #[N] to investigate
|
||||||
|
```
|
||||||
|

## DO NOT

- **DO NOT** attempt to fix anything - only report
- **DO NOT** create issues if all tests pass (just report success)
- **DO NOT** create issues if all automated tests pass (unless in user-reported mode)
- **DO NOT** skip any diagnostic test
- **DO NOT** skip any diagnostic test in automated mode
- **DO NOT** call MCP tools without the `repo` parameter
- **DO NOT** ask user questions during execution - run autonomously
- **DO NOT** skip user questions in user-reported mode - gather complete feedback
- **DO NOT** use MCP tools to create issues in the marketplace - always use curl (avoids branch restrictions)

## If All Tests Pass
## If All Tests Pass (Automated Mode Only)

If all 5 tests pass, report success without creating an issue:
If all 5 tests pass in automated mode, report success without creating an issue:

```
Debug Report Complete
@@ -322,8 +663,8 @@ Failed: 0

All diagnostics passed. No issues to report.

If you're experiencing a specific problem, please describe it
If you're experiencing a specific problem, run /debug-report again
and I can create a manual bug report.
and select "Report an issue I experienced" to describe it.
```

## Troubleshooting

@@ -336,7 +677,12 @@ and I can create a manual bug report.
- Check if in a git repository: `git rev-parse --git-dir`
- If not a git repo, ask user for the repository path

**MCP tools not available**
**Gitea credentials not found**
- Use the curl fallback in Step 6, Option B
- Credentials must be at `~/.config/claude/gitea.env`
- Requires Gitea credentials at `~/.config/claude/gitea.env`
- If missing, the report will be saved locally for manual submission
- If no credentials, report will be saved locally for manual submission
- See docs/CONFIGURATION.md for setup instructions

**Labels not applied to issue**
- Verify labels exist in the marketplace repo: `Source/Diagnostic`, `Type/Bug`
- Check the label fetch output in Step 7.2 for errors
- If labels don't exist, create them first with `/labels-sync` in the marketplace repo
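To confirm the required labels exist before blaming the issue-creation step, they can be listed directly against the Gitea API with the same credentials Step 7 uses. The repository value below is a placeholder; substitute the `MARKETPLACE_REPO` read in Step 2, and adjust the expected names if the repo uses the colon-space label format.

```bash
# List label names in the marketplace repo; expect Source/Diagnostic and Type/Bug
# (or Source/User-Reported for user-reported issues) to appear in the output.
source ~/.config/claude/gitea.env
MARKETPLACE_REPO="org/leo-claude-mktplace"   # placeholder - use the Step 2 value
curl -s "${GITEA_API_URL}/repos/${MARKETPLACE_REPO}/labels" \
  -H "Authorization: token ${GITEA_API_TOKEN}" | jq -r '.[].name' | sort
```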
||||||
|
|||||||
@@ -195,6 +195,74 @@ Does this analysis match your understanding of the problem?
|
|||||||
|
|
||||||
Do NOT proceed until user approves.
|
Do NOT proceed until user approves.
|
||||||
|
|
||||||
|
### Step 9.5: Search Lessons Learned
|
||||||
|
|
||||||
|
Before proposing a fix, search for relevant lessons from past fixes.
|
||||||
|
|
||||||
|
**1. Extract search tags from the issue:**
|
||||||
|
|
||||||
|
```
|
||||||
|
SEARCH_TAGS = []
|
||||||
|
# Add tool names
|
||||||
|
for each failed_tool in issue:
|
||||||
|
SEARCH_TAGS.append(tool_name) # e.g., "get_labels", "validate_repo_org"
|
||||||
|
|
||||||
|
# Add error category
|
||||||
|
SEARCH_TAGS.append(error_category) # e.g., "parameter-format", "authentication"
|
||||||
|
|
||||||
|
# Add component if identifiable
|
||||||
|
if error relates to MCP server:
|
||||||
|
SEARCH_TAGS.append("mcp")
|
||||||
|
if error relates to command:
|
||||||
|
SEARCH_TAGS.append("command")
|
||||||
|
```
|
||||||
|
|
||||||
|
**2. Search lessons learned:**
|
||||||
|
|
||||||
|
```
|
||||||
|
mcp__plugin_projman_gitea__search_lessons(
|
||||||
|
repo=REPO_NAME,
|
||||||
|
tags=SEARCH_TAGS,
|
||||||
|
limit=5
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**3. Also search by error keywords:**
|
||||||
|
|
||||||
|
```
|
||||||
|
mcp__plugin_projman_gitea__search_lessons(
|
||||||
|
repo=REPO_NAME,
|
||||||
|
query=[key error message words],
|
||||||
|
limit=5
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**4. Display relevant lessons (if any):**
|
||||||
|
|
||||||
|
```
|
||||||
|
Related Lessons Learned
|
||||||
|
=======================
|
||||||
|
|
||||||
|
Found [N] relevant lessons from past fixes:
|
||||||
|
|
||||||
|
📚 Lesson: "Sprint 14 - Parameter validation in MCP tools"
|
||||||
|
Tags: mcp, get_labels, parameter-format
|
||||||
|
Summary: Always validate repo parameter format before API calls
|
||||||
|
Prevention: Add format check at function entry
|
||||||
|
|
||||||
|
📚 Lesson: "Sprint 12 - Graceful fallback for missing config"
|
||||||
|
Tags: configuration, fallback
|
||||||
|
Summary: Commands should work even without .env
|
||||||
|
Prevention: Check for env vars, use sensible defaults
|
||||||
|
|
||||||
|
These lessons may inform your fix approach.
|
||||||
|
```
|
||||||
|
|
||||||
|
If no lessons found, display:
|
||||||
|
```
|
||||||
|
No related lessons found. This may be a new type of issue.
|
||||||
|
```
|
||||||
|
|
||||||
### Step 10: Propose Fix Approach
|
### Step 10: Propose Fix Approach
|
||||||
|
|
||||||
Based on the analysis, propose a specific fix:
|
Based on the analysis, propose a specific fix:
|
||||||
@@ -342,7 +410,118 @@ Next Steps:
|
|||||||
1. Review and merge PR #81
|
1. Review and merge PR #81
|
||||||
2. In test project, pull latest plugin version
|
2. In test project, pull latest plugin version
|
||||||
3. Run /debug-report to verify fix
|
3. Run /debug-report to verify fix
|
||||||
4. If passing, close issue #80
|
4. Come back and run Step 15 to close issue and capture lesson
|
||||||
|
```
|
||||||
|
|
||||||
|
### Step 15: Verify, Close, and Capture Lesson
|
||||||
|
|
||||||
|
**This step runs AFTER the user has verified the fix works.**
|
||||||
|
|
||||||
|
When user returns and confirms the fix is working:
|
||||||
|
|
||||||
|
**1. Close the issue:**
|
||||||
|
|
||||||
|
```
|
||||||
|
mcp__plugin_projman_gitea__update_issue(
|
||||||
|
repo=REPO_NAME,
|
||||||
|
issue_number=ISSUE_NUMBER,
|
||||||
|
state="closed"
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**2. Ask about lesson capture:**
|
||||||
|
|
||||||
|
Use AskUserQuestion:
|
||||||
|
|
||||||
|
```
|
||||||
|
This fix addressed [ERROR_TYPE] in [COMPONENT].
|
||||||
|
|
||||||
|
Would you like to capture this as a lesson learned?
|
||||||
|
|
||||||
|
Options:
|
||||||
|
- Yes, capture lesson (helps avoid similar issues in future)
|
||||||
|
- No, skip (trivial fix or already documented)
|
||||||
|
```
|
||||||
|
|
||||||
|
**3. If user chooses Yes, auto-generate lesson content:**
|
||||||
|
|
||||||
|
```
|
||||||
|
LESSON_TITLE = "Sprint [N] - [Brief description of fix]"
|
||||||
|
# Example: "Sprint 17 - MCP parameter validation"
|
||||||
|
|
||||||
|
LESSON_CONTENT = """
|
||||||
|
## Context
|
||||||
|
|
||||||
|
[What was happening when the issue occurred]
|
||||||
|
- Command/tool being used: [FAILED_TOOL]
|
||||||
|
- Error encountered: [ERROR_MESSAGE]
|
||||||
|
|
||||||
|
## Problem
|
||||||
|
|
||||||
|
[Root cause identified during investigation]
|
||||||
|
|
||||||
|
## Solution
|
||||||
|
|
||||||
|
[What was changed to fix it]
|
||||||
|
- Files modified: [LIST]
|
||||||
|
- PR: #[PR_NUMBER]
|
||||||
|
|
||||||
|
## Prevention
|
||||||
|
|
||||||
|
[How to avoid this in the future]
|
||||||
|
|
||||||
|
## Related
|
||||||
|
|
||||||
|
- Issue: #[ISSUE_NUMBER]
|
||||||
|
- PR: #[PR_NUMBER]
|
||||||
|
"""
|
||||||
|
|
||||||
|
LESSON_TAGS = [
|
||||||
|
tool_name, # e.g., "get_labels"
|
||||||
|
error_category, # e.g., "parameter-format"
|
||||||
|
component, # e.g., "mcp", "command"
|
||||||
|
"bug-fix"
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
**4. Show lesson preview and ask for approval:**
|
||||||
|
|
||||||
|
```
|
||||||
|
Lesson Preview
|
||||||
|
==============
|
||||||
|
|
||||||
|
Title: [LESSON_TITLE]
|
||||||
|
Tags: [LESSON_TAGS]
|
||||||
|
|
||||||
|
Content:
|
||||||
|
[LESSON_CONTENT]
|
||||||
|
|
||||||
|
Save this lesson? (Y/N/Edit)
|
||||||
|
```
|
||||||
|
|
||||||
|
**5. If approved, create the lesson:**
|
||||||
|
|
||||||
|
```
|
||||||
|
mcp__plugin_projman_gitea__create_lesson(
|
||||||
|
repo=REPO_NAME,
|
||||||
|
title=LESSON_TITLE,
|
||||||
|
content=LESSON_CONTENT,
|
||||||
|
tags=LESSON_TAGS,
|
||||||
|
category="sprints"
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
**6. Report completion:**
|
||||||
|
|
||||||
|
```
|
||||||
|
Issue Closed & Lesson Captured
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Issue #[N]: CLOSED
|
||||||
|
Lesson: "[LESSON_TITLE]" saved to wiki
|
||||||
|
|
||||||
|
This lesson will be surfaced in future /debug-review
|
||||||
|
sessions when similar errors are encountered.
|
||||||
```
|
```
|
||||||
|
|
||||||
## DO NOT
|
## DO NOT
|
||||||
@@ -350,8 +529,9 @@ Next Steps:
|
|||||||
- **DO NOT** skip reading relevant files - this is MANDATORY
|
- **DO NOT** skip reading relevant files - this is MANDATORY
|
||||||
- **DO NOT** proceed past approval gates without user confirmation
|
- **DO NOT** proceed past approval gates without user confirmation
|
||||||
- **DO NOT** guess at fixes without evidence from code
|
- **DO NOT** guess at fixes without evidence from code
|
||||||
- **DO NOT** close issues - let user verify fix works first
|
- **DO NOT** close issues until user confirms fix works (Step 15)
|
||||||
- **DO NOT** commit directly to development or main branches
|
- **DO NOT** commit directly to development or main branches
|
||||||
|
- **DO NOT** skip the lessons learned search - past fixes inform better solutions
|
||||||
|
|
||||||
## If Investigation Finds No Bug
|
## If Investigation Finds No Bug
|
||||||
|
|
||||||
|
|||||||
@@ -62,12 +62,20 @@ Verify these required label categories exist:

### Step 6: Create Missing Labels (if any)

For each missing required label, call:
Use `create_label_smart` which automatically creates labels at the correct level:
- **Organization level**: Type/*, Priority/*, Complexity/*, Effort/*, Risk/*, Source/*, Agent/*
- **Repository level**: Component/*, Tech/*

```
mcp__plugin_projman_gitea__create_label(repo=REPO_NAME, name="Type: Bug", color="d73a4a")
mcp__plugin_projman_gitea__create_label_smart(repo=REPO_NAME, name="Type/Bug", color="d73a4a")
```

This automatically detects whether to create at org or repo level based on the category.

**Alternative (explicit control):**
- Org labels: `create_org_label(org="org-name", name="Type/Bug", color="d73a4a")`
- Repo labels: `create_label(repo=REPO_NAME, name="Component/Backend", color="5319e7")`

Use the label format that matches existing labels in the repo (slash `/` or colon-space `: `).

### Step 7: Report Results
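The org-versus-repo split that `create_label_smart` applies can be summarized as a mapping over the label's category prefix. The sketch below only restates the two bullet lists above; it is not the tool's actual implementation.

```bash
# Restates the category -> level rule from Step 6; purely illustrative.
label="Type/Bug"
case "${label%%/*}" in
  Type|Priority|Complexity|Effort|Risk|Source|Agent) level="organization" ;;
  Component|Tech)                                    level="repository" ;;
  *)                                                 level="repository" ;;
esac
echo "$label -> created at $level level"
```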
@@ -2,8 +2,8 @@
|
|||||||
"hooks": {
|
"hooks": {
|
||||||
"SessionStart": [
|
"SessionStart": [
|
||||||
{
|
{
|
||||||
"type": "prompt",
|
"type": "command",
|
||||||
"prompt": "[projman] Perform silent startup checks:\n\n1. Check if MCP venvs exist at the installed marketplace location. If the marketplace is installed but venvs are missing, warn the user: '[projman] MCP venvs missing - run setup.sh from installed marketplace location'.\n\n2. Check if the project git remote matches .env configuration (GITEA_ORG/GITEA_REPO). If mismatch, warn: '[projman] Git remote mismatch - run /project-sync'.\n\nStay silent if all checks pass or not applicable. Be quick and non-blocking."
|
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/startup-check.sh"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
30
plugins/projman/hooks/startup-check.sh
Executable file
30
plugins/projman/hooks/startup-check.sh
Executable file
@@ -0,0 +1,30 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# projman startup check hook
|
||||||
|
# Checks for common issues at session start
|
||||||
|
# All output MUST have [projman] prefix
|
||||||
|
|
||||||
|
PREFIX="[projman]"
|
||||||
|
|
||||||
|
# Check if MCP venv exists
|
||||||
|
PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT:-$(dirname "$(dirname "$(realpath "$0")")")}"
|
||||||
|
VENV_PATH="$PLUGIN_ROOT/mcp-servers/gitea/.venv/bin/python"
|
||||||
|
|
||||||
|
if [[ ! -f "$VENV_PATH" ]]; then
|
||||||
|
echo "$PREFIX MCP venvs missing - run setup.sh from installed marketplace"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check git remote vs .env config (only if .env exists)
|
||||||
|
if [[ -f ".env" ]]; then
|
||||||
|
CONFIGURED_REPO=$(grep -E "^GITEA_REPO=" .env 2>/dev/null | cut -d'=' -f2 | tr -d '"' || true)
|
||||||
|
if [[ -n "$CONFIGURED_REPO" ]]; then
|
||||||
|
CURRENT_REMOTE=$(git remote get-url origin 2>/dev/null | sed 's/.*[:/]\([^/]*\/[^.]*\).*/\1/' || true)
|
||||||
|
if [[ -n "$CURRENT_REMOTE" && "$CONFIGURED_REPO" != "$CURRENT_REMOTE" ]]; then
|
||||||
|
echo "$PREFIX Git remote mismatch - run /project-sync"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# All checks passed - say nothing
|
||||||
|
exit 0
|
||||||
@@ -67,6 +67,7 @@ main() {
    # Shared MCP servers at repository root (v3.0.0+)
    update_mcp_server "gitea"
    update_mcp_server "netbox"
    update_mcp_server "data-platform"

    check_changelog

@@ -78,3 +79,8 @@
}

main "$@"

# Clear plugin cache to ensure fresh hooks are loaded
echo "Clearing plugin cache..."
rm -rf ~/.claude/plugins/cache/leo-claude-mktplace/
echo "Cache cleared"
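After the update script has refreshed the shared servers and cleared the plugin cache, a quick follow-up check is to confirm each venv interpreter exists. Paths assume the v3.0.0+ layout noted above, with `mcp-servers/` at the repository root; run it from that root.

```bash
# Post-update sanity check; run from the marketplace repository root.
for server in gitea netbox data-platform; do
  if [[ -x "mcp-servers/$server/.venv/bin/python" ]]; then
    echo "OK: $server venv present"
  else
    echo "MISSING: $server venv - run setup.sh"
  fi
done
```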
|
|||||||
172
scripts/release.sh
Executable file
172
scripts/release.sh
Executable file
@@ -0,0 +1,172 @@
#!/bin/bash
# release.sh - Create a new release with version consistency
#
# Usage: ./scripts/release.sh X.Y.Z
#
# This script ensures all version references are updated consistently:
# 1. CHANGELOG.md - [Unreleased] becomes [X.Y.Z] - YYYY-MM-DD
# 2. README.md - Title updated to vX.Y.Z
# 3. marketplace.json - version field updated
# 4. Git commit and tag created
#
# Prerequisites:
# - Clean working directory (no uncommitted changes)
# - [Unreleased] section in CHANGELOG.md with content
# - On development branch

set -e

VERSION=$1
DATE=$(date +%Y-%m-%d)

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

error() { echo -e "${RED}ERROR: $1${NC}" >&2; exit 1; }
warn() { echo -e "${YELLOW}WARNING: $1${NC}"; }
success() { echo -e "${GREEN}$1${NC}"; }
info() { echo -e "$1"; }

# Validate arguments
if [ -z "$VERSION" ]; then
    echo "Usage: ./scripts/release.sh X.Y.Z"
    echo ""
    echo "Example: ./scripts/release.sh 3.2.0"
    echo ""
    echo "This will:"
    echo " 1. Update CHANGELOG.md [Unreleased] -> [X.Y.Z] - $(date +%Y-%m-%d)"
    echo " 2. Update README.md title to vX.Y.Z"
    echo " 3. Update marketplace.json version to X.Y.Z"
    echo " 4. Commit with message 'chore: release vX.Y.Z'"
    echo " 5. Create git tag vX.Y.Z"
    exit 1
fi

# Validate version format
if ! [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    error "Invalid version format. Use X.Y.Z (e.g., 3.2.0)"
fi

# Check we're in the right directory
if [ ! -f "CHANGELOG.md" ] || [ ! -f "README.md" ] || [ ! -f ".claude-plugin/marketplace.json" ]; then
    error "Must run from repository root (CHANGELOG.md, README.md, .claude-plugin/marketplace.json must exist)"
fi

# Check for clean working directory
if [ -n "$(git status --porcelain)" ]; then
    warn "Working directory has uncommitted changes"
    echo ""
    git status --short
    echo ""
    read -p "Continue anyway? [y/N] " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        exit 1
    fi
fi

# Check current branch
BRANCH=$(git branch --show-current)
if [ "$BRANCH" != "development" ] && [ "$BRANCH" != "main" ]; then
    warn "Not on development or main branch (current: $BRANCH)"
    read -p "Continue anyway? [y/N] " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        exit 1
    fi
fi

# Check [Unreleased] section has content
if ! grep -q "## \[Unreleased\]" CHANGELOG.md; then
    error "CHANGELOG.md missing [Unreleased] section"
fi

# Check if tag already exists
if git tag -l | grep -q "^v$VERSION$"; then
    error "Tag v$VERSION already exists"
fi

info ""
info "=== Release v$VERSION ==="
info ""

# Show what will change
info "Changes to be made:"
info " CHANGELOG.md: [Unreleased] -> [$VERSION] - $DATE"
info " README.md: title -> v$VERSION"
info " marketplace.json: version -> $VERSION"
info " Git: commit + tag v$VERSION"
info ""

# Preview CHANGELOG [Unreleased] content
info "Current [Unreleased] content:"
info "---"
sed -n '/^## \[Unreleased\]/,/^## \[/p' CHANGELOG.md | head -30
info "---"
info ""

read -p "Proceed with release? [y/N] " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    info "Aborted"
    exit 0
fi

info ""
info "Updating files..."

# 1. Update CHANGELOG.md
# Replace [Unreleased] with [X.Y.Z] - DATE and add new [Unreleased] section
sed -i "s/^## \[Unreleased\]$/## [Unreleased]\n\n*Changes staged for the next release*\n\n---\n\n## [$VERSION] - $DATE/" CHANGELOG.md

# Remove the placeholder text if it exists after the new [Unreleased]
sed -i '/^\*Changes staged for the next release\*$/d' CHANGELOG.md

# Clean up any double blank lines
sed -i '/^$/N;/^\n$/d' CHANGELOG.md

success " CHANGELOG.md updated"

# 2. Update README.md title
sed -i "s/^# Leo Claude Marketplace - v[0-9]\+\.[0-9]\+\.[0-9]\+$/# Leo Claude Marketplace - v$VERSION/" README.md
success " README.md updated"

# 3. Update marketplace.json version
sed -i "s/\"version\": \"[0-9]\+\.[0-9]\+\.[0-9]\+\"/\"version\": \"$VERSION\"/" .claude-plugin/marketplace.json
success " marketplace.json updated"

info ""
info "Files updated. Review changes:"
info ""
git diff --stat
info ""
git diff CHANGELOG.md | head -40
info ""

read -p "Commit and tag? [y/N] " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    warn "Changes made but not committed. Run 'git checkout -- .' to revert."
    exit 0
fi

# Commit
git add CHANGELOG.md README.md .claude-plugin/marketplace.json
git commit -m "chore: release v$VERSION"
success " Committed"

# Tag
git tag "v$VERSION"
success " Tagged v$VERSION"

info ""
success "=== Release v$VERSION created ==="
info ""
info "Next steps:"
info " 1. Review the commit: git show HEAD"
info " 2. Push to remote: git push && git push --tags"
info " 3. Merge to main if on development branch"
info ""
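Reviewer note: the CHANGELOG rewrite above relies on GNU sed's handling of `\n` in the replacement text, so it will not behave the same under BSD/macOS sed. A minimal sketch for previewing the substitution on a throwaway copy before running the real release (the temp file and the trimmed replacement are illustrative, not part of release.sh):

```bash
#!/bin/bash
# Preview the [Unreleased] -> [X.Y.Z] promotion on a scratch copy.
# Assumes GNU sed, the same assumption release.sh makes with `sed -i "...\n..."`.
VERSION="3.2.0"              # example value only
DATE=$(date +%Y-%m-%d)

tmp=$(mktemp)
cp CHANGELOG.md "$tmp"

# Trimmed version of the substitution used by release.sh.
sed -i "s/^## \[Unreleased\]$/## [Unreleased]\n\n---\n\n## [$VERSION] - $DATE/" "$tmp"

# Show the resulting section headings, then discard the copy.
grep -n "^## \[" "$tmp" | head -5
rm -f "$tmp"
```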
@@ -116,6 +116,15 @@ verify_symlinks() {
         log_error "pr-review/gitea symlink missing"
         log_todo "Run: ln -s ../../../mcp-servers/gitea plugins/pr-review/mcp-servers/gitea"
     fi
+
+    # Check data-platform -> data-platform symlink
+    local dataplatform_link="$REPO_ROOT/plugins/data-platform/mcp-servers/data-platform"
+    if [[ -L "$dataplatform_link" ]]; then
+        log_success "data-platform symlink exists"
+    else
+        log_error "data-platform symlink missing"
+        log_todo "Run: ln -s ../../../mcp-servers/data-platform plugins/data-platform/mcp-servers/data-platform"
+    fi
 }
 
 # --- Section 3: Config File Templates ---
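Reviewer note: `[[ -L ... ]]` only proves a symlink is present; it still passes when the link points at a missing target. A hedged standalone check for local debugging, run from the repository root (not part of the setup script):

```bash
#!/bin/bash
# Distinguish "missing", "broken", and "healthy" for the data-platform symlink.
# Illustrative helper only; the path mirrors the log_todo command above.
link="plugins/data-platform/mcp-servers/data-platform"

if [[ -L "$link" && -d "$link" ]]; then
    echo "OK: $link -> $(readlink "$link")"
elif [[ -L "$link" ]]; then
    echo "BROKEN: $link -> $(readlink "$link") (target missing)"
else
    echo "MISSING: run: ln -s ../../../mcp-servers/data-platform $link"
fi
```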
@@ -178,6 +187,22 @@ EOF
         chmod 600 "$config_dir/git-flow.env"
         log_success "git-flow.env template created"
     fi
+
+    # PostgreSQL config (for data-platform, optional)
+    if [[ -f "$config_dir/postgres.env" ]]; then
+        log_skip "postgres.env already exists"
+    else
+        cat > "$config_dir/postgres.env" << 'EOF'
+# PostgreSQL Configuration (for data-platform plugin)
+# Update with your PostgreSQL connection URL
+# This is OPTIONAL - pandas tools work without it
+
+POSTGRES_URL=postgresql://user:password@localhost:5432/database
+EOF
+        chmod 600 "$config_dir/postgres.env"
+        log_success "postgres.env template created"
+        log_todo "Edit ~/.config/claude/postgres.env with your PostgreSQL credentials (optional)"
+    fi
 }
 
 # --- Section 4: Validate Configuration ---
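Reviewer note: once postgres.env holds real credentials, the URL can be sanity-checked before the data-platform MCP server ever reads it. A minimal sketch, assuming the `psql` client is installed; this check is not part of the setup script:

```bash
#!/bin/bash
# Verify the POSTGRES_URL written to ~/.config/claude/postgres.env is reachable.
set -a                                  # export every variable sourced below
source ~/.config/claude/postgres.env    # defines POSTGRES_URL
set +a

if psql "$POSTGRES_URL" -c 'SELECT 1;' > /dev/null 2>&1; then
    echo "✓ POSTGRES_URL connects"
else
    echo "✗ Connection failed - edit ~/.config/claude/postgres.env (optional for pandas-only use)"
fi
```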
@@ -283,6 +308,7 @@ main() {
     # Shared MCP servers at repository root
     setup_shared_mcp "gitea"
     setup_shared_mcp "netbox"
+    setup_shared_mcp "data-platform"
 
     # Verify symlinks from plugins to shared MCP servers
     verify_symlinks
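Reviewer note: for orientation, the layout `main()` builds toward looks roughly like the sketch below; shared servers live once at the repository root and each plugin reaches its server through a relative symlink. Directory names come from the `ln -s` commands in this changeset; the tree itself is illustrative.

```bash
# Illustrative layout after setup (not emitted by any script):
#
# mcp-servers/
#   gitea/
#   netbox/
#   data-platform/
# plugins/
#   pr-review/mcp-servers/gitea                -> ../../../mcp-servers/gitea
#   cmdb-assistant/mcp-servers/netbox          -> ../../../mcp-servers/netbox
#   data-platform/mcp-servers/data-platform    -> ../../../mcp-servers/data-platform
```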
@@ -168,6 +168,12 @@ if [[ ! -d "$ROOT_DIR/mcp-servers/netbox" ]]; then
 fi
 echo "✓ Shared netbox MCP server exists"
+
+if [[ ! -d "$ROOT_DIR/mcp-servers/data-platform" ]]; then
+    echo "ERROR: Shared data-platform MCP server not found at mcp-servers/data-platform/"
+    exit 1
+fi
+echo "✓ Shared data-platform MCP server exists"
 
 # Check symlinks for plugins that use MCP servers
 check_mcp_symlink() {
     local plugin_name="$1"
@@ -195,5 +201,8 @@ check_mcp_symlink "pr-review" "gitea"
 # Plugins with netbox MCP dependency
 check_mcp_symlink "cmdb-assistant" "netbox"
 
+# Plugins with data-platform MCP dependency
+check_mcp_symlink "data-platform" "data-platform"
+
 echo ""
 echo "=== All validations passed ==="
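Reviewer note: the calls above follow the two-argument convention `check_mcp_symlink <plugin> <server>`; the function body sits outside this hunk. A hedged sketch of what such a check amounts to, derived only from the symlink paths used elsewhere in this changeset (the real implementation may differ):

```bash
#!/bin/bash
# Hypothetical stand-in for check_mcp_symlink - for illustration only.
check_mcp_symlink_sketch() {
    local plugin_name="$1"
    local server_name="$2"
    local link="plugins/$plugin_name/mcp-servers/$server_name"

    if [[ -L "$link" && -d "$link" ]]; then
        echo "✓ $plugin_name uses shared $server_name MCP server"
    else
        echo "ERROR: $link missing or broken"
        return 1
    fi
}

check_mcp_symlink_sketch "data-platform" "data-platform"
```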
scripts/verify-hooks.sh (new executable file, 44 lines)
@@ -0,0 +1,44 @@
#!/bin/bash
# Verify all hooks are command type (not prompt)
# Run this after any plugin update

echo "=== HOOK VERIFICATION ==="
echo ""

FAILED=0

# Check ALL hooks.json files in .claude directory
for f in $(find ~/.claude -name "hooks.json" 2>/dev/null); do
    if grep -q '"type": "prompt"' "$f" || grep -q '"type":"prompt"' "$f"; then
        echo "❌ PROMPT HOOK FOUND: $f"
        FAILED=1
    fi
done

# Note about cache (informational only - do NOT clear mid-session)
if [ -d ~/.claude/plugins/cache/leo-claude-mktplace ]; then
    echo "ℹ️ Cache exists: ~/.claude/plugins/cache/leo-claude-mktplace"
    echo " (This is normal - do NOT clear mid-session or MCP tools will break)"
    echo " To apply plugin changes: restart Claude Code session"
fi

# Verify installed hooks are command type
for plugin in doc-guardian code-sentinel projman pr-review project-hygiene data-platform; do
    HOOK_FILE=~/.claude/plugins/marketplaces/leo-claude-mktplace/plugins/$plugin/hooks/hooks.json
    if [ -f "$HOOK_FILE" ]; then
        if grep -q '"type": "command"' "$HOOK_FILE" || grep -q '"type":"command"' "$HOOK_FILE"; then
            echo "✓ $plugin: command type"
        else
            echo "❌ $plugin: NOT command type"
            FAILED=1
        fi
    fi
done

echo ""
if [ $FAILED -eq 0 ]; then
    echo "✓ All hooks verified OK"
else
    echo "❌ ISSUES FOUND - fix before using"
    exit 1
fi
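Reviewer note: the greps above classify a hook purely by whether the string `"type": "command"` or `"type": "prompt"` appears in hooks.json. A self-contained illustration of that behaviour; the surrounding JSON fields are assumptions, only the type strings matter to the script:

```bash
#!/bin/bash
# Fabricate a tiny hooks.json and apply the same greps verify-hooks.sh uses.
tmp=$(mktemp)
cat > "$tmp" << 'EOF'
{
  "hooks": [
    { "event": "PostToolUse", "type": "command", "command": "./check.sh" }
  ]
}
EOF

if grep -q '"type": "command"' "$tmp" || grep -q '"type":"command"' "$tmp"; then
    echo "would report: command type"
fi
if grep -q '"type": "prompt"' "$tmp" || grep -q '"type":"prompt"' "$tmp"; then
    echo "would report: PROMPT HOOK FOUND"
fi
rm -f "$tmp"
```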