initial project setup: added plugin skill
This commit is contained in:
@@ -0,0 +1,765 @@
|
||||
# CI/CD Integration Guide
|
||||
|
||||
Complete guide for automating plugin testing, validation, and deployment.
|
||||
|
||||
## GitHub Actions
|
||||
|
||||
### Basic Validation Workflow
|
||||
```yaml
|
||||
# .github/workflows/validate-plugin.yml
|
||||
name: Validate Plugin
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Validate plugin manifest
|
||||
run: |
|
||||
python scripts/validate_manifest.py .claude-plugin/plugin.json
|
||||
|
||||
- name: Check JSON syntax
|
||||
run: |
|
||||
find . -name "*.json" -exec jq empty {} +
|
||||
|
||||
- name: Test commands
|
||||
run: |
|
||||
python scripts/test_commands.py .
|
||||
|
||||
- name: Check file permissions
|
||||
run: |
|
||||
# Ensure hook scripts are executable
|
||||
if find hooks -name "*.sh" -type f ! -perm -u+x | grep -q .; then exit 1; fi
|
||||
```
|
||||
|
||||
### Advanced Testing Workflow
|
||||
```yaml
|
||||
# .github/workflows/test-plugin.yml
|
||||
name: Test Plugin
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
PLUGIN_NAME: my-plugin
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Lint Markdown
|
||||
uses: DavidAnson/markdownlint-cli2-action@v14
|
||||
with:
|
||||
globs: |
|
||||
**/*.md
|
||||
!node_modules
|
||||
|
||||
- name: Spell Check
|
||||
uses: streetsidesoftware/cspell-action@v5
|
||||
with:
|
||||
files: |
|
||||
**/*.md
|
||||
**/*.json
|
||||
|
||||
test-scripts:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['3.9', '3.10', '3.11']
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install -r requirements.txt
|
||||
pip install pytest pytest-cov
|
||||
|
||||
- name: Run Python tests
|
||||
run: |
|
||||
pytest tests/ --cov=scripts --cov-report=xml
|
||||
|
||||
- name: Upload coverage
|
||||
uses: codecov/codecov-action@v3
|
||||
|
||||
test-hooks:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Test shell scripts
|
||||
run: |
|
||||
# Install shellcheck
|
||||
sudo apt-get update && sudo apt-get install -y shellcheck
|
||||
|
||||
# Check all shell scripts
|
||||
find . -name "*.sh" -exec shellcheck {} +
|
||||
|
||||
- name: Test hook execution
|
||||
run: |
|
||||
# Simulate hook environment
|
||||
export CHANGED_FILE="test.py"
|
||||
export FILE_EXTENSION="py"
|
||||
|
||||
# Test each hook
|
||||
for hook in hooks/*.sh; do
|
||||
if [ -x "$hook" ]; then
|
||||
echo "Testing $hook..."
|
||||
timeout 10s "$hook" || echo "Hook $hook failed or timed out"
|
||||
fi
|
||||
done
|
||||
|
||||
integration:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, test-scripts, test-hooks]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Claude CLI (mock)
|
||||
run: |
|
||||
# In real scenario, install actual Claude CLI
|
||||
echo "Installing Claude CLI..."
|
||||
|
||||
- name: Test plugin installation
|
||||
run: |
|
||||
# Mock test - in reality would use Claude CLI
|
||||
echo "Testing plugin installation..."
|
||||
|
||||
- name: Test command execution
|
||||
run: |
|
||||
# Mock test - in reality would test commands
|
||||
echo "Testing command execution..."
|
||||
```
|
||||
|
||||
### Release Workflow
|
||||
```yaml
|
||||
# .github/workflows/release.yml
|
||||
name: Release Plugin
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Validate version tag
|
||||
run: |
|
||||
# Extract version from tag
|
||||
VERSION=${GITHUB_REF#refs/tags/v}

# Persist for later steps (shell variables do not carry across steps)
echo "VERSION=$VERSION" >> "$GITHUB_ENV"
|
||||
|
||||
# Check manifest version matches
|
||||
MANIFEST_VERSION=$(jq -r .version .claude-plugin/plugin.json)
|
||||
|
||||
if [ "$VERSION" != "$MANIFEST_VERSION" ]; then
|
||||
echo "Tag version ($VERSION) doesn't match manifest ($MANIFEST_VERSION)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Create changelog
|
||||
id: changelog
|
||||
run: |
|
||||
# Generate changelog from commits
|
||||
git log --pretty=format:"- %s" $(git describe --tags --abbrev=0 HEAD^)..HEAD > CHANGELOG.md
|
||||
|
||||
- name: Update marketplace
|
||||
run: |
|
||||
# Update marketplace.json if it exists
|
||||
if [ -f "../marketplace/.claude-plugin/marketplace.json" ]; then
|
||||
# Update plugin version in marketplace
|
||||
jq --arg v "$VERSION" \
|
||||
'(.plugins[] | select(.name == "${{ env.PLUGIN_NAME }}")).version = $v' \
|
||||
../marketplace/.claude-plugin/marketplace.json > tmp.json
|
||||
mv tmp.json ../marketplace/.claude-plugin/marketplace.json
|
||||
fi
|
||||
|
||||
- name: Create Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
body_path: CHANGELOG.md
|
||||
files: |
|
||||
.claude-plugin/plugin.json
|
||||
README.md
|
||||
```
|
||||
|
||||
## GitLab CI
|
||||
|
||||
### Basic Pipeline
|
||||
```yaml
|
||||
# .gitlab-ci.yml
|
||||
stages:
|
||||
- validate
|
||||
- test
|
||||
- deploy
|
||||
|
||||
variables:
|
||||
PLUGIN_NAME: "my-plugin"
|
||||
|
||||
validate:manifest:
|
||||
stage: validate
|
||||
image: python:3.11
|
||||
script:
|
||||
- python scripts/validate_manifest.py .claude-plugin/plugin.json
|
||||
only:
|
||||
- merge_requests
|
||||
- main
|
||||
|
||||
validate:json:
|
||||
stage: validate
|
||||
image: stedolan/jq
|
||||
script:
|
||||
- find . -name "*.json" -exec jq empty {} +
|
||||
only:
|
||||
- merge_requests
|
||||
- main
|
||||
|
||||
test:commands:
|
||||
stage: test
|
||||
image: python:3.11
|
||||
script:
|
||||
- pip install -r requirements.txt
|
||||
- python scripts/test_commands.py .
|
||||
artifacts:
|
||||
reports:
|
||||
junit: test-results.xml
|
||||
|
||||
test:scripts:
|
||||
stage: test
|
||||
image: python:3.11
|
||||
script:
|
||||
- pip install pytest pytest-cov
|
||||
- pytest tests/ --junitxml=test-results.xml
|
||||
coverage: '/TOTAL.*\s+(\d+%)$/'
|
||||
|
||||
deploy:marketplace:
|
||||
stage: deploy
|
||||
image: alpine/git
|
||||
script:
|
||||
- |
|
||||
# Update marketplace repository
|
||||
git clone $MARKETPLACE_REPO marketplace
|
||||
cd marketplace
|
||||
|
||||
# Update plugin entry
|
||||
# ... update logic ...
|
||||
|
||||
git add .
|
||||
git commit -m "Update $PLUGIN_NAME to $CI_COMMIT_TAG"
|
||||
git push origin main
|
||||
only:
|
||||
- tags
|
||||
```
|
||||
|
||||
### Advanced GitLab Pipeline
|
||||
```yaml
|
||||
# .gitlab-ci.yml
|
||||
include:
|
||||
- template: Security/SAST.gitlab-ci.yml
|
||||
- template: Code-Quality.gitlab-ci.yml
|
||||
|
||||
stages:
|
||||
- build
|
||||
- test
|
||||
- security
|
||||
- deploy
|
||||
- cleanup
|
||||
|
||||
.plugin_template:
|
||||
image: node:18
|
||||
before_script:
|
||||
- npm ci --cache .npm --prefer-offline
|
||||
cache:
|
||||
key: ${CI_COMMIT_REF_SLUG}
|
||||
paths:
|
||||
- .npm/
|
||||
- node_modules/
|
||||
|
||||
build:plugin:
|
||||
extends: .plugin_template
|
||||
stage: build
|
||||
script:
|
||||
- npm run build
|
||||
- tar -czf plugin.tar.gz .
|
||||
artifacts:
|
||||
paths:
|
||||
- plugin.tar.gz
|
||||
expire_in: 1 week
|
||||
|
||||
test:unit:
|
||||
extends: .plugin_template
|
||||
stage: test
|
||||
script:
|
||||
- npm test -- --coverage
|
||||
artifacts:
|
||||
reports:
|
||||
coverage_report:
|
||||
coverage_format: cobertura
|
||||
path: coverage/cobertura-coverage.xml
|
||||
|
||||
test:integration:
|
||||
stage: test
|
||||
image: docker:latest
|
||||
services:
|
||||
- docker:dind
|
||||
script:
|
||||
- docker build -t $PLUGIN_NAME:test .
|
||||
- docker run --rm $PLUGIN_NAME:test npm run test:integration
|
||||
|
||||
security:dependencies:
|
||||
stage: security
|
||||
image: node:18
|
||||
script:
|
||||
- npm audit --production
|
||||
allow_failure: true
|
||||
|
||||
deploy:staging:
|
||||
stage: deploy
|
||||
script:
|
||||
- echo "Deploying to staging marketplace..."
|
||||
environment:
|
||||
name: staging
|
||||
url: https://staging.marketplace.example.com
|
||||
only:
|
||||
- develop
|
||||
|
||||
deploy:production:
|
||||
stage: deploy
|
||||
script:
|
||||
- echo "Deploying to production marketplace..."
|
||||
environment:
|
||||
name: production
|
||||
url: https://marketplace.example.com
|
||||
only:
|
||||
- tags
|
||||
when: manual
|
||||
```
|
||||
|
||||
## Jenkins Pipeline
|
||||
|
||||
### Jenkinsfile
|
||||
```groovy
|
||||
// Jenkinsfile
|
||||
pipeline {
|
||||
agent any
|
||||
|
||||
environment {
|
||||
PLUGIN_NAME = 'my-plugin'
|
||||
PYTHON = 'python3'
|
||||
}
|
||||
|
||||
stages {
|
||||
stage('Checkout') {
|
||||
steps {
|
||||
checkout scm
|
||||
}
|
||||
}
|
||||
|
||||
stage('Validate') {
|
||||
parallel {
|
||||
stage('Manifest') {
|
||||
steps {
|
||||
sh '${PYTHON} scripts/validate_manifest.py .claude-plugin/plugin.json'
|
||||
}
|
||||
}
|
||||
|
||||
stage('JSON Files') {
|
||||
steps {
|
||||
sh 'find . -name "*.json" -exec jq empty {} +'
|
||||
}
|
||||
}
|
||||
|
||||
stage('Markdown') {
|
||||
steps {
|
||||
sh 'npx markdownlint-cli2 "**/*.md"'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('Test') {
|
||||
steps {
|
||||
sh '''
|
||||
${PYTHON} -m pip install -r requirements.txt
|
||||
${PYTHON} -m pytest tests/ --junitxml=test-results.xml
|
||||
'''
|
||||
}
|
||||
|
||||
post {
|
||||
always {
|
||||
junit 'test-results.xml'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('Security Scan') {
|
||||
steps {
|
||||
sh '''
|
||||
# Run security scans
|
||||
${PYTHON} -m pip install safety
|
||||
safety check
|
||||
|
||||
# Check for secrets
|
||||
npx @secretlint/quick-start "**/*"
|
||||
'''
|
||||
}
|
||||
}
|
||||
|
||||
stage('Build') {
|
||||
when {
|
||||
tag pattern: "v\\d+\\.\\d+\\.\\d+", comparator: "REGEXP"
|
||||
}
|
||||
steps {
|
||||
sh '''
|
||||
# Package plugin
|
||||
tar -czf ${PLUGIN_NAME}-${TAG_NAME}.tar.gz .
|
||||
'''
|
||||
|
||||
archiveArtifacts artifacts: '*.tar.gz'
|
||||
}
|
||||
}
|
||||
|
||||
stage('Deploy') {
|
||||
when {
|
||||
tag pattern: "v\\d+\\.\\d+\\.\\d+", comparator: "REGEXP"
|
||||
}
|
||||
steps {
|
||||
input message: 'Deploy to marketplace?'
|
||||
|
||||
sh '''
|
||||
# Deploy to marketplace
|
||||
echo "Deploying version ${TAG_NAME}"
|
||||
'''
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
post {
|
||||
success {
|
||||
slackSend(
|
||||
color: 'good',
|
||||
message: "Plugin ${PLUGIN_NAME} build successful: ${env.JOB_NAME} ${env.BUILD_NUMBER}"
|
||||
)
|
||||
}
|
||||
|
||||
failure {
|
||||
slackSend(
|
||||
color: 'danger',
|
||||
message: "Plugin ${PLUGIN_NAME} build failed: ${env.JOB_NAME} ${env.BUILD_NUMBER}"
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## CircleCI Configuration
|
||||
|
||||
### .circleci/config.yml
|
||||
```yaml
|
||||
version: 2.1
|
||||
|
||||
orbs:
|
||||
python: circleci/python@2.1.1
|
||||
node: circleci/node@5.1.0
|
||||
|
||||
executors:
|
||||
plugin-executor:
|
||||
docker:
|
||||
- image: cimg/python:3.11-node
|
||||
working_directory: ~/plugin
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
executor: plugin-executor
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Validate Manifest
|
||||
command: python scripts/validate_manifest.py .claude-plugin/plugin.json
|
||||
- run:
|
||||
name: Check JSON
|
||||
command: |
|
||||
sudo apt-get update && sudo apt-get install -y jq
|
||||
find . -name "*.json" -exec jq empty {} +
|
||||
|
||||
test:
|
||||
executor: plugin-executor
|
||||
steps:
|
||||
- checkout
|
||||
- python/install-packages:
|
||||
pkg-manager: pip
|
||||
pip-dependency-file: requirements.txt
|
||||
- run:
|
||||
name: Run Tests
|
||||
command: |
|
||||
pytest tests/ --junitxml=test-results/junit.xml
|
||||
- store_test_results:
|
||||
path: test-results
|
||||
- store_artifacts:
|
||||
path: test-results
|
||||
|
||||
security:
|
||||
executor: plugin-executor
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Security Scan
|
||||
command: |
|
||||
pip install safety
|
||||
safety check
|
||||
|
||||
deploy:
|
||||
executor: plugin-executor
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Deploy to Marketplace
|
||||
command: |
|
||||
echo "Deploying to marketplace..."
|
||||
|
||||
workflows:
|
||||
plugin-pipeline:
|
||||
jobs:
|
||||
- validate
|
||||
- test:
|
||||
requires:
|
||||
- validate
|
||||
- security:
|
||||
requires:
|
||||
- validate
|
||||
- deploy:
|
||||
requires:
|
||||
- test
|
||||
- security
|
||||
filters:
|
||||
tags:
|
||||
only: /^v.*/
|
||||
branches:
|
||||
ignore: /.*/
|
||||
```
|
||||
|
||||
## Testing Strategies
|
||||
|
||||
### Unit Tests
|
||||
```python
|
||||
# tests/test_commands.py
|
||||
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
import json
|
||||
|
||||
class TestCommands:
|
||||
def test_all_commands_have_metadata(self):
|
||||
"""Ensure all command files have proper metadata."""
|
||||
commands_dir = Path("commands")
|
||||
for md_file in commands_dir.rglob("*.md"):
|
||||
with open(md_file) as f:
|
||||
content = f.read()
|
||||
|
||||
assert content.startswith("---"), f"{md_file} missing frontmatter"
|
||||
assert "_type: command" in content, f"{md_file} missing _type"
|
||||
assert "_command:" in content, f"{md_file} missing _command"
|
||||
assert "_description:" in content, f"{md_file} missing _description"
|
||||
|
||||
def test_manifest_valid(self):
|
||||
"""Test plugin manifest is valid."""
|
||||
with open(".claude-plugin/plugin.json") as f:
|
||||
manifest = json.load(f)
|
||||
|
||||
assert "name" in manifest
|
||||
assert "version" in manifest
|
||||
assert "description" in manifest
|
||||
assert "author" in manifest
|
||||
```
|
||||
|
||||
### Integration Tests
|
||||
```python
|
||||
# tests/test_integration.py
|
||||
|
||||
import subprocess
|
||||
import tempfile
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
def test_plugin_installation():
|
||||
"""Test plugin can be installed."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
# Copy plugin to temp directory
|
||||
plugin_dir = Path(tmpdir) / "test-plugin"
|
||||
shutil.copytree(".", plugin_dir)
|
||||
|
||||
# Mock installation test
|
||||
result = subprocess.run(
|
||||
["python", "scripts/validate_manifest.py",
|
||||
str(plugin_dir / ".claude-plugin/plugin.json")],
|
||||
capture_output=True
|
||||
)
|
||||
|
||||
assert result.returncode == 0
|
||||
```
|
||||
|
||||
## Deployment Strategies
|
||||
|
||||
### Blue-Green Deployment
|
||||
```yaml
|
||||
# Deploy to staging first, then swap
|
||||
deploy:staging:
|
||||
stage: deploy
|
||||
script:
|
||||
- deploy_to_marketplace staging
|
||||
- run_smoke_tests staging
|
||||
environment:
|
||||
name: staging
|
||||
|
||||
deploy:production:
|
||||
stage: deploy
|
||||
script:
|
||||
- swap_marketplace_slots staging production
|
||||
when: manual
|
||||
only:
|
||||
- tags
|
||||
```
|
||||
|
||||
### Canary Releases
|
||||
```yaml
|
||||
deploy:canary:
|
||||
stage: deploy
|
||||
script:
|
||||
- |
|
||||
# Deploy to 10% of users
|
||||
update_marketplace_config canary 0.1
|
||||
|
||||
# Monitor for 1 hour
|
||||
sleep 3600
|
||||
|
||||
# Check metrics
|
||||
if check_canary_metrics; then
|
||||
update_marketplace_config canary 1.0
|
||||
else
|
||||
rollback_canary
|
||||
fi
|
||||
```
|
||||
|
||||
### Rollback Strategy
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# scripts/rollback.sh
|
||||
|
||||
PLUGIN_NAME="my-plugin"
|
||||
PREVIOUS_VERSION=$(git describe --tags --abbrev=0 HEAD^)
PREVIOUS_VERSION=${PREVIOUS_VERSION#v}  # tags are "vX.Y.Z"; strip the prefix
|
||||
|
||||
echo "Rolling back $PLUGIN_NAME to $PREVIOUS_VERSION"
|
||||
|
||||
# Restore previous version
|
||||
git checkout "v$PREVIOUS_VERSION"
|
||||
|
||||
# Update marketplace
|
||||
update_marketplace_version "$PLUGIN_NAME" "$PREVIOUS_VERSION"
|
||||
|
||||
# Notify team
|
||||
send_notification "Rolled back $PLUGIN_NAME to $PREVIOUS_VERSION"
|
||||
```
|
||||
|
||||
## Monitoring & Alerts
|
||||
|
||||
### Health Checks
|
||||
```yaml
|
||||
# monitoring/health_check.yml
|
||||
checks:
|
||||
- name: plugin_availability
|
||||
url: https://marketplace.example.com/api/plugins/my-plugin
|
||||
interval: 5m
|
||||
|
||||
- name: command_execution
|
||||
command: claude /my-plugin health-check
|
||||
interval: 10m
|
||||
|
||||
- name: version_check
|
||||
script: |
|
||||
CURRENT=$(claude plugin list | grep my-plugin | awk '{print $2}')
|
||||
EXPECTED=$(jq -r .version .claude-plugin/plugin.json)
|
||||
[ "$CURRENT" = "$EXPECTED" ]
|
||||
interval: 1h
|
||||
```
|
||||
|
||||
### Metrics Collection
|
||||
```python
|
||||
# scripts/collect_metrics.py
|
||||
|
||||
import json
|
||||
import requests
|
||||
from datetime import datetime
|
||||
|
||||
def collect_plugin_metrics():
|
||||
metrics = {
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"plugin": "my-plugin",
|
||||
"installations": get_installation_count(),
|
||||
"daily_active_users": get_dau(),
|
||||
"command_usage": get_command_stats(),
|
||||
"error_rate": get_error_rate()
|
||||
}
|
||||
|
||||
# Send to monitoring service
|
||||
requests.post(
|
||||
"https://metrics.example.com/api/plugins",
|
||||
json=metrics
|
||||
)
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Version Everything**
|
||||
- Tag releases with semantic versions
|
||||
- Keep changelog updated
|
||||
- Version lock dependencies
|
||||
|
||||
2. **Automate Testing**
|
||||
- Run tests on every commit
|
||||
- Block merges without passing tests
|
||||
- Include security scanning
|
||||
|
||||
3. **Progressive Rollout**
|
||||
- Deploy to staging first
|
||||
- Use canary releases for major changes
|
||||
- Have rollback plan ready
|
||||
|
||||
4. **Monitor Everything**
|
||||
- Track installation success rate
|
||||
- Monitor command execution time
|
||||
- Alert on error spikes
|
||||
|
||||
5. **Documentation**
|
||||
- Update docs with releases
|
||||
- Include migration guides
|
||||
- Document breaking changes
|
||||
@@ -0,0 +1,260 @@
|
||||
# Command Metadata Format
|
||||
|
||||
Comprehensive guide for command frontmatter in `.md` files.
|
||||
|
||||
## Basic Structure
|
||||
|
||||
```markdown
|
||||
---
|
||||
_type: command
|
||||
_command: command-name
|
||||
_description: Brief description of what the command does
|
||||
_agent: agent-name (optional)
|
||||
---
|
||||
|
||||
# Command content starts here
|
||||
```
|
||||
|
||||
## Required Fields
|
||||
|
||||
### _type
|
||||
- **Value**: Always `"command"`
|
||||
- **Purpose**: Identifies file as a command
|
||||
|
||||
### _command
|
||||
- **Format**: kebab-case
|
||||
- **Pattern**: `^[a-z][a-z0-9-]*$`
|
||||
- **Example**: `deploy-app`, `check-status`
|
||||
|
||||
### _description
|
||||
- **Length**: 10-100 characters
|
||||
- **Content**: User-facing command description
|
||||
- **Shows in**: Command autocomplete
|
||||
|
||||
## Optional Fields
|
||||
|
||||
### _agent
|
||||
- **Purpose**: Associates command with an agent
|
||||
- **Value**: Name of agent in `agents/` directory
|
||||
- **Example**: `_agent: deploy-specialist`
|
||||
|
||||
### _alias
|
||||
- **Purpose**: Alternative command names
|
||||
- **Format**: String or array
|
||||
- **Example**: `_alias: ["dp", "deploy-production"]`
|
||||
|
||||
### _hidden
|
||||
- **Purpose**: Hide from command list
|
||||
- **Value**: `true` or `false`
|
||||
- **Use case**: Internal/debug commands
|
||||
|
||||
### _require_confirmation
|
||||
- **Purpose**: Prompt user before execution
|
||||
- **Value**: `true` or `false`
|
||||
- **Use case**: Destructive operations
|
||||
|
||||
### _tags
|
||||
- **Purpose**: Categorize commands
|
||||
- **Format**: Array of strings
|
||||
- **Example**: `_tags: ["deployment", "production"]`
|
||||
|
||||
## Command Organization
|
||||
|
||||
### Single Commands
|
||||
```
|
||||
commands/
|
||||
└── deploy-app.md
|
||||
```
|
||||
|
||||
### Grouped Commands
|
||||
```
|
||||
commands/
|
||||
├── deploy/
|
||||
│ ├── staging.md
|
||||
│ ├── production.md
|
||||
│ └── rollback.md
|
||||
├── database/
|
||||
│ ├── backup.md
|
||||
│ ├── restore.md
|
||||
│ └── migrate.md
|
||||
└── status.md
|
||||
```
|
||||
|
||||
### Subcommand Syntax
|
||||
- Directory structure creates subcommands
|
||||
- Example: `/plugin-name deploy staging`
|
||||
- Parent directory can have `_index.md` for group help
|
||||
|
||||
## Complete Examples
|
||||
|
||||
### Basic Command
|
||||
```markdown
|
||||
---
|
||||
_type: command
|
||||
_command: health-check
|
||||
_description: Check system health status
|
||||
---
|
||||
|
||||
# Health Check
|
||||
|
||||
Performs comprehensive system health verification.
|
||||
|
||||
## Usage
|
||||
Run health check: `/deploy-tools health-check`
|
||||
|
||||
## Options
|
||||
- `--verbose`: Detailed output
|
||||
- `--service <name>`: Check specific service
|
||||
```
|
||||
|
||||
### Command with Agent
|
||||
```markdown
|
||||
---
|
||||
_type: command
|
||||
_command: deploy-app
|
||||
_description: Deploy application to cloud
|
||||
_agent: deployment-specialist
|
||||
_require_confirmation: true
|
||||
_tags: ["deployment", "cloud", "production"]
|
||||
---
|
||||
|
||||
# Deploy Application
|
||||
|
||||
Deploys application using specialized agent assistance.
|
||||
|
||||
## Prerequisites
|
||||
- Valid AWS credentials
|
||||
- Docker image built
|
||||
- Deployment config ready
|
||||
```
|
||||
|
||||
### Hidden Debug Command
|
||||
```markdown
|
||||
---
|
||||
_type: command
|
||||
_command: debug-manifest
|
||||
_description: Validate plugin manifest (internal)
|
||||
_hidden: true
|
||||
_alias: "dbg-manifest"
|
||||
---
|
||||
|
||||
# Debug Manifest
|
||||
|
||||
Internal command for manifest validation.
|
||||
```
|
||||
|
||||
## Variable Interpolation
|
||||
|
||||
### Available Variables
|
||||
- `${CLAUDE_PLUGIN_ROOT}`: Plugin directory path
|
||||
- `${CLAUDE_WORKSPACE}`: Current workspace
|
||||
- `${CLAUDE_USER}`: Current username
|
||||
|
||||
### Usage Example
|
||||
```markdown
|
||||
## Configuration
|
||||
|
||||
Load config from: `${CLAUDE_PLUGIN_ROOT}/config.json`
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Naming Conventions
|
||||
1. Use verb-noun pattern: `create-database`, `update-config`
|
||||
2. Keep names short but descriptive
|
||||
3. Avoid abbreviations unless common
|
||||
4. Use consistent terminology
|
||||
|
||||
### Description Guidelines
|
||||
1. Start with action verb
|
||||
2. Be specific about what happens
|
||||
3. Include object of action
|
||||
4. Avoid technical jargon
|
||||
|
||||
Good examples:
|
||||
- "Deploy application to staging"
|
||||
- "Create new database backup"
|
||||
- "Generate API documentation"
|
||||
|
||||
Bad examples:
|
||||
- "Deployment" (just a noun — describes no action)
|
||||
- "Does stuff with the app" (too vague)
|
||||
- "Executes deployment pipeline workflow" (too technical)
|
||||
|
||||
### Documentation Structure
|
||||
1. **Brief description** after frontmatter
|
||||
2. **Usage** section with examples
|
||||
3. **Options** if applicable
|
||||
4. **Prerequisites** for complex commands
|
||||
5. **Examples** showing real usage
|
||||
|
||||
### Error Handling
|
||||
```markdown
|
||||
## Troubleshooting
|
||||
|
||||
**Error: Missing credentials**
|
||||
Solution: Run `aws configure` first
|
||||
|
||||
**Error: Port already in use**
|
||||
Solution: Check running services with `lsof -i :3000`
|
||||
```
|
||||
|
||||
## Security Patterns
|
||||
|
||||
### Input Validation
|
||||
```markdown
|
||||
## Security
|
||||
|
||||
This command validates:
|
||||
- File paths (no directory traversal)
|
||||
- Environment variables (alphanumeric only)
|
||||
- URLs (HTTPS required)
|
||||
```
|
||||
|
||||
### Confirmation Prompts
|
||||
```markdown
|
||||
---
|
||||
_require_confirmation: true
|
||||
---
|
||||
|
||||
## Confirmation
|
||||
|
||||
You will be prompted to confirm:
|
||||
- Database deletion
|
||||
- Production deployment
|
||||
- Configuration overwrite
|
||||
```
|
||||
|
||||
## Advanced Features
|
||||
|
||||
### Conditional Display
|
||||
```markdown
|
||||
## Platform-Specific
|
||||
|
||||
<!--- platform:macos --->
|
||||
### macOS Instructions
|
||||
Use Homebrew: `brew install tool`
|
||||
|
||||
<!--- platform:linux --->
|
||||
### Linux Instructions
|
||||
Use apt: `sudo apt-get install tool`
|
||||
```
|
||||
|
||||
### Dynamic Content
|
||||
```markdown
|
||||
## Current Status
|
||||
|
||||
<!-- dynamic:start -->
|
||||
Status will be inserted here
|
||||
<!-- dynamic:end -->
|
||||
```
|
||||
|
||||
### Command Chaining
|
||||
```markdown
|
||||
## Related Commands
|
||||
|
||||
After deployment, you might want to:
|
||||
- `/deploy-tools check-status`
|
||||
- `/deploy-tools view-logs`
|
||||
- `/deploy-tools rollback` (if needed)
|
||||
```
|
||||
@@ -0,0 +1,438 @@
|
||||
# Hook Development Patterns
|
||||
|
||||
Comprehensive guide for creating event-driven automation with hooks.
|
||||
|
||||
## Hook Configuration
|
||||
|
||||
### Basic Structure
|
||||
```json
|
||||
{
|
||||
"hooks": [
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.py",
|
||||
"script": "hooks/python_linter.sh"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Available Events
|
||||
|
||||
### file-changed
|
||||
Triggered when files are modified in the workspace.
|
||||
|
||||
```json
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.{js,jsx,ts,tsx}",
|
||||
"script": "hooks/format_code.sh",
|
||||
"description": "Format JavaScript/TypeScript files"
|
||||
}
|
||||
```
|
||||
|
||||
**Event Data**:
|
||||
- `CHANGED_FILE`: Path to modified file
|
||||
- `FILE_EXTENSION`: File extension
|
||||
- `PROJECT_ROOT`: Project root directory
|
||||
|
||||
### git-commit-msg-needed
|
||||
Triggered when creating git commits.
|
||||
|
||||
```json
|
||||
{
|
||||
"event": "git-commit-msg-needed",
|
||||
"script": "hooks/generate_commit_msg.py",
|
||||
"description": "Generate conventional commit messages"
|
||||
}
|
||||
```
|
||||
|
||||
**Event Data**:
|
||||
- `STAGED_FILES`: List of staged files
|
||||
- `DIFF_CONTENT`: Git diff content
|
||||
- `BRANCH_NAME`: Current branch
|
||||
|
||||
### task-completed
|
||||
Triggered after task completion.
|
||||
|
||||
```json
|
||||
{
|
||||
"event": "task-completed",
|
||||
"pattern": "*deploy*",
|
||||
"script": "hooks/notify_team.sh",
|
||||
"description": "Notify team of deployment completion"
|
||||
}
|
||||
```
|
||||
|
||||
**Event Data**:
|
||||
- `TASK_NAME`: Completed task name
|
||||
- `TASK_DURATION`: Execution time
|
||||
- `TASK_STATUS`: Success/failure
|
||||
|
||||
## Pattern Matching
|
||||
|
||||
### Glob Patterns
|
||||
```json
|
||||
{
|
||||
"pattern": "**/*.py", // All Python files
|
||||
"pattern": "src/**/*.test.js", // Test files in src
|
||||
"pattern": "*.config.{js,json}", // Config files
|
||||
"pattern": "!**/*.min.js" // Exclude minified
|
||||
}
|
||||
```
|
||||
|
||||
### Multiple Patterns
|
||||
```json
|
||||
{
|
||||
"event": "file-changed",
|
||||
"patterns": [
|
||||
"**/*.py",
|
||||
"**/*.pyi"
|
||||
],
|
||||
"script": "hooks/python_type_check.sh"
|
||||
}
|
||||
```
|
||||
|
||||
### Regex Patterns
|
||||
```json
|
||||
{
|
||||
"event": "task-completed",
|
||||
"pattern_type": "regex",
|
||||
"pattern": "^deploy-.*-prod$",
|
||||
"script": "hooks/production_monitor.sh"
|
||||
}
|
||||
```
|
||||
|
||||
## Script Implementation
|
||||
|
||||
### Shell Script Example
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# hooks/format_code.sh
|
||||
|
||||
set -e
|
||||
|
||||
# Access event data
|
||||
FILE_PATH="$CHANGED_FILE"
|
||||
EXTENSION="$FILE_EXTENSION"
|
||||
|
||||
# Format based on file type
|
||||
case "$EXTENSION" in
|
||||
js|jsx|ts|tsx)
|
||||
npx prettier --write "$FILE_PATH"
|
||||
;;
|
||||
py)
|
||||
black "$FILE_PATH"
|
||||
;;
|
||||
go)
|
||||
gofmt -w "$FILE_PATH"
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Formatted: $FILE_PATH"
|
||||
```
|
||||
|
||||
### Python Script Example
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
# hooks/generate_commit_msg.py
|
||||
|
||||
import os
|
||||
import json
|
||||
import subprocess
|
||||
|
||||
def generate_commit_message():
|
||||
staged_files = os.environ.get('STAGED_FILES', '').split('\n')
|
||||
diff_content = os.environ.get('DIFF_CONTENT', '')
|
||||
|
||||
# Analyze changes
|
||||
file_types = set()
|
||||
for file in staged_files:
|
||||
if file.endswith('.py'):
|
||||
file_types.add('python')
|
||||
elif file.endswith(('.js', '.tsx')):
|
||||
file_types.add('frontend')
|
||||
|
||||
# Generate message
|
||||
if 'python' in file_types and 'frontend' in file_types:
|
||||
prefix = 'feat(fullstack):'
|
||||
elif 'python' in file_types:
|
||||
prefix = 'feat(backend):'
|
||||
else:
|
||||
prefix = 'feat(frontend):'
|
||||
|
||||
return f"{prefix} Update {len(staged_files)} files"
|
||||
|
||||
if __name__ == "__main__":
|
||||
message = generate_commit_message()
|
||||
print(message)
|
||||
```
|
||||
|
||||
### Node.js Script Example
|
||||
```javascript
|
||||
#!/usr/bin/env node
|
||||
// hooks/validate_json.js
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const filePath = process.env.CHANGED_FILE;
|
||||
|
||||
if (filePath.endsWith('.json')) {
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, 'utf8');
|
||||
JSON.parse(content);
|
||||
console.log(`✓ Valid JSON: ${path.basename(filePath)}`);
|
||||
} catch (error) {
|
||||
console.error(`✗ Invalid JSON: ${filePath}`);
|
||||
console.error(error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Advanced Patterns
|
||||
|
||||
### Conditional Execution
|
||||
```json
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.py",
|
||||
"script": "hooks/conditional_lint.sh",
|
||||
"conditions": {
|
||||
"branch_pattern": "feature/*",
|
||||
"skip_patterns": ["**/migrations/*"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Chained Hooks
|
||||
```json
|
||||
{
|
||||
"hooks": [
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.py",
|
||||
"script": "hooks/format.sh",
|
||||
"order": 1
|
||||
},
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.py",
|
||||
"script": "hooks/lint.sh",
|
||||
"order": 2
|
||||
},
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.py",
|
||||
"script": "hooks/type_check.sh",
|
||||
"order": 3
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Debouncing
|
||||
```json
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.scss",
|
||||
"script": "hooks/compile_sass.sh",
|
||||
"debounce": 1000,
|
||||
"description": "Compile SASS after 1s of no changes"
|
||||
}
|
||||
```
|
||||
|
||||
## Security Best Practices
|
||||
|
||||
### Input Validation
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# Validate file paths
|
||||
|
||||
FILE="$CHANGED_FILE"
|
||||
|
||||
# Check for directory traversal
|
||||
if [[ "$FILE" == *".."* ]]; then
|
||||
echo "Error: Invalid file path"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Verify file exists in project
|
||||
if [[ ! "$FILE" == "$PROJECT_ROOT"* ]]; then
|
||||
echo "Error: File outside project"
|
||||
exit 1
|
||||
fi
|
||||
```
|
||||
|
||||
### Safe Command Execution
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
import subprocess
|
||||
import shlex
|
||||
import os
|
||||
|
||||
def run_command_safely(cmd, file_path):
|
||||
# Sanitize file path
|
||||
safe_path = shlex.quote(file_path)
|
||||
|
||||
# Use subprocess safely
|
||||
result = subprocess.run(
|
||||
[cmd, safe_path],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
return result.returncode == 0
|
||||
```
|
||||
|
||||
### Environment Variable Safety
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# Sanitize environment variables
|
||||
|
||||
# Remove potentially dangerous characters
|
||||
SAFE_BRANCH=$(echo "$BRANCH_NAME" | tr -cd '[:alnum:]/_-')
|
||||
|
||||
# Validate expected format
|
||||
if [[ ! "$SAFE_BRANCH" =~ ^[a-zA-Z0-9/_-]+$ ]]; then
|
||||
echo "Error: Invalid branch name"
|
||||
exit 1
|
||||
fi
|
||||
```
|
||||
|
||||
## Performance Optimization
|
||||
|
||||
### Async Processing
|
||||
```javascript
|
||||
// hooks/async_process.js
|
||||
const { Worker } = require('worker_threads');
|
||||
|
||||
async function processFileAsync(filePath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const worker = new Worker('./heavy_process.js', {
|
||||
workerData: { filePath }
|
||||
});
|
||||
|
||||
worker.on('message', resolve);
|
||||
worker.on('error', reject);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Caching
|
||||
```python
|
||||
# hooks/cached_analysis.py
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
|
||||
CACHE_DIR = os.path.join(os.environ['CLAUDE_PLUGIN_ROOT'], '.cache')
|
||||
|
||||
def get_file_hash(filepath):
|
||||
with open(filepath, 'rb') as f:
|
||||
return hashlib.md5(f.read()).hexdigest()
|
||||
|
||||
def get_cached_result(filepath):
|
||||
file_hash = get_file_hash(filepath)
|
||||
cache_file = os.path.join(CACHE_DIR, f"{file_hash}.json")
|
||||
|
||||
if os.path.exists(cache_file):
|
||||
with open(cache_file, 'r') as f:
|
||||
return json.load(f)
|
||||
return None
|
||||
```
|
||||
|
||||
### Batch Processing
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# Process multiple files efficiently
|
||||
|
||||
# Collect files for batch processing
|
||||
CHANGED_FILES=()
|
||||
while read -r file; do
|
||||
CHANGED_FILES+=("$file")
|
||||
done < <(echo "$STAGED_FILES")
|
||||
|
||||
# Process in batch
|
||||
if [ ${#CHANGED_FILES[@]} -gt 10 ]; then
|
||||
echo "Batch processing ${#CHANGED_FILES[@]} files..."
|
||||
prettier --write "${CHANGED_FILES[@]}"
|
||||
else
|
||||
# Process individually for small batches
|
||||
for file in "${CHANGED_FILES[@]}"; do
|
||||
prettier --write "$file"
|
||||
done
|
||||
fi
|
||||
```
|
||||
|
||||
## Testing Hooks
|
||||
|
||||
### Unit Testing
|
||||
```python
|
||||
# tests/test_hook.py
|
||||
import unittest
|
||||
import os
|
||||
from hooks import generate_commit_msg
|
||||
|
||||
class TestCommitMsgHook(unittest.TestCase):
|
||||
def test_python_files(self):
|
||||
os.environ['STAGED_FILES'] = 'app.py\ntest.py'
|
||||
msg = generate_commit_msg.generate()
|
||||
self.assertIn('backend', msg)
|
||||
```
|
||||
|
||||
### Integration Testing
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# test_hooks.sh
|
||||
|
||||
# Test file-changed hook
|
||||
echo "Testing file-changed hook..."
|
||||
touch test.py
|
||||
CHANGED_FILE="test.py" FILE_EXTENSION="py" ./hooks/format_code.sh
|
||||
|
||||
# Verify result
|
||||
if black --check test.py; then
|
||||
echo "✓ Hook executed successfully"
|
||||
else
|
||||
echo "✗ Hook failed"
|
||||
exit 1
|
||||
fi
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Debug Mode
|
||||
```json
|
||||
{
|
||||
"event": "file-changed",
|
||||
"pattern": "**/*.py",
|
||||
"script": "hooks/lint.sh",
|
||||
"debug": true,
|
||||
"log_output": true
|
||||
}
|
||||
```
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **Hook not firing**
|
||||
- Check pattern matches
|
||||
- Verify event name
|
||||
- Ensure script is executable
|
||||
|
||||
2. **Script errors**
|
||||
- Add error handling
|
||||
- Check environment variables
|
||||
- Verify dependencies
|
||||
|
||||
3. **Performance issues**
|
||||
- Add debouncing
|
||||
- Implement caching
|
||||
- Use async processing
|
||||
|
||||
4. **Security warnings**
|
||||
- Validate all inputs
|
||||
- Use safe command execution
|
||||
- Restrict file access
|
||||
@@ -0,0 +1,201 @@
|
||||
# Plugin Manifest Schema
|
||||
|
||||
Complete JSON schema reference for `.claude-plugin/plugin.json` files.
|
||||
|
||||
## Required Fields
|
||||
|
||||
### Basic Information
|
||||
```json
|
||||
{
|
||||
"name": "string",
|
||||
"version": "string",
|
||||
"description": "string",
|
||||
"author": {
|
||||
"name": "string",
|
||||
"email": "string (optional)",
|
||||
"url": "string (optional)"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Field Specifications
|
||||
|
||||
#### name
|
||||
- **Type**: string
|
||||
- **Pattern**: `^[a-z][a-z0-9-]*$`
|
||||
- **Description**: Plugin identifier in kebab-case
|
||||
- **Example**: `"deploy-tools"`
|
||||
|
||||
#### version
|
||||
- **Type**: string
|
||||
- **Format**: Semantic versioning (MAJOR.MINOR.PATCH)
|
||||
- **Example**: `"1.0.0"`
|
||||
|
||||
#### description
|
||||
- **Type**: string
|
||||
- **Max length**: 200 characters
|
||||
- **Purpose**: Brief explanation of plugin functionality
|
||||
|
||||
#### author
|
||||
- **Type**: object
|
||||
- **Required**: name field
|
||||
- **Optional**: email, url fields
|
||||
|
||||
## Optional Fields
|
||||
|
||||
### Dependencies
|
||||
```json
|
||||
{
|
||||
"dependencies": {
|
||||
"node": ">=18.0.0",
|
||||
"python": ">=3.9",
|
||||
"docker": ">=20.0.0"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Configuration
|
||||
```json
|
||||
{
|
||||
"config": {
|
||||
"default_shell": "bash",
|
||||
"require_safety_check": true,
|
||||
"environment": {
|
||||
"NODE_ENV": "production",
|
||||
"PLUGIN_HOME": "${CLAUDE_PLUGIN_ROOT}"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Permissions
|
||||
```json
|
||||
{
|
||||
"permissions": {
|
||||
"file_access": ["read", "write"],
|
||||
"network_access": true,
|
||||
"shell_access": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Scripts
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"install": "npm install",
|
||||
"test": "npm test",
|
||||
"build": "npm run build"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Complete Example
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "deploy-automation",
|
||||
"version": "2.1.0",
|
||||
"description": "Automated deployment tools for cloud platforms",
|
||||
"author": {
|
||||
"name": "Hyper Hive Labs",
|
||||
"email": "plugins@hyperhivelabs.com",
|
||||
"url": "https://hyperhivelabs.com"
|
||||
},
|
||||
"license": "MIT",
|
||||
"keywords": ["deployment", "automation", "cloud", "devops"],
|
||||
"homepage": "https://github.com/hyperhivelabs/deploy-automation",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/hyperhivelabs/deploy-automation.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/hyperhivelabs/deploy-automation/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"node": ">=18.0.0",
|
||||
"aws-cli": ">=2.0.0",
|
||||
"terraform": ">=1.0.0"
|
||||
},
|
||||
"config": {
|
||||
"default_shell": "bash",
|
||||
"require_safety_check": true,
|
||||
"timeout": 300,
|
||||
"environment": {
|
||||
"DEPLOY_ENV": "production",
|
||||
"LOG_LEVEL": "info"
|
||||
}
|
||||
},
|
||||
"permissions": {
|
||||
"file_access": ["read", "write"],
|
||||
"network_access": true,
|
||||
"shell_access": true,
|
||||
"env_access": ["AWS_*", "DEPLOY_*"]
|
||||
},
|
||||
"scripts": {
|
||||
"install": "./scripts/install.sh",
|
||||
"test": "./scripts/test.sh",
|
||||
"validate": "./scripts/validate.sh"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Validation Rules
|
||||
|
||||
### Name Validation
|
||||
- Must start with lowercase letter
|
||||
- Can contain lowercase letters, numbers, hyphens
|
||||
- Cannot end with hyphen
|
||||
- Cannot contain consecutive hyphens
|
||||
- Length: 2-40 characters
|
||||
|
||||
### Version Validation
|
||||
- Must follow semantic versioning
|
||||
- Format: `MAJOR.MINOR.PATCH`
|
||||
- Optional pre-release: `-alpha`, `-beta`, `-rc.1`
|
||||
- Optional build metadata: `+build.123`
|
||||
|
||||
### Description Validation
|
||||
- Required field
|
||||
- Maximum 200 characters
|
||||
- Should describe what the plugin does
|
||||
- Should include primary use cases
|
||||
|
||||
### Author Validation
|
||||
- Name is required
|
||||
- Email must be valid format if provided
|
||||
- URL must be valid HTTP(S) URL if provided
|
||||
|
||||
## Security Considerations
|
||||
|
||||
### Restricted Fields
|
||||
These fields require special marketplace approval:
|
||||
```json
|
||||
{
|
||||
"privileged": true,
|
||||
"system_access": true,
|
||||
"unrestricted_network": true
|
||||
}
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
- Use `${CLAUDE_PLUGIN_ROOT}` for plugin paths
|
||||
- Never hardcode sensitive data
|
||||
- Document all required env vars
|
||||
|
||||
### Path Resolution
|
||||
- All paths relative to plugin root
|
||||
- No parent directory traversal (`../`)
|
||||
- No absolute paths unless documented
|
||||
|
||||
## Migration Guide
|
||||
|
||||
### From v1 to v2
|
||||
- Add `author` object (previously string)
|
||||
- Update `dependencies` format
|
||||
- Add `permissions` field
|
||||
|
||||
### Legacy Support
|
||||
- Old manifests auto-upgraded
|
||||
- Deprecation warnings shown
|
||||
- Grace period: 6 months
|
||||
@@ -0,0 +1,465 @@
|
||||
# Marketplace Publication Guide
|
||||
|
||||
Complete guide for distributing Claude plugins through marketplaces.
|
||||
|
||||
## Marketplace Types
|
||||
|
||||
### GitHub Marketplace
|
||||
Best for: Open source, team collaboration, public distribution
|
||||
|
||||
```bash
|
||||
# Add marketplace
|
||||
claude plugin marketplace add owner/repo
|
||||
|
||||
# Structure required
|
||||
repo/
|
||||
├── .claude-plugin/
|
||||
│ └── marketplace.json
|
||||
└── plugins/
|
||||
├── plugin-one/
|
||||
├── plugin-two/
|
||||
└── plugin-three/
|
||||
```
|
||||
|
||||
### GitLab Marketplace
|
||||
Best for: Private teams, enterprise environments
|
||||
|
||||
```bash
|
||||
# Add GitLab marketplace
|
||||
claude plugin marketplace add gitlab:group/project
|
||||
|
||||
# Requires access token
|
||||
export GITLAB_TOKEN="your-token"
|
||||
```
|
||||
|
||||
### Local Marketplace
|
||||
Best for: Development, testing, private plugins
|
||||
|
||||
```bash
|
||||
# Add local marketplace
|
||||
claude plugin marketplace add file:///path/to/marketplace
|
||||
|
||||
# Structure
|
||||
/path/to/marketplace/
|
||||
├── .claude-plugin/
|
||||
│ └── marketplace.json
|
||||
└── plugins/
|
||||
```
|
||||
|
||||
### Remote URL Marketplace
|
||||
Best for: Custom hosting, CDN distribution
|
||||
|
||||
```bash
|
||||
# Add remote marketplace
|
||||
claude plugin marketplace add https://plugins.example.com
|
||||
```
|
||||
|
||||
## Marketplace Configuration
|
||||
|
||||
### marketplace.json Structure
|
||||
```json
|
||||
{
|
||||
"name": "Hyper Hive Labs Plugins",
|
||||
"description": "Restaurant automation and AI tools",
|
||||
"version": "1.0.0",
|
||||
"plugins": [
|
||||
{
|
||||
"name": "restaurant-analytics",
|
||||
"version": "2.1.0",
|
||||
"description": "Analytics dashboard for restaurant data",
|
||||
"author": "Hyper Hive Labs",
|
||||
"path": "plugins/restaurant-analytics",
|
||||
"tags": ["analytics", "restaurant", "reporting"],
|
||||
"requirements": {
|
||||
"claude-version": ">=1.0.0",
|
||||
"dependencies": ["python>=3.9", "pandas"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "order-automation",
|
||||
"version": "1.5.2",
|
||||
"description": "Automated order processing system",
|
||||
"author": "Hyper Hive Labs",
|
||||
"path": "plugins/order-automation",
|
||||
"featured": true,
|
||||
"beta": false
|
||||
}
|
||||
],
|
||||
"categories": [
|
||||
{
|
||||
"name": "Analytics",
|
||||
"description": "Data analysis and reporting tools",
|
||||
"plugins": ["restaurant-analytics", "sales-insights"]
|
||||
},
|
||||
{
|
||||
"name": "Automation",
|
||||
"description": "Workflow automation tools",
|
||||
"plugins": ["order-automation", "inventory-manager"]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Publishing Workflow
|
||||
|
||||
### 1. Prepare Plugin
|
||||
```bash
|
||||
# Validate plugin structure
|
||||
cd my-plugin/
|
||||
claude plugin validate .
|
||||
|
||||
# Test locally
|
||||
claude plugin install file://$(pwd)
|
||||
|
||||
# Version bump
|
||||
npm version patch # or minor/major
|
||||
```
|
||||
|
||||
### 2. Create Release Branch
|
||||
```bash
|
||||
# GitHub flow
|
||||
git checkout -b release/v1.2.0
|
||||
git add .
|
||||
git commit -m "Release v1.2.0"
|
||||
git push origin release/v1.2.0
|
||||
```
|
||||
|
||||
### 3. Update Marketplace
|
||||
```json
|
||||
// Add to marketplace.json
|
||||
{
|
||||
"name": "my-new-plugin",
|
||||
"version": "1.2.0",
|
||||
"path": "plugins/my-new-plugin",
|
||||
"description": "My awesome plugin",
|
||||
"changelog": {
|
||||
"1.2.0": "Added new features",
|
||||
"1.1.0": "Bug fixes",
|
||||
"1.0.0": "Initial release"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Tag and Release
|
||||
```bash
|
||||
# Create signed tag
|
||||
git tag -s v1.2.0 -m "Release version 1.2.0"
|
||||
git push origin v1.2.0
|
||||
|
||||
# GitHub release
|
||||
gh release create v1.2.0 \
|
||||
--title "Release v1.2.0" \
|
||||
--notes "Release notes here"
|
||||
```
|
||||
|
||||
## Team Marketplace Setup
|
||||
|
||||
### Organization Structure
|
||||
```
|
||||
github.com/myorg/claude-plugins/
|
||||
├── .claude-plugin/
|
||||
│ └── marketplace.json
|
||||
├── plugins/
|
||||
│ ├── shared-utils/
|
||||
│ ├── deploy-tools/
|
||||
│ └── code-review/
|
||||
├── docs/
|
||||
│ ├── CONTRIBUTING.md
|
||||
│ └── STANDARDS.md
|
||||
└── .github/
|
||||
└── workflows/
|
||||
└── validate-plugins.yml
|
||||
```
|
||||
|
||||
### Team Configuration
|
||||
```json
|
||||
// .claude/settings.json in user's machine
|
||||
{
|
||||
"extraKnownMarketplaces": {
|
||||
"team-tools": {
|
||||
"source": {
|
||||
"source": "github",
|
||||
"repo": "myorg/claude-plugins"
|
||||
},
|
||||
"autoUpdate": true,
|
||||
"updateInterval": 3600
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Access Control
|
||||
```yaml
|
||||
# .github/CODEOWNERS
|
||||
/plugins/deploy-tools/ @devops-team
|
||||
/plugins/code-review/ @engineering-leads
|
||||
/.claude-plugin/ @plugin-admins
|
||||
```
|
||||
|
||||
## Security Requirements
|
||||
|
||||
### Code Signing
|
||||
```bash
|
||||
# Generate GPG key
|
||||
gpg --gen-key
|
||||
|
||||
# Sign commits
|
||||
git config commit.gpgsign true
|
||||
git config user.signingkey YOUR_KEY_ID
|
||||
|
||||
# Sign tags
|
||||
git tag -s v1.0.0 -m "Signed release"
|
||||
```
|
||||
|
||||
### Dependency Verification
|
||||
```json
|
||||
{
|
||||
"dependencies": {
|
||||
"axios": {
|
||||
"version": "1.6.0",
|
||||
"integrity": "sha512-...",
|
||||
"source": "npm"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Security Manifest
|
||||
```json
|
||||
// security.json
|
||||
{
|
||||
"permissions": {
|
||||
"file_access": ["read", "write"],
|
||||
"network_access": {
|
||||
"allowed_domains": ["api.mycompany.com"],
|
||||
"protocols": ["https"]
|
||||
}
|
||||
},
|
||||
"audit": {
|
||||
"last_review": "2024-01-15",
|
||||
"reviewer": "security-team",
|
||||
"findings": "none"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Version Management
|
||||
|
||||
### Semantic Versioning
|
||||
```
|
||||
MAJOR.MINOR.PATCH
|
||||
|
||||
1.0.0 -> 1.0.1 (patch: bug fixes)
|
||||
1.0.0 -> 1.1.0 (minor: new features, backward compatible)
|
||||
1.0.0 -> 2.0.0 (major: breaking changes)
|
||||
```
|
||||
|
||||
### Version Constraints
|
||||
```json
|
||||
{
|
||||
"requirements": {
|
||||
"claude-version": ">=1.0.0 <2.0.0",
|
||||
"dependencies": {
|
||||
"python": "~3.9.0", // 3.9.x
|
||||
"node": "^18.0.0" // >=18.0.0 <19.0.0
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Deprecation Policy
|
||||
```json
|
||||
{
|
||||
"deprecated": true,
|
||||
"deprecation_notice": "Use 'advanced-deploy' instead",
|
||||
"sunset_date": "2024-06-01",
|
||||
"migration_guide": "https://docs.example.com/migrate"
|
||||
}
|
||||
```
|
||||
|
||||
## Distribution Strategies
|
||||
|
||||
### Public Distribution
|
||||
1. **Open Source**
|
||||
- Host on GitHub/GitLab
|
||||
- Include LICENSE file
|
||||
- Accept contributions
|
||||
- Maintain changelog
|
||||
|
||||
2. **Discovery**
|
||||
- Submit to official registry
|
||||
- Add topics/tags
|
||||
- Write good descriptions
|
||||
- Include screenshots
|
||||
|
||||
### Private Distribution
|
||||
1. **Internal Teams**
|
||||
- Private Git repositories
|
||||
- VPN-only marketplaces
|
||||
- Access tokens
|
||||
- Audit logs
|
||||
|
||||
2. **Enterprise**
|
||||
- Self-hosted marketplaces
|
||||
- Air-gapped environments
|
||||
- Signed packages
|
||||
- Compliance tracking
|
||||
|
||||
### Hybrid Approach
|
||||
```json
|
||||
{
|
||||
"public_plugins": ["docs-generator", "linter"],
|
||||
"private_plugins": ["deploy-prod", "customer-data"],
|
||||
"visibility": {
|
||||
"docs-generator": "public",
|
||||
"deploy-prod": "internal-only"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Marketplace Features
|
||||
|
||||
### Plugin Search
|
||||
```json
|
||||
{
|
||||
"search": {
|
||||
"enabled": true,
|
||||
"fields": ["name", "description", "tags"],
|
||||
"filters": ["category", "author", "version"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Auto-updates
|
||||
```json
|
||||
{
|
||||
"auto_update": {
|
||||
"enabled": true,
|
||||
"channels": {
|
||||
"stable": "*/releases/latest",
|
||||
"beta": "*/releases/beta",
|
||||
"nightly": "*/commits/main"
|
||||
},
|
||||
"strategy": "minor-only"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Plugin Analytics
|
||||
```json
|
||||
{
|
||||
"analytics": {
|
||||
"track_installs": true,
|
||||
"track_usage": false,
|
||||
"anonymize": true,
|
||||
"retention_days": 90
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
### GitHub Actions
|
||||
```yaml
|
||||
name: Validate and Publish Plugin
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Validate Plugin Structure
|
||||
run: |
|
||||
# Validate JSON files
|
||||
jq . .claude-plugin/plugin.json
|
||||
jq . .claude-plugin/marketplace.json
|
||||
|
||||
- name: Run Tests
|
||||
run: |
|
||||
npm test
|
||||
|
||||
- name: Update Marketplace
|
||||
run: |
|
||||
# Update marketplace.json with new version
|
||||
jq '.plugins[0].version = "${{ github.ref_name }}"' \
  .claude-plugin/marketplace.json > tmp.json
mv tmp.json .claude-plugin/marketplace.json
|
||||
|
||||
- name: Commit Changes
|
||||
run: |
|
||||
git config user.name "GitHub Actions"
git config user.email "actions@github.com"
|
||||
git commit -am "Update marketplace for ${{ github.ref_name }}"
|
||||
git push
|
||||
```
|
||||
|
||||
### GitLab CI
|
||||
```yaml
|
||||
stages:
|
||||
- validate
|
||||
- test
|
||||
- publish
|
||||
|
||||
validate:plugin:
|
||||
stage: validate
|
||||
script:
|
||||
- jq . .claude-plugin/plugin.json
|
||||
- jq . .claude-plugin/marketplace.json
|
||||
|
||||
publish:marketplace:
|
||||
stage: publish
|
||||
only:
|
||||
- tags
|
||||
script:
|
||||
- update-marketplace-version.sh
|
||||
- git push origin main
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **Marketplace not found**
|
||||
```bash
|
||||
# Check marketplace list
|
||||
claude plugin marketplace list
|
||||
|
||||
# Verify URL/path
|
||||
curl https://marketplace.url/.claude-plugin/marketplace.json
|
||||
```
|
||||
|
||||
2. **Plugin installation fails**
|
||||
```bash
|
||||
# Debug mode
|
||||
claude --debug plugin install my-plugin
|
||||
|
||||
# Check permissions
|
||||
ls -la ~/.claude/plugins/
|
||||
```
|
||||
|
||||
3. **Version conflicts**
|
||||
```bash
|
||||
# Force specific version
|
||||
claude plugin install my-plugin@1.2.0
|
||||
|
||||
# Clear cache
|
||||
claude plugin cache clear
|
||||
```
|
||||
|
||||
### Validation Checklist
|
||||
|
||||
- [ ] Valid marketplace.json structure
|
||||
- [ ] All plugin paths exist
|
||||
- [ ] Plugin versions match tags
|
||||
- [ ] Dependencies are specified
|
||||
- [ ] Security manifest included
|
||||
- [ ] Changelog updated
|
||||
- [ ] Documentation current
|
||||
- [ ] Tests passing
|
||||
- [ ] Code signed
|
||||
- [ ] Access permissions set
|
||||
@@ -0,0 +1,631 @@
|
||||
# MCP Server Integration
|
||||
|
||||
Comprehensive guide for integrating Model Context Protocol (MCP) servers with Claude plugins.
|
||||
|
||||
## MCP Overview
|
||||
|
||||
MCP servers provide structured interfaces to external tools and services, enabling Claude to interact with databases, APIs, and other systems through a standardized protocol.
|
||||
|
||||
## Basic Configuration
|
||||
|
||||
### .mcp.json Structure
|
||||
```json
|
||||
{
|
||||
"name": "restaurant-data-server",
|
||||
"version": "1.0.0",
|
||||
"description": "MCP server for restaurant database access",
|
||||
"command": "python",
|
||||
"args": ["servers/restaurant_mcp.py"],
|
||||
"env": {
|
||||
"DATABASE_URL": "${RESTAURANT_DB_URL}",
|
||||
"API_KEY": "${RESTAURANT_API_KEY}"
|
||||
},
|
||||
"capabilities": {
|
||||
"resources": true,
|
||||
"tools": true,
|
||||
"subscriptions": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Server Implementation
|
||||
|
||||
### Python MCP Server
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
# servers/restaurant_mcp.py
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from typing import Any, Dict, List
|
||||
from mcp import MCPServer, Resource, Tool
|
||||
|
||||
class RestaurantMCPServer(MCPServer):
|
||||
def __init__(self):
|
||||
super().__init__("restaurant-data-server")
|
||||
self.setup_tools()
|
||||
self.setup_resources()
|
||||
|
||||
def setup_tools(self):
|
||||
@self.tool("get_sales_data")
|
||||
async def get_sales_data(date: str, location: str = None) -> Dict:
|
||||
"""Retrieve sales data for specified date and location"""
|
||||
# Implementation
|
||||
return {
|
||||
"date": date,
|
||||
"location": location,
|
||||
"total_sales": 15420.50,
|
||||
"transactions": 342
|
||||
}
|
||||
|
||||
@self.tool("update_inventory")
|
||||
async def update_inventory(item_id: str, quantity: int) -> Dict:
|
||||
"""Update inventory levels for an item"""
|
||||
# Implementation
|
||||
return {
|
||||
"item_id": item_id,
|
||||
"new_quantity": quantity,
|
||||
"status": "updated"
|
||||
}
|
||||
|
||||
def setup_resources(self):
|
||||
@self.resource("menu_items")
|
||||
async def get_menu_items() -> List[Resource]:
|
||||
"""List all menu items"""
|
||||
items = await fetch_menu_from_db()
|
||||
return [
|
||||
Resource(
|
||||
id=f"menu_item_{item['id']}",
|
||||
name=item['name'],
|
||||
description=f"Menu item: {item['name']}",
|
||||
metadata={"price": item['price'], "category": item['category']}
|
||||
)
|
||||
for item in items
|
||||
]
|
||||
|
||||
if __name__ == "__main__":
|
||||
server = RestaurantMCPServer()
|
||||
asyncio.run(server.run())
|
||||
```
|
||||
|
||||
### Node.js MCP Server
|
||||
```javascript
|
||||
#!/usr/bin/env node
|
||||
// servers/restaurant_mcp.js
|
||||
|
||||
const { MCPServer, Tool, Resource } = require('@modelcontextprotocol/server');
|
||||
|
||||
class RestaurantMCPServer extends MCPServer {
|
||||
constructor() {
|
||||
super('restaurant-data-server');
|
||||
this.setupTools();
|
||||
this.setupResources();
|
||||
}
|
||||
|
||||
setupTools() {
|
||||
this.registerTool(new Tool({
|
||||
name: 'get_sales_data',
|
||||
description: 'Retrieve sales data',
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
date: { type: 'string', format: 'date' },
|
||||
location: { type: 'string' }
|
||||
},
|
||||
required: ['date']
|
||||
},
|
||||
handler: async ({ date, location }) => {
|
||||
// Implementation
|
||||
return {
|
||||
date,
|
||||
location,
|
||||
total_sales: 15420.50,
|
||||
transactions: 342
|
||||
};
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
setupResources() {
|
||||
this.registerResourceProvider({
|
||||
pattern: /^menu_items$/,
|
||||
handler: async () => {
|
||||
const items = await this.fetchMenuFromDB();
|
||||
return items.map(item => ({
|
||||
id: `menu_item_${item.id}`,
|
||||
name: item.name,
|
||||
content: JSON.stringify(item, null, 2),
|
||||
mimeType: 'application/json'
|
||||
}));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const server = new RestaurantMCPServer();
|
||||
server.start();
|
||||
```
|
||||
|
||||
## Tool Definitions
|
||||
|
||||
### Tool Schema
|
||||
```json
|
||||
{
|
||||
"name": "analyze_customer_feedback",
|
||||
"description": "Analyze customer feedback sentiment",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"feedback_id": {
|
||||
"type": "string",
|
||||
"description": "Unique feedback identifier"
|
||||
},
|
||||
"include_suggestions": {
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
"description": "Include improvement suggestions"
|
||||
}
|
||||
},
|
||||
"required": ["feedback_id"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Complex Tool Example
|
||||
```python
|
||||
@server.tool("generate_report")
|
||||
async def generate_report(
|
||||
report_type: str,
|
||||
start_date: str,
|
||||
end_date: str,
|
||||
format: str = "pdf",
|
||||
filters: Dict[str, Any] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Generate comprehensive business report
|
||||
|
||||
Args:
|
||||
report_type: Type of report (sales, inventory, customer)
|
||||
start_date: Report start date (YYYY-MM-DD)
|
||||
end_date: Report end date (YYYY-MM-DD)
|
||||
format: Output format (pdf, excel, json)
|
||||
filters: Additional filters to apply
|
||||
|
||||
Returns:
|
||||
Report data and download URL
|
||||
"""
|
||||
# Validate inputs
|
||||
if not validate_date_range(start_date, end_date):
|
||||
raise ValueError("Invalid date range")
|
||||
|
||||
# Generate report
|
||||
report_data = await compile_report_data(
|
||||
report_type, start_date, end_date, filters
|
||||
)
|
||||
|
||||
# Format output
|
||||
if format == "pdf":
|
||||
url = await generate_pdf_report(report_data)
|
||||
elif format == "excel":
|
||||
url = await generate_excel_report(report_data)
|
||||
else:
|
||||
url = await save_json_report(report_data)
|
||||
|
||||
return {
|
||||
"report_type": report_type,
|
||||
"period": f"{start_date} to {end_date}",
|
||||
"download_url": url,
|
||||
"summary": report_data.get("summary", {})
|
||||
}
|
||||
```
|
||||
|
||||
## Resource Management
|
||||
|
||||
### Static Resources
|
||||
```python
|
||||
@server.resource("config/database_schema")
|
||||
async def get_database_schema() -> Resource:
|
||||
"""Provide database schema documentation"""
|
||||
schema = load_schema_file()
|
||||
return Resource(
|
||||
id="database_schema",
|
||||
name="Restaurant Database Schema",
|
||||
content=schema,
|
||||
mimeType="text/markdown"
|
||||
)
|
||||
```
|
||||
|
||||
### Dynamic Resources
|
||||
```python
|
||||
@server.resource_pattern(r"^orders/(\d{4}-\d{2}-\d{2})$")
|
||||
async def get_daily_orders(date: str) -> List[Resource]:
|
||||
"""Get orders for a specific date"""
|
||||
orders = await fetch_orders_by_date(date)
|
||||
return [
|
||||
Resource(
|
||||
id=f"order_{order['id']}",
|
||||
name=f"Order #{order['number']}",
|
||||
content=json.dumps(order, indent=2),
|
||||
mimeType="application/json",
|
||||
metadata={
|
||||
"customer": order['customer_name'],
|
||||
"total": order['total_amount'],
|
||||
"status": order['status']
|
||||
}
|
||||
)
|
||||
for order in orders
|
||||
]
|
||||
```
|
||||
|
||||
### Subscription Resources
|
||||
```python
|
||||
@server.subscription("live_orders")
|
||||
async def subscribe_to_orders(callback):
|
||||
"""Subscribe to live order updates"""
|
||||
async def order_handler(order):
|
||||
await callback(Resource(
|
||||
id=f"live_order_{order['id']}",
|
||||
name=f"New Order #{order['number']}",
|
||||
content=json.dumps(order),
|
||||
mimeType="application/json"
|
||||
))
|
||||
|
||||
# Register handler with order system
|
||||
order_system.on_new_order(order_handler)
|
||||
|
||||
# Return unsubscribe function
|
||||
return lambda: order_system.off_new_order(order_handler)
|
||||
```
|
||||
|
||||
## Security Implementation
|
||||
|
||||
### Authentication
|
||||
```python
|
||||
class SecureRestaurantServer(MCPServer):
|
||||
def __init__(self):
|
||||
super().__init__("secure-restaurant-server")
|
||||
self.auth_token = os.environ.get("MCP_AUTH_TOKEN")
|
||||
|
||||
async def authenticate(self, request):
|
||||
"""Validate authentication token"""
|
||||
token = request.headers.get("Authorization")
|
||||
if not token or token != f"Bearer {self.auth_token}":
|
||||
raise AuthenticationError("Invalid token")
|
||||
|
||||
async def handle_request(self, request):
|
||||
await self.authenticate(request)
|
||||
return await super().handle_request(request)
|
||||
```
|
||||
|
||||
### Input Validation
|
||||
```python
|
||||
@server.tool("update_menu_item")
|
||||
async def update_menu_item(item_id: str, updates: Dict) -> Dict:
|
||||
"""Securely update menu item"""
|
||||
# Validate item_id format
|
||||
if not re.match(r"^[A-Z0-9]{8}$", item_id):
|
||||
raise ValueError("Invalid item ID format")
|
||||
|
||||
# Validate allowed fields
|
||||
allowed_fields = {"name", "price", "description", "category"}
|
||||
invalid_fields = set(updates.keys()) - allowed_fields
|
||||
if invalid_fields:
|
||||
raise ValueError(f"Invalid fields: {invalid_fields}")
|
||||
|
||||
# Validate data types
|
||||
if "price" in updates:
|
||||
if not isinstance(updates["price"], (int, float)):
|
||||
raise TypeError("Price must be numeric")
|
||||
if updates["price"] < 0:
|
||||
raise ValueError("Price cannot be negative")
|
||||
|
||||
# Apply updates
|
||||
result = await db.update_menu_item(item_id, updates)
|
||||
return {"status": "success", "updated": result}
|
||||
```
|
||||
|
||||
### Rate Limiting
|
||||
```python
|
||||
from functools import wraps
|
||||
import time
|
||||
|
||||
def rate_limit(max_calls=10, time_window=60):
|
||||
calls = {}
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
async def wrapper(self, *args, **kwargs):
|
||||
client_id = kwargs.get('client_id', 'default')
|
||||
now = time.time()
|
||||
|
||||
# Clean old calls
|
||||
calls[client_id] = [
|
||||
t for t in calls.get(client_id, [])
|
||||
if now - t < time_window
|
||||
]
|
||||
|
||||
# Check rate limit
|
||||
if len(calls[client_id]) >= max_calls:
|
||||
raise RateLimitError(f"Rate limit exceeded: {max_calls}/{time_window}s")
|
||||
|
||||
# Record call
|
||||
calls[client_id].append(now)
|
||||
|
||||
# Execute function
|
||||
return await func(self, *args, **kwargs)
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
# Usage
|
||||
@server.tool("expensive_operation")
|
||||
@rate_limit(max_calls=5, time_window=300)
|
||||
async def expensive_operation(data: str) -> Dict:
|
||||
"""Rate-limited expensive operation"""
|
||||
result = await perform_expensive_calculation(data)
|
||||
return {"result": result}
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
### Graceful Errors
|
||||
```python
|
||||
@server.tool("process_order")
async def process_order(order_data: Dict) -> Dict:
    """Validate and process a customer order, returning a structured result.

    Always returns a dict with "status" of "success" or "error"; error
    responses carry a machine-readable "error_code" plus a human-readable
    "message", so callers never need to catch exceptions from this tool.
    """
    try:
        # Validate order
        validation_result = validate_order(order_data)
        if not validation_result.is_valid:
            # Validation failures are expected input errors, not exceptions.
            return {
                "status": "error",
                "error_code": "INVALID_ORDER",
                "message": validation_result.message,
                "fields": validation_result.invalid_fields
            }

        # Process order
        result = await order_processor.process(order_data)
        return {
            "status": "success",
            "order_id": result.order_id,
            "estimated_time": result.estimated_time
        }

    except InventoryError as e:
        # Known business failure: tell the caller what is still available.
        return {
            "status": "error",
            "error_code": "INSUFFICIENT_INVENTORY",
            "message": str(e),
            "available_items": e.available_items
        }
    except Exception as e:
        # Log unexpected errors
        logger.error(f"Unexpected error: {e}")
        # Deliberately generic: do not leak internal details to the caller.
        return {
            "status": "error",
            "error_code": "INTERNAL_ERROR",
            "message": "An unexpected error occurred"
        }
|
||||
```
|
||||
|
||||
### Error Recovery
|
||||
```python
|
||||
class ResilientMCPServer(MCPServer):
    """MCP server that transparently re-establishes its database connection.

    Every tool call goes through ensure_connection(), which retries with
    exponential backoff before giving up.
    """

    # Connection attempts before declaring the service unavailable.
    MAX_ATTEMPTS = 3

    def __init__(self):
        super().__init__("resilient-server")
        self.db = None  # lazily created database connection
        self.reconnect_attempts = 0  # failed attempts since last success

    async def ensure_connection(self):
        """Ensure database connection with retry logic.

        Raises:
            ServiceUnavailableError: after MAX_ATTEMPTS consecutive failures.
        """
        if self.db and self.db.is_connected():
            return

        for attempt in range(self.MAX_ATTEMPTS):
            try:
                self.db = await create_db_connection()
                self.reconnect_attempts = 0
                return
            except ConnectionError:
                # BUG FIX: this counter was reset on success but never
                # incremented on failure, so it always read 0.
                self.reconnect_attempts += 1
                if attempt < self.MAX_ATTEMPTS - 1:
                    # Exponential backoff: 1s, 2s, ... Skip the pointless
                    # sleep after the final failed attempt.
                    await asyncio.sleep(2 ** attempt)

        raise ServiceUnavailableError("Cannot connect to database")

    async def handle_tool_call(self, tool_name, params):
        # Reconnect (if needed) before delegating to the base implementation.
        await self.ensure_connection()
        return await super().handle_tool_call(tool_name, params)
|
||||
```
|
||||
|
||||
## Testing MCP Servers
|
||||
|
||||
### Unit Testing
|
||||
```python
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_sales_data():
    """Unit test: the get_sales_data tool aggregates rows from the database.

    The database layer is replaced with an AsyncMock so only the tool's own
    logic is exercised — no real connection is made.
    """
    server = RestaurantMCPServer()
    server.db = AsyncMock()
    # One fake row is enough to verify the aggregation path.
    server.db.query.return_value = [
        {"date": "2024-01-15", "total": 1000}
    ]

    result = await server.tools["get_sales_data"](
        date="2024-01-15",
        location="main"
    )

    assert result["total_sales"] == 1000
    # The tool must hit the database exactly once per call.
    server.db.query.assert_called_once()
|
||||
```
|
||||
|
||||
### Integration Testing
|
||||
```python
|
||||
async def test_mcp_server_integration():
    """End-to-end check: a real server instance answering a real client call."""
    # Start test server
    server = RestaurantMCPServer()
    test_port = 8765
    await server.start(port=test_port)

    try:
        # Create client
        client = MCPClient(f"http://localhost:{test_port}")

        # Test tool call
        result = await client.call_tool(
            "get_sales_data",
            {"date": "2024-01-15"}
        )

        assert result["status"] == "success"
    finally:
        # BUG FIX: cleanup previously ran only on success; a failing
        # assertion leaked the running server (and its port) into later tests.
        await server.stop()
|
||||
```
|
||||
|
||||
### Mock Server for Development
|
||||
```javascript
|
||||
// servers/mock_restaurant_mcp.js
//
// Stand-in MCP server for local development: answers the same tool calls
// as the real restaurant server but with random data, so no database or
// credentials are required.
class MockRestaurantServer extends MCPServer {
    constructor() {
        super('mock-restaurant-server');
        this.setupMockTools();
    }

    // Register fake implementations of the production tool surface.
    setupMockTools() {
        this.registerTool({
            name: 'get_sales_data',
            // Returns plausible-looking random figures for the given date;
            // values change on every call by design.
            handler: async ({ date }) => ({
                date,
                total_sales: Math.random() * 10000,
                transactions: Math.floor(Math.random() * 500)
            })
        });
    }
}
|
||||
```
|
||||
|
||||
## Performance Optimization
|
||||
|
||||
### Caching
|
||||
```python
|
||||
from functools import lru_cache
|
||||
from cachetools import TTLCache
|
||||
|
||||
class CachedMCPServer(MCPServer):
    """MCP server that memoizes analytics results in a TTL cache.

    Repeated queries for the same date range within the TTL window are
    served from memory instead of being recomputed.
    """

    def __init__(self):
        super().__init__("cached-server")
        # Up to 100 cached results, each valid for 5 minutes.
        self.cache = TTLCache(maxsize=100, ttl=300)

    # NOTE(review): `@server.tool` references a module-level `server` object
    # not defined in this snippet, and it decorates a bound method — confirm
    # how tool registration actually works in the real codebase.
    @server.tool("get_analytics")
    async def get_analytics(self, date_range: str) -> Dict:
        """Return analytics for date_range, serving from cache when fresh."""
        # Check cache
        cache_key = f"analytics_{date_range}"
        if cache_key in self.cache:
            return self.cache[cache_key]

        # Compute analytics
        result = await self.compute_analytics(date_range)

        # Store in cache
        self.cache[cache_key] = result
        return result
|
||||
```
|
||||
|
||||
### Connection Pooling
|
||||
```python
|
||||
import asyncpg
|
||||
|
||||
class PooledMCPServer(MCPServer):
    """MCP server backed by an asyncpg connection pool.

    Call initialize() once at startup; query() fails before that because
    the pool does not exist yet.
    """

    def __init__(self):
        super().__init__("pooled-server")
        # Created lazily in initialize().
        self.db_pool = None

    async def initialize(self):
        """Open the PostgreSQL pool. Requires DB_PASSWORD in the environment."""
        self.db_pool = await asyncpg.create_pool(
            database="restaurant_db",
            user="mcp_user",
            password=os.environ["DB_PASSWORD"],  # KeyError if unset — fail fast
            host="localhost",
            port=5432,
            # Keep 5 warm connections; burst up to 20 under load.
            min_size=5,
            max_size=20
        )

    async def query(self, sql, *args):
        """Run a parameterized query on a pooled connection and return rows."""
        async with self.db_pool.acquire() as conn:
            return await conn.fetch(sql, *args)
|
||||
```
|
||||
|
||||
## Deployment
|
||||
|
||||
### Docker Configuration
|
||||
```dockerfile
|
||||
# Dockerfile
|
||||
FROM python:3.11-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY servers/ ./servers/
|
||||
COPY .mcp.json .
|
||||
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
CMD ["python", "servers/restaurant_mcp.py"]
|
||||
```
|
||||
|
||||
### Systemd Service
|
||||
```ini
|
||||
# /etc/systemd/system/restaurant-mcp.service
|
||||
[Unit]
|
||||
Description=Restaurant MCP Server
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=mcp
|
||||
WorkingDirectory=/opt/restaurant-mcp
|
||||
Environment="DATABASE_URL=postgresql://localhost/restaurant"
|
||||
ExecStart=/usr/bin/python3 /opt/restaurant-mcp/servers/restaurant_mcp.py
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
### Health Checks
|
||||
```python
|
||||
@server.tool("health_check")
async def health_check() -> Dict:
    """MCP server health check endpoint.

    Returns:
        Dict with an overall "status" ("healthy"/"unhealthy"), per-dependency
        "checks", and an ISO-8601 "timestamp".
    """
    checks = {
        "server": "ok",        # if this handler runs at all, the server is up
        "database": "unknown",
        "cache": "unknown"
    }

    # Check database
    try:
        await db.execute("SELECT 1")
        checks["database"] = "ok"
    except Exception:
        # BUG FIX: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; Exception is the widest safe net here.
        checks["database"] = "error"

    # Check cache
    try:
        cache.get("test")
        checks["cache"] = "ok"
    except Exception:
        checks["cache"] = "error"

    # Healthy only when every subsystem reports ok.
    overall_status = "healthy" if all(
        v == "ok" for v in checks.values()
    ) else "unhealthy"

    return {
        "status": overall_status,
        "checks": checks,
        "timestamp": datetime.now().isoformat()
    }
|
||||
```
|
||||
@@ -0,0 +1,489 @@
|
||||
# Performance Optimization Guide
|
||||
|
||||
Comprehensive guide for optimizing Claude plugin performance.
|
||||
|
||||
## Startup Performance
|
||||
|
||||
### Lazy Loading
|
||||
```json
|
||||
// plugin.json
|
||||
{
|
||||
"config": {
|
||||
"lazy_load": true,
|
||||
"preload_commands": ["help", "version"],
|
||||
"defer_agents": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Minimal Dependencies
|
||||
```python
|
||||
# ❌ Bad: Import everything upfront
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
import requests
|
||||
|
||||
# ✅ Good: Import when needed
|
||||
def analyze_data(file_path):
    """Load a CSV into a DataFrame, importing pandas only on first use.

    The deferred import keeps plugin startup fast when this function is
    never called.
    """
    import pandas as pd  # Import only when function is called
    return pd.read_csv(file_path)
|
||||
```
|
||||
|
||||
### Command Organization
|
||||
```
|
||||
# ❌ Bad: Many small files
|
||||
commands/
|
||||
├── create-user.md
|
||||
├── delete-user.md
|
||||
├── update-user.md
|
||||
├── list-users.md
|
||||
└── ... (20 more files)
|
||||
|
||||
# ✅ Good: Grouped commands
|
||||
commands/
|
||||
├── user/
|
||||
│ ├── create.md
|
||||
│ ├── delete.md
|
||||
│ ├── update.md
|
||||
│ └── list.md
|
||||
└── _index.md
|
||||
```
|
||||
|
||||
## Command Execution
|
||||
|
||||
### Async Operations
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
# scripts/async_deploy.py
|
||||
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import aiofiles
|
||||
|
||||
async def deploy_services(services):
    """Deploy multiple services concurrently.

    Failures are reported per service instead of aborting the whole batch:
    return_exceptions=True makes gather() hand back exceptions as values
    rather than raising on the first error.
    """
    tasks = [deploy_single(service) for service in services]
    results = await asyncio.gather(*tasks, return_exceptions=True)

    # results is position-aligned with services, so zip pairs them up.
    for service, result in zip(services, results):
        if isinstance(result, Exception):
            print(f"Failed to deploy {service}: {result}")
        else:
            print(f"Successfully deployed {service}")

async def deploy_single(service):
    """POST a deploy request for one service and return the JSON response."""
    async with aiohttp.ClientSession() as session:
        async with session.post(f"https://api.deploy.com/{service}") as resp:
            return await resp.json()
|
||||
```
|
||||
|
||||
### Caching Strategies
|
||||
```python
|
||||
# scripts/cached_operations.py
|
||||
|
||||
import functools
|
||||
import time
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
CACHE_DIR = Path("${CLAUDE_PLUGIN_ROOT}/.cache")
|
||||
CACHE_DIR.mkdir(exist_ok=True)
|
||||
|
||||
def timed_cache(seconds=300):
    """Cache function results for a fixed number of seconds.

    Results are stored in a per-function in-memory dict keyed by the repr
    of the call arguments, so arguments must have a stable str() form.

    Args:
        seconds: How long a cached result stays valid.
    """
    def decorator(func):
        cache = {}  # key -> (result, timestamp)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = f"{func.__name__}:{args}:{kwargs}"
            now = time.time()

            if key in cache:
                result, timestamp = cache[key]
                if now - timestamp < seconds:
                    return result
                # BUG FIX: expired entries previously lingered forever, so
                # the cache grew without bound under a churning key set.
                # (Keys that are never requested again still accumulate
                # until their next lookup — acceptable for this use.)
                del cache[key]

            result = func(*args, **kwargs)
            cache[key] = (result, now)
            return result

        return wrapper
    return decorator
|
||||
|
||||
@timed_cache(seconds=600)
|
||||
def expensive_api_call(endpoint):
|
||||
"""Cached API call - results valid for 10 minutes."""
|
||||
# Implementation
|
||||
pass
|
||||
```
|
||||
|
||||
### Stream Processing
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# hooks/process_large_file.sh
|
||||
|
||||
# ❌ Bad: Load entire file
|
||||
content=$(cat "$LARGE_FILE")
|
||||
processed=$(echo "$content" | process_command)
|
||||
|
||||
# ✅ Good: Stream processing
|
||||
process_command < "$LARGE_FILE" > "$OUTPUT_FILE"
|
||||
|
||||
# For line-by-line processing
|
||||
while IFS= read -r line; do
|
||||
process_line "$line"
|
||||
done < "$LARGE_FILE"
|
||||
```
|
||||
|
||||
## Memory Management
|
||||
|
||||
### Resource Cleanup
|
||||
```python
|
||||
# scripts/resource_manager.py
|
||||
|
||||
import contextlib
|
||||
import tempfile
|
||||
import shutil
|
||||
|
||||
@contextlib.contextmanager
def temp_workspace():
    """Yield a throwaway directory that is removed when the block exits.

    Cleanup runs even when the body raises; removal errors are ignored so
    teardown can never mask the original exception.
    """
    workspace = tempfile.mkdtemp(prefix="claude_plugin_")
    try:
        yield workspace
    finally:
        shutil.rmtree(workspace, ignore_errors=True)
|
||||
|
||||
# Usage
|
||||
def process_files(files):
    """Process each file inside a temporary workspace that cleans itself up."""
    with temp_workspace() as workspace:
        # All files in workspace are automatically deleted
        for file in files:
            process_in_workspace(file, workspace)
|
||||
```
|
||||
|
||||
### Efficient Data Structures
|
||||
```python
|
||||
# ❌ Bad: Multiple passes over data
|
||||
def analyze_logs(log_file):
|
||||
lines = open(log_file).readlines()
|
||||
|
||||
error_count = sum(1 for line in lines if "ERROR" in line)
|
||||
warning_count = sum(1 for line in lines if "WARNING" in line)
|
||||
info_count = sum(1 for line in lines if "INFO" in line)
|
||||
|
||||
# ✅ Good: Single pass
|
||||
def analyze_logs(log_file):
    """Tally ERROR/WARNING/INFO occurrences in a single pass over the file.

    Each line is attributed to at most one level — the first one (in
    ERROR, WARNING, INFO order) whose tag appears in the line.
    """
    tallies = {"ERROR": 0, "WARNING": 0, "INFO": 0}

    with open(log_file) as handle:
        for record in handle:
            matched = next((lvl for lvl in tallies if lvl in record), None)
            if matched is not None:
                tallies[matched] += 1

    return tallies
|
||||
```
|
||||
|
||||
### Generator Functions
|
||||
```python
|
||||
# scripts/data_processor.py
|
||||
|
||||
def process_large_dataset(file_path):
    """Process a large dataset lazily, one fixed-size chunk at a time."""
    def iter_chunks(path, chunk_size=1024 * 1024):
        """Yield the file's bytes in chunk_size pieces to cap memory use."""
        with open(path, 'rb') as handle:
            # iter() with a sentinel keeps calling read() until it
            # returns the empty bytes object (end of file).
            yield from iter(lambda: handle.read(chunk_size), b'')

    for piece in iter_chunks(file_path):
        process_chunk(piece)
|
||||
```
|
||||
|
||||
## Hook Performance
|
||||
|
||||
### Debouncing File Changes
|
||||
```javascript
|
||||
// hooks/debounced_compiler.js
//
// Debounce rapid file-change events so the (expensive) style compilation
// runs once per burst of edits instead of once per keystroke.

// Standard trailing-edge debounce: each call postpones `func` by `wait`
// milliseconds; only the last call in a burst actually executes, with the
// arguments from that last call.
const debounce = (func, wait) => {
    let timeout;
    return function executedFunction(...args) {
        const later = () => {
            clearTimeout(timeout);
            func(...args);
        };
        clearTimeout(timeout);
        timeout = setTimeout(later, wait);
    };
};

// Recompile at most once per second of quiet time.
const compileStyles = debounce(() => {
    console.log('Compiling styles...');
    // Compilation logic
}, 1000);

// File change handler
// The hook runner passes the changed path via CHANGED_FILE; no-op when absent.
process.env.CHANGED_FILE && compileStyles(process.env.CHANGED_FILE);
|
||||
```
|
||||
|
||||
### Selective Processing
|
||||
```bash
|
||||
#!/bin/bash
# hooks/smart_formatter.sh
#
# Format only what changed: when git is available, extract just the added
# lines from the diff and format those; otherwise fall back to formatting
# the whole file. Expects CHANGED_FILE in the environment.

# Only process changed parts
if command -v git >/dev/null 2>&1; then
    # Get only modified lines.
    # BUG FIX: the old pattern '^\+[^+]' silently skipped added lines that
    # are empty or that start with a literal '+'. Match every '+' line,
    # then drop only the '+++' file header.
    git diff --unified=0 "$CHANGED_FILE" | \
        grep -E '^\+' | \
        grep -vE '^\+\+\+' | \
        sed 's/^+//' > changed_lines.tmp

    # Process only changed content
    format_lines < changed_lines.tmp
else
    # Fallback to full file
    format_file "$CHANGED_FILE"
fi
|
||||
```
|
||||
|
||||
## Network Optimization
|
||||
|
||||
### Connection Pooling
|
||||
```python
|
||||
# scripts/api_client.py
|
||||
|
||||
import requests
|
||||
from urllib3.util.retry import Retry
|
||||
from requests.adapters import HTTPAdapter
|
||||
|
||||
class OptimizedAPIClient:
    """requests.Session wrapper with retries and connection pooling.

    A single shared Session reuses TCP/TLS connections across calls and
    transparently retries transient failures.
    """

    def __init__(self):
        self.session = requests.Session()

        # Configure retry strategy: up to 3 retries with exponential
        # backoff, only for statuses indicating a transient server-side
        # or throttling problem.
        retry_strategy = Retry(
            total=3,
            backoff_factor=1,
            status_forcelist=[429, 500, 502, 503, 504],
        )

        # Configure connection pooling
        adapter = HTTPAdapter(
            max_retries=retry_strategy,
            pool_connections=10,  # distinct host pools to keep around
            pool_maxsize=20       # connections per host pool
        )

        # Same adapter for both schemes so behavior is uniform.
        self.session.mount("https://", adapter)
        self.session.mount("http://", adapter)

    def get(self, url, **kwargs):
        """GET through the pooled, retrying session."""
        return self.session.get(url, **kwargs)

# Global client instance
# Module-level singleton so every caller shares one connection pool.
api_client = OptimizedAPIClient()
|
||||
```
|
||||
|
||||
### Parallel Downloads
|
||||
```python
|
||||
# scripts/parallel_downloader.py
|
||||
|
||||
import asyncio
|
||||
import aiohttp
|
||||
import aiofiles
|
||||
from pathlib import Path
|
||||
|
||||
async def download_files(urls, output_dir, max_concurrent=5):
    """Download multiple files concurrently.

    Args:
        urls: Iterable of URLs; each is saved under its URL basename.
        output_dir: Destination directory (created if missing).
        max_concurrent: Upper bound on simultaneous in-flight downloads.

    Returns:
        List of pathlib.Path objects, one per downloaded file, in url order.
    """
    output_dir = Path(output_dir)
    output_dir.mkdir(exist_ok=True)

    # The semaphore caps concurrent requests; tasks are still all created
    # up front and simply wait their turn.
    semaphore = asyncio.Semaphore(max_concurrent)

    async def download_one(session, url):
        async with semaphore:
            # NOTE(review): URLs sharing a basename overwrite each other —
            # confirm callers guarantee unique names.
            filename = output_dir / Path(url).name
            async with session.get(url) as response:
                async with aiofiles.open(filename, 'wb') as f:
                    # Stream in 8 KiB chunks to keep memory usage flat.
                    async for chunk in response.content.iter_chunked(8192):
                        await f.write(chunk)
            return filename

    async with aiohttp.ClientSession() as session:
        tasks = [download_one(session, url) for url in urls]
        return await asyncio.gather(*tasks)
|
||||
```
|
||||
|
||||
## Profiling & Monitoring
|
||||
|
||||
### Performance Metrics
|
||||
```python
|
||||
# scripts/performance_monitor.py
|
||||
|
||||
import time
|
||||
import functools
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
METRICS_FILE = "${CLAUDE_PLUGIN_ROOT}/.metrics.json"
|
||||
|
||||
def track_performance(func):
    """Decorator to track function performance.

    Appends one JSON object per call (function name, wall-clock duration,
    timestamp, rough argument/result sizes) to METRICS_FILE, one object
    per line (JSON Lines format).
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # BUG FIX: time.time() is wall-clock and can jump (NTP, DST), so
        # durations could come out negative; perf_counter is monotonic.
        start_time = time.perf_counter()
        result = func(*args, **kwargs)
        end_time = time.perf_counter()

        # Record metrics
        metrics = {
            "function": func.__name__,
            "duration": end_time - start_time,
            "timestamp": datetime.now().isoformat(),
            # len(str(...)) is only a rough proxy for payload size.
            "args_size": len(str(args)),
            "result_size": len(str(result))
        }

        # Append to metrics file
        with open(METRICS_FILE, 'a') as f:
            json.dump(metrics, f)
            f.write('\n')

        return result
    return wrapper
|
||||
```
|
||||
|
||||
### Memory Profiling
|
||||
```python
|
||||
# scripts/memory_profiler.py
|
||||
|
||||
import tracemalloc
|
||||
import functools
|
||||
|
||||
def profile_memory(func):
    """Decorator that prints current/peak allocation for each call.

    Uses tracemalloc, so only Python-level allocations made while the
    wrapped function runs are counted.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        tracemalloc.start()

        outcome = func(*args, **kwargs)

        cur_bytes, peak_bytes = tracemalloc.get_traced_memory()
        tracemalloc.stop()

        # Report in megabytes, two decimal places.
        cur_mb = cur_bytes / 1024 / 1024
        peak_mb = peak_bytes / 1024 / 1024
        print(f"{func.__name__} memory usage:")
        print(f"  Current: {cur_mb:.2f} MB")
        print(f"  Peak: {peak_mb:.2f} MB")

        return outcome
    return wrapper
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### 1. Measure Before Optimizing
|
||||
```bash
|
||||
# Time command execution
|
||||
time claude /my-plugin slow-command
|
||||
|
||||
# Profile Python scripts
|
||||
python -m cProfile -s cumtime scripts/my_script.py
|
||||
|
||||
# Memory usage
|
||||
/usr/bin/time -v claude /my-plugin memory-heavy-command
|
||||
```
|
||||
|
||||
### 2. Progressive Enhancement
|
||||
```json
|
||||
// plugin.json
|
||||
{
|
||||
"features": {
|
||||
"basic": ["core-command"],
|
||||
"enhanced": ["advanced-features"],
|
||||
"premium": ["ai-powered-analysis"]
|
||||
},
|
||||
"config": {
|
||||
"feature_detection": true,
|
||||
"fallback_mode": "basic"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Resource Limits
|
||||
```python
|
||||
# scripts/resource_limited.py
|
||||
|
||||
import resource
|
||||
import signal
|
||||
|
||||
def limit_resources():
    """Set resource limits for safety.

    Caps the process address space at 1 GB and CPU time at 60 s, and arms
    a 60-second SIGALRM that raises TimeoutError in the main thread.
    POSIX-only: the resource module and SIGALRM are unavailable on Windows.
    """
    # Limit memory to 1GB (soft and hard limits identical, so the process
    # cannot raise the cap again later).
    resource.setrlimit(
        resource.RLIMIT_AS,
        (1024 * 1024 * 1024, 1024 * 1024 * 1024)
    )

    # Limit CPU time to 60 seconds
    resource.setrlimit(
        resource.RLIMIT_CPU,
        (60, 60)
    )

    # Set timeout handler
    def timeout_handler(signum, frame):
        # Runs in the main thread when the alarm fires.
        raise TimeoutError("Operation timed out")

    signal.signal(signal.SIGALRM, timeout_handler)
    signal.alarm(60)
|
||||
|
||||
# Use in scripts
|
||||
if __name__ == "__main__":
|
||||
limit_resources()
|
||||
main()
|
||||
```
|
||||
|
||||
### 4. Efficient File Operations
|
||||
```python
|
||||
# scripts/efficient_file_ops.py
|
||||
|
||||
import mmap
|
||||
import os
|
||||
|
||||
def search_in_large_file(file_path, search_term):
    """Search a large file via memory mapping, without reading it into RAM.

    Returns up to 200 bytes of decoded context around the first occurrence
    of search_term, or None when the term does not occur.
    """
    needle = search_term.encode()
    with open(file_path, 'rb') as handle:
        with mmap.mmap(handle.fileno(), 0, access=mmap.ACCESS_READ) as view:
            hit = view.find(needle)
            if hit == -1:
                return None

            # Found - grab up to 100 bytes of context on each side.
            lo = max(0, hit - 100)
            hi = min(len(view), hit + 100)
            return view[lo:hi].decode('utf-8', errors='ignore')
|
||||
```
|
||||
|
||||
## Performance Checklist
|
||||
|
||||
- [ ] Commands load in < 100ms
|
||||
- [ ] Startup time < 500ms
|
||||
- [ ] Memory usage < 100MB for basic operations
|
||||
- [ ] No blocking operations in main thread
|
||||
- [ ] Proper cleanup of temporary files
|
||||
- [ ] Connection pooling for network requests
|
||||
- [ ] Caching for expensive operations
|
||||
- [ ] Progressive loading of features
|
||||
- [ ] Resource limits configured
|
||||
- [ ] Performance metrics tracked
|
||||
Reference in New Issue
Block a user