refactor: bundle MCP servers inside plugins for cache compatibility
Claude Code only caches the plugin directory when installed from a
marketplace, not parent directories. This broke the shared mcp-servers/
architecture because relative paths like ../../mcp-servers/ resolved
to non-existent locations in the cache.
Changes:
- Move gitea and wikijs MCP servers into plugins/projman/mcp-servers/
- Move netbox MCP server into plugins/cmdb-assistant/mcp-servers/
- Update .mcp.json files to use ${CLAUDE_PLUGIN_ROOT}/mcp-servers/
- Update setup.sh to handle new bundled structure
- Add netbox.env config template to setup.sh
- Update CLAUDE.md and CANONICAL-PATHS.md documentation
This ensures plugins work correctly when installed and cached.
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,3 @@
|
||||
"""Wiki.js MCP Server for Claude Code."""

# Package version; bump alongside plugin releases.
__version__ = "0.1.0"
|
||||
102
plugins/projman/mcp-servers/wikijs/mcp_server/config.py
Normal file
102
plugins/projman/mcp-servers/wikijs/mcp_server/config.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""
|
||||
Configuration loader for Wiki.js MCP Server.
|
||||
|
||||
Implements hybrid configuration system:
|
||||
- System-level: ~/.config/claude/wikijs.env (credentials)
|
||||
- Project-level: .env (project path specification)
|
||||
"""
|
||||
from pathlib import Path
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
import logging
|
||||
from typing import Dict, Optional
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WikiJSConfig:
    """Hybrid configuration loader with mode detection.

    Merges system-level credentials (~/.config/claude/wikijs.env) with an
    optional project-level .env in the current working directory; values
    from the project file win on conflict.
    """

    def __init__(self):
        # All fields are populated by load(); None until then.
        self.api_url: Optional[str] = None
        self.api_token: Optional[str] = None
        self.base_path: Optional[str] = None
        self.project: Optional[str] = None
        self.mode: str = 'project'

    def load(self) -> Dict[str, Optional[str]]:
        """
        Load configuration from system and project levels.

        Project-level configuration overrides system-level.

        Returns:
            Dict containing api_url, api_token, base_path, project, mode

        Raises:
            FileNotFoundError: If system config is missing
            ValueError: If required configuration is missing
        """
        # System-level credentials are mandatory; fail fast when absent.
        system_env = Path.home() / '.config' / 'claude' / 'wikijs.env'
        if not system_env.exists():
            raise FileNotFoundError(
                f"System config not found: {system_env}\n"
                "Create it with: mkdir -p ~/.config/claude && "
                "cat > ~/.config/claude/wikijs.env"
            )
        load_dotenv(system_env)
        logger.info(f"Loaded system configuration from {system_env}")

        # Project-level .env (if present) overrides system values.
        project_env = Path.cwd() / '.env'
        if project_env.exists():
            load_dotenv(project_env, override=True)
            logger.info(f"Loaded project configuration from {project_env}")

        # Pull the effective values out of the process environment.
        self.api_url = os.getenv('WIKIJS_API_URL')
        self.api_token = os.getenv('WIKIJS_API_TOKEN')
        self.base_path = os.getenv('WIKIJS_BASE_PATH')
        self.project = os.getenv('WIKIJS_PROJECT')  # Optional for PMO

        # A project name selects project mode; otherwise company-wide (PMO).
        if self.project:
            self.mode = 'project'
            logger.info(f"Running in project mode: {self.project}")
        else:
            self.mode = 'company'
            logger.info("Running in company-wide mode (PMO)")

        self._validate()

        return {
            'api_url': self.api_url,
            'api_token': self.api_token,
            'base_path': self.base_path,
            'project': self.project,
            'mode': self.mode,
        }

    def _validate(self) -> None:
        """
        Validate that required configuration is present.

        Raises:
            ValueError: If required configuration is missing
        """
        required = {
            'WIKIJS_API_URL': self.api_url,
            'WIKIJS_API_TOKEN': self.api_token,
            'WIKIJS_BASE_PATH': self.base_path,
        }

        missing = [name for name, value in required.items() if not value]
        if missing:
            raise ValueError(
                f"Missing required configuration: {', '.join(missing)}\n"
                "Check your ~/.config/claude/wikijs.env file"
            )
|
||||
385
plugins/projman/mcp-servers/wikijs/mcp_server/server.py
Normal file
385
plugins/projman/mcp-servers/wikijs/mcp_server/server.py
Normal file
@@ -0,0 +1,385 @@
|
||||
"""
|
||||
MCP Server entry point for Wiki.js integration.
|
||||
|
||||
Provides Wiki.js tools to Claude Code via JSON-RPC 2.0 over stdio.
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
import json
|
||||
from mcp.server import Server
|
||||
from mcp.server.stdio import stdio_server
|
||||
from mcp.types import Tool, TextContent
|
||||
|
||||
from .config import WikiJSConfig
|
||||
from .wikijs_client import WikiJSClient
|
||||
|
||||
# Suppress noisy MCP validation warnings on stderr
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logging.getLogger("root").setLevel(logging.ERROR)
|
||||
logging.getLogger("mcp").setLevel(logging.ERROR)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WikiJSMCPServer:
    """MCP Server for Wiki.js integration.

    Exposes Wiki.js page and lessons-learned operations as MCP tools over
    stdio JSON-RPC.  Tool schemas are declared in setup_tools(); each call
    is routed to the matching WikiJSClient method and the result is
    returned as a JSON payload in a single text content block.
    """

    def __init__(self):
        self.server = Server("wikijs-mcp")
        # config and client are filled in by initialize(); None until then.
        self.config = None
        self.client = None

    async def initialize(self):
        """
        Initialize server and load configuration.

        Loads the hybrid config (system + project .env) and constructs the
        GraphQL client from it.

        Raises:
            Exception: If initialization fails
        """
        try:
            config_loader = WikiJSConfig()
            self.config = config_loader.load()

            self.client = WikiJSClient(
                api_url=self.config['api_url'],
                api_token=self.config['api_token'],
                base_path=self.config['base_path'],
                project=self.config.get('project')
            )

            logger.info(f"Wiki.js MCP Server initialized in {self.config['mode']} mode")
        except Exception as e:
            logger.error(f"Failed to initialize: {e}")
            raise

    def setup_tools(self):
        """Register all available tools with the MCP server"""

        @self.server.list_tools()
        async def list_tools() -> list[Tool]:
            """Return list of available tools"""
            # Schemas mirror the WikiJSClient method signatures; keep the
            # two in sync when adding parameters.
            return [
                Tool(
                    name="search_pages",
                    description="Search Wiki.js pages by keywords and tags",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "Search query string"
                            },
                            "tags": {
                                "type": "string",
                                "description": "Comma-separated tags to filter by (optional)"
                            },
                            "limit": {
                                "type": "integer",
                                "default": 20,
                                "description": "Maximum results to return"
                            }
                        },
                        "required": ["query"]
                    }
                ),
                Tool(
                    name="get_page",
                    description="Get a specific page by path",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "path": {
                                "type": "string",
                                "description": "Page path (relative or absolute)"
                            }
                        },
                        "required": ["path"]
                    }
                ),
                Tool(
                    name="create_page",
                    description="Create a new Wiki.js page",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "path": {
                                "type": "string",
                                "description": "Page path relative to project/base"
                            },
                            "title": {
                                "type": "string",
                                "description": "Page title"
                            },
                            "content": {
                                "type": "string",
                                "description": "Page content (markdown)"
                            },
                            "description": {
                                "type": "string",
                                "description": "Page description (optional)"
                            },
                            "tags": {
                                "type": "string",
                                "description": "Comma-separated tags (optional)"
                            },
                            "publish": {
                                "type": "boolean",
                                "default": True,
                                "description": "Publish immediately"
                            }
                        },
                        "required": ["path", "title", "content"]
                    }
                ),
                Tool(
                    name="update_page",
                    description="Update an existing Wiki.js page",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "page_id": {
                                "type": "integer",
                                "description": "Page ID"
                            },
                            "content": {
                                "type": "string",
                                "description": "New content (optional)"
                            },
                            "title": {
                                "type": "string",
                                "description": "New title (optional)"
                            },
                            "description": {
                                "type": "string",
                                "description": "New description (optional)"
                            },
                            "tags": {
                                "type": "string",
                                "description": "New comma-separated tags (optional)"
                            },
                            "publish": {
                                "type": "boolean",
                                "description": "New publish status (optional)"
                            }
                        },
                        "required": ["page_id"]
                    }
                ),
                Tool(
                    name="list_pages",
                    description="List pages under a specific path",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "path_prefix": {
                                "type": "string",
                                "default": "",
                                "description": "Path prefix to filter by"
                            }
                        }
                    }
                ),
                Tool(
                    name="create_lesson",
                    description="Create a lessons learned entry to prevent repeating mistakes",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "title": {
                                "type": "string",
                                "description": "Lesson title (e.g., 'Sprint 16 - Prevent Infinite Loops')"
                            },
                            "content": {
                                "type": "string",
                                "description": "Lesson content (markdown with problem, solution, prevention)"
                            },
                            "tags": {
                                "type": "string",
                                "description": "Comma-separated tags for categorization"
                            },
                            "category": {
                                "type": "string",
                                "default": "sprints",
                                "description": "Category (sprints, patterns, architecture, etc.)"
                            }
                        },
                        "required": ["title", "content", "tags"]
                    }
                ),
                Tool(
                    name="search_lessons",
                    description="Search lessons learned from previous sprints to avoid known pitfalls",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "Search query (optional)"
                            },
                            "tags": {
                                "type": "string",
                                "description": "Comma-separated tags to filter by (optional)"
                            },
                            "limit": {
                                "type": "integer",
                                "default": 20,
                                "description": "Maximum results"
                            }
                        }
                    }
                ),
                Tool(
                    name="tag_lesson",
                    description="Add or update tags on a lessons learned entry",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "page_id": {
                                "type": "integer",
                                "description": "Lesson page ID"
                            },
                            "tags": {
                                "type": "string",
                                "description": "Comma-separated tags"
                            }
                        },
                        "required": ["page_id", "tags"]
                    }
                )
            ]

        @self.server.call_tool()
        async def call_tool(name: str, arguments: dict) -> list[TextContent]:
            """
            Handle tool invocation.

            Args:
                name: Tool name
                arguments: Tool arguments

            Returns:
                List of TextContent with results
            """
            try:
                # Route to appropriate client method.  Comma-separated tag
                # strings from the schema are split into lists here before
                # hitting the client.
                if name == "search_pages":
                    tags = arguments.get('tags')
                    tag_list = [t.strip() for t in tags.split(',')] if tags else None
                    results = await self.client.search_pages(
                        query=arguments['query'],
                        tags=tag_list,
                        limit=arguments.get('limit', 20)
                    )
                    result = {'success': True, 'count': len(results), 'pages': results}

                elif name == "get_page":
                    page = await self.client.get_page(arguments['path'])
                    if page:
                        result = {'success': True, 'page': page}
                    else:
                        result = {'success': False, 'error': f"Page not found: {arguments['path']}"}

                elif name == "create_page":
                    tags = arguments.get('tags')
                    tag_list = [t.strip() for t in tags.split(',')] if tags else []
                    page = await self.client.create_page(
                        path=arguments['path'],
                        title=arguments['title'],
                        content=arguments['content'],
                        description=arguments.get('description', ''),
                        tags=tag_list,
                        is_published=arguments.get('publish', True)
                    )
                    result = {'success': True, 'page': page}

                elif name == "update_page":
                    tags = arguments.get('tags')
                    tag_list = [t.strip() for t in tags.split(',')] if tags else None
                    page = await self.client.update_page(
                        page_id=arguments['page_id'],
                        content=arguments.get('content'),
                        title=arguments.get('title'),
                        description=arguments.get('description'),
                        tags=tag_list,
                        is_published=arguments.get('publish')
                    )
                    result = {'success': True, 'page': page}

                elif name == "list_pages":
                    pages = await self.client.list_pages(
                        path_prefix=arguments.get('path_prefix', '')
                    )
                    result = {'success': True, 'count': len(pages), 'pages': pages}

                elif name == "create_lesson":
                    # 'tags' is required by the schema for this tool.
                    tag_list = [t.strip() for t in arguments['tags'].split(',')]
                    lesson = await self.client.create_lesson(
                        title=arguments['title'],
                        content=arguments['content'],
                        tags=tag_list,
                        category=arguments.get('category', 'sprints')
                    )
                    result = {
                        'success': True,
                        'lesson': lesson,
                        'message': f"Lesson learned captured: {arguments['title']}"
                    }

                elif name == "search_lessons":
                    tags = arguments.get('tags')
                    tag_list = [t.strip() for t in tags.split(',')] if tags else None
                    lessons = await self.client.search_lessons(
                        query=arguments.get('query'),
                        tags=tag_list,
                        limit=arguments.get('limit', 20)
                    )
                    result = {
                        'success': True,
                        'count': len(lessons),
                        'lessons': lessons,
                        'message': f"Found {len(lessons)} relevant lessons"
                    }

                elif name == "tag_lesson":
                    tag_list = [t.strip() for t in arguments['tags'].split(',')]
                    lesson = await self.client.tag_lesson(
                        page_id=arguments['page_id'],
                        new_tags=tag_list
                    )
                    result = {'success': True, 'lesson': lesson, 'message': 'Tags updated'}

                else:
                    raise ValueError(f"Unknown tool: {name}")

                # Every tool replies with one JSON text block.
                return [TextContent(
                    type="text",
                    text=json.dumps(result, indent=2)
                )]

            except Exception as e:
                # Failures are returned as structured tool output rather than
                # raised, so the MCP client sees a {'success': False} payload.
                logger.error(f"Tool {name} failed: {e}")
                return [TextContent(
                    type="text",
                    text=json.dumps({'success': False, 'error': str(e)}, indent=2)
                )]

    async def run(self):
        """Run the MCP server"""
        await self.initialize()
        self.setup_tools()

        # Serve JSON-RPC over stdio until the stream closes.
        async with stdio_server() as (read_stream, write_stream):
            await self.server.run(
                read_stream,
                write_stream,
                self.server.create_initialization_options()
            )
|
||||
|
||||
|
||||
async def main():
    """Construct the Wiki.js MCP server and serve until the stdio stream closes."""
    app = WikiJSMCPServer()
    await app.run()


if __name__ == "__main__":
    # Script entry point: drive the async server with asyncio's event loop.
    asyncio.run(main())
|
||||
@@ -0,0 +1 @@
|
||||
"""Wiki.js MCP tools."""
|
||||
@@ -0,0 +1,183 @@
|
||||
"""
|
||||
MCP tools for Wiki.js lessons learned management.
|
||||
"""
|
||||
from typing import Dict, Any, List, Optional
|
||||
from mcp.server import Tool
|
||||
from ..wikijs_client import WikiJSClient
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_lesson_tools(client: WikiJSClient) -> List[Tool]:
    """
    Create MCP tools for lessons learned management.

    Builds three closures over *client* (create_lesson, search_lessons,
    tag_lesson) and wraps them in Tool objects.

    Args:
        client: WikiJSClient instance

    Returns:
        List of MCP tools
    """

    async def create_lesson(
        title: str,
        content: str,
        tags: str,
        category: str = "sprints"
    ) -> Dict[str, Any]:
        """
        Create a lessons learned entry.

        After 15 sprints without systematic lesson capture, repeated mistakes occurred.
        This tool ensures lessons are captured and searchable for future sprints.

        Args:
            title: Lesson title (e.g., "Sprint 16 - Claude Code Infinite Loop on Label Validation")
            content: Lesson content in markdown (problem, solution, prevention)
            tags: Comma-separated tags (e.g., "claude-code, testing, labels, validation")
            category: Category for organization (default: "sprints", also: "patterns", "architecture")

        Returns:
            Created lesson page data

        Example:
            create_lesson(
                title="Sprint 16 - Prevent Infinite Loops in Validation",
                content="## Problem\\n\\nClaude Code entered infinite loop...\\n\\n## Solution\\n\\n...",
                tags="claude-code, testing, infinite-loop, validation",
                category="sprints"
            )
        """
        try:
            # tags arrives as a comma-separated string; client wants a list.
            tag_list = [t.strip() for t in tags.split(',')]

            lesson = await client.create_lesson(
                title=title,
                content=content,
                tags=tag_list,
                category=category
            )

            return {
                'success': True,
                'lesson': lesson,
                'message': f'Lesson learned captured: {title}'
            }
        except Exception as e:
            # Errors are reported in the payload, not raised, so the MCP
            # caller always gets a structured response.
            logger.error(f"Error creating lesson: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    async def search_lessons(
        query: Optional[str] = None,
        tags: Optional[str] = None,
        limit: int = 20
    ) -> Dict[str, Any]:
        """
        Search lessons learned entries.

        Use this at sprint start to find relevant lessons from previous sprints.
        Prevents repeating the same mistakes.

        Args:
            query: Search query (e.g., "validation", "infinite loop", "docker")
            tags: Comma-separated tags to filter by (e.g., "claude-code, testing")
            limit: Maximum number of results (default: 20)

        Returns:
            List of matching lessons learned

        Example:
            # Before implementing validation logic
            search_lessons(query="validation", tags="testing, claude-code")

            # Before working with Docker
            search_lessons(query="docker", tags="deployment")
        """
        try:
            tag_list = [t.strip() for t in tags.split(',')] if tags else None

            lessons = await client.search_lessons(
                query=query,
                tags=tag_list,
                limit=limit
            )

            return {
                'success': True,
                'count': len(lessons),
                'lessons': lessons,
                'message': f'Found {len(lessons)} relevant lessons'
            }
        except Exception as e:
            logger.error(f"Error searching lessons: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    async def tag_lesson(
        page_id: int,
        tags: str
    ) -> Dict[str, Any]:
        """
        Add or update tags on a lesson.

        Args:
            page_id: Lesson page ID (from create_lesson or search_lessons)
            tags: Comma-separated tags (will replace existing tags)

        Returns:
            Updated lesson data
        """
        try:
            tag_list = [t.strip() for t in tags.split(',')]

            lesson = await client.tag_lesson(
                page_id=page_id,
                new_tags=tag_list
            )

            return {
                'success': True,
                'lesson': lesson,
                'message': 'Tags updated successfully'
            }
        except Exception as e:
            logger.error(f"Error tagging lesson: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    # Define MCP tools
    # NOTE(review): Tool(..., function=...) and the module-level
    # `from mcp.server import Tool` differ from server.py, which uses
    # mcp.types.Tool with inputSchema and registers handlers via decorators.
    # Verify this module is actually wired into the installed mcp version;
    # it may be legacy/dead code.
    tools = [
        Tool(
            name="create_lesson",
            description=(
                "Create a lessons learned entry to prevent repeating mistakes. "
                "Critical for capturing sprint insights, architectural decisions, "
                "and technical gotchas for future reference."
            ),
            function=create_lesson
        ),
        Tool(
            name="search_lessons",
            description=(
                "Search lessons learned from previous sprints and projects. "
                "Use this before starting new work to avoid known pitfalls and "
                "leverage past solutions."
            ),
            function=search_lessons
        ),
        Tool(
            name="tag_lesson",
            description="Add or update tags on a lessons learned entry for better categorization",
            function=tag_lesson
        )
    ]

    return tools
|
||||
229
plugins/projman/mcp-servers/wikijs/mcp_server/tools/pages.py
Normal file
229
plugins/projman/mcp-servers/wikijs/mcp_server/tools/pages.py
Normal file
@@ -0,0 +1,229 @@
|
||||
"""
|
||||
MCP tools for Wiki.js page management.
|
||||
"""
|
||||
from typing import Dict, Any, List, Optional
|
||||
from mcp.server import Tool
|
||||
from ..wikijs_client import WikiJSClient
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_page_tools(client: WikiJSClient) -> List[Tool]:
    """
    Create MCP tools for page management.

    Builds five closures over *client* (search/get/create/update/list pages)
    and wraps them in Tool objects.

    Args:
        client: WikiJSClient instance

    Returns:
        List of MCP tools
    """

    async def search_pages(
        query: str,
        tags: Optional[str] = None,
        limit: int = 20
    ) -> Dict[str, Any]:
        """
        Search Wiki.js pages by keywords and tags.

        Args:
            query: Search query string
            tags: Comma-separated list of tags to filter by
            limit: Maximum number of results (default: 20)

        Returns:
            List of matching pages with path, title, description, and tags
        """
        try:
            # tags arrives as a comma-separated string; client wants a list.
            tag_list = [t.strip() for t in tags.split(',')] if tags else None
            results = await client.search_pages(query, tag_list, limit)

            return {
                'success': True,
                'count': len(results),
                'pages': results
            }
        except Exception as e:
            # Errors are reported in the payload, not raised, so the MCP
            # caller always gets a structured response.
            logger.error(f"Error searching pages: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    async def get_page(path: str) -> Dict[str, Any]:
        """
        Get a specific page by path.

        Args:
            path: Page path (can be relative to project or absolute)

        Returns:
            Page data including content, metadata, and tags
        """
        try:
            page = await client.get_page(path)

            if page:
                return {
                    'success': True,
                    'page': page
                }
            else:
                return {
                    'success': False,
                    'error': f'Page not found: {path}'
                }
        except Exception as e:
            logger.error(f"Error getting page: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    async def create_page(
        path: str,
        title: str,
        content: str,
        description: str = "",
        tags: Optional[str] = None,
        publish: bool = True
    ) -> Dict[str, Any]:
        """
        Create a new Wiki.js page.

        Args:
            path: Page path relative to project/base (e.g., 'documentation/api')
            title: Page title
            content: Page content in markdown format
            description: Page description (optional)
            tags: Comma-separated list of tags (optional)
            publish: Whether to publish immediately (default: True)

        Returns:
            Created page data
        """
        try:
            tag_list = [t.strip() for t in tags.split(',')] if tags else []

            page = await client.create_page(
                path=path,
                title=title,
                content=content,
                description=description,
                tags=tag_list,
                is_published=publish
            )

            return {
                'success': True,
                'page': page
            }
        except Exception as e:
            logger.error(f"Error creating page: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    async def update_page(
        page_id: int,
        content: Optional[str] = None,
        title: Optional[str] = None,
        description: Optional[str] = None,
        tags: Optional[str] = None,
        publish: Optional[bool] = None
    ) -> Dict[str, Any]:
        """
        Update an existing Wiki.js page.

        Args:
            page_id: Page ID (from get_page or search_pages)
            content: New content (optional)
            title: New title (optional)
            description: New description (optional)
            tags: New comma-separated tags (optional)
            publish: New publish status (optional)

        Returns:
            Updated page data
        """
        try:
            tag_list = [t.strip() for t in tags.split(',')] if tags else None

            page = await client.update_page(
                page_id=page_id,
                content=content,
                title=title,
                description=description,
                tags=tag_list,
                is_published=publish
            )

            return {
                'success': True,
                'page': page
            }
        except Exception as e:
            logger.error(f"Error updating page: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    async def list_pages(path_prefix: str = "") -> Dict[str, Any]:
        """
        List pages under a specific path.

        Args:
            path_prefix: Path prefix to filter by (relative to project/base)

        Returns:
            List of pages under the specified path
        """
        try:
            pages = await client.list_pages(path_prefix)

            return {
                'success': True,
                'count': len(pages),
                'pages': pages
            }
        except Exception as e:
            logger.error(f"Error listing pages: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    # Define MCP tools
    # NOTE(review): Tool(..., function=...) and the module-level
    # `from mcp.server import Tool` differ from server.py, which uses
    # mcp.types.Tool with inputSchema and registers handlers via decorators.
    # Verify this module is actually wired into the installed mcp version;
    # it may be legacy/dead code.
    tools = [
        Tool(
            name="search_pages",
            description="Search Wiki.js pages by keywords and tags",
            function=search_pages
        ),
        Tool(
            name="get_page",
            description="Get a specific Wiki.js page by path",
            function=get_page
        ),
        Tool(
            name="create_page",
            description="Create a new Wiki.js page with content and metadata",
            function=create_page
        ),
        Tool(
            name="update_page",
            description="Update an existing Wiki.js page",
            function=update_page
        ),
        Tool(
            name="list_pages",
            description="List pages under a specific path",
            function=list_pages
        )
    ]

    return tools
|
||||
451
plugins/projman/mcp-servers/wikijs/mcp_server/wikijs_client.py
Normal file
451
plugins/projman/mcp-servers/wikijs/mcp_server/wikijs_client.py
Normal file
@@ -0,0 +1,451 @@
|
||||
"""
|
||||
Wiki.js GraphQL API Client.
|
||||
|
||||
Provides methods for interacting with Wiki.js GraphQL API for page management,
|
||||
lessons learned, and documentation.
|
||||
"""
|
||||
import httpx
|
||||
from typing import List, Dict, Optional, Any
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WikiJSClient:
|
||||
"""Client for Wiki.js GraphQL API"""
|
||||
|
||||
    def __init__(self, api_url: str, api_token: str, base_path: str, project: Optional[str] = None):
        """
        Initialize Wiki.js client.

        Args:
            api_url: Wiki.js GraphQL API URL (e.g., http://wiki.example.com/graphql)
            api_token: Wiki.js API token
            base_path: Base path in Wiki.js (e.g., /your-org)
            project: Project path (e.g., projects/my-project) for project mode
        """
        self.api_url = api_url
        self.api_token = api_token
        # Normalize: no trailing slash so path joins stay single-slashed.
        self.base_path = base_path.rstrip('/')
        self.project = project
        # Mirrors WikiJSConfig mode detection: a project implies project mode.
        self.mode = 'project' if project else 'company'

        # Reused on every GraphQL request.
        self.headers = {
            'Authorization': f'Bearer {api_token}',
            'Content-Type': 'application/json'
        }
|
||||
|
||||
    def _get_full_path(self, relative_path: str) -> str:
        """
        Construct full path based on mode.

        Args:
            relative_path: Path relative to project or base

        Returns:
            Full path in Wiki.js
        """
        # Strip any leading slash so the join below never doubles one.
        relative_path = relative_path.lstrip('/')

        if self.mode == 'project' and self.project:
            # Project mode: base_path/project/relative_path
            return f"{self.base_path}/{self.project}/{relative_path}"
        else:
            # Company mode: base_path/relative_path
            return f"{self.base_path}/{relative_path}"
|
||||
|
||||
    async def _execute_query(self, query: str, variables: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Execute GraphQL query.

        Args:
            query: GraphQL query string
            variables: Query variables

        Returns:
            Response data (the 'data' object of the GraphQL response)

        Raises:
            httpx.HTTPError: On HTTP errors
            ValueError: On GraphQL errors
        """
        # A fresh AsyncClient per call keeps this method stateless; fine at
        # the low call volume of an MCP tool server.
        async with httpx.AsyncClient() as client:
            response = await client.post(
                self.api_url,
                headers=self.headers,
                json={'query': query, 'variables': variables or {}}
            )

            # Log response for debugging
            if response.status_code != 200:
                logger.error(f"HTTP {response.status_code}: {response.text}")

            response.raise_for_status()

            data = response.json()

            # GraphQL transports errors in-band with HTTP 200; surface them.
            if 'errors' in data:
                errors = data['errors']
                error_messages = [err.get('message', str(err)) for err in errors]
                raise ValueError(f"GraphQL errors: {', '.join(error_messages)}")

            return data.get('data', {})
|
||||
|
||||
    async def search_pages(
        self,
        query: str,
        tags: Optional[List[str]] = None,
        limit: int = 20
    ) -> List[Dict[str, Any]]:
        """
        Search pages by keywords and tags.

        Args:
            query: Search query string
            tags: Filter by tags
            limit: Maximum results to return

        Returns:
            List of matching pages
        """
        graphql_query = """
        query SearchPages($query: String!) {
            pages {
                search(query: $query) {
                    results {
                        id
                        path
                        title
                        description
                    }
                }
            }
        }
        """

        data = await self._execute_query(graphql_query, {'query': query})
        results = data.get('pages', {}).get('search', {}).get('results', [])

        # Filter by tags if specified
        # NOTE(review): the query above never selects a `tags` field, so
        # r.get('tags', []) is always empty and any non-empty `tags` filter
        # removes ALL results.  Confirm whether Wiki.js search results can
        # expose tags, or drop/redesign this filter.
        if tags:
            tags_lower = [t.lower() for t in tags]
            results = [
                r for r in results
                if any(tag.lower() in tags_lower for tag in r.get('tags', []))
            ]

        # limit is applied client-side after filtering.
        return results[:limit]
|
||||
|
||||
    async def get_page(self, path: str) -> Optional[Dict[str, Any]]:
        """
        Get specific page by path.

        Args:
            path: Page path (can be relative or absolute)

        Returns:
            Page data or None if not found
        """
        # Convert to absolute path; paths already under base_path pass through.
        if not path.startswith(self.base_path):
            path = self._get_full_path(path)

        # NOTE(review): field shapes (`tags`, `author`) depend on the Wiki.js
        # schema version — verify against the target instance.
        graphql_query = """
        query GetPage($path: String!) {
            pages {
                single(path: $path) {
                    id
                    path
                    title
                    description
                    content
                    tags
                    createdAt
                    updatedAt
                    author
                    isPublished
                }
            }
        }
        """

        try:
            data = await self._execute_query(graphql_query, {'path': path})
            return data.get('pages', {}).get('single')
        except (httpx.HTTPError, ValueError) as e:
            # Missing pages are reported as None rather than raised.
            logger.warning(f"Page not found at {path}: {e}")
            return None
|
||||
|
||||
async def create_page(
    self,
    path: str,
    title: str,
    content: str,
    description: str = "",
    tags: Optional[List[str]] = None,
    is_published: bool = True
) -> Dict[str, Any]:
    """
    Create new page.

    Args:
        path: Page path (relative to project/base)
        title: Page title
        content: Page content (markdown)
        description: Page description
        tags: Page tags
        is_published: Whether to publish immediately

    Returns:
        Created page data

    Raises:
        ValueError: If Wiki.js reports the creation did not succeed.
    """
    graphql_query = """
    mutation CreatePage($path: String!, $title: String!, $content: String!, $description: String!, $tags: [String]!, $isPublished: Boolean!, $isPrivate: Boolean!) {
        pages {
            create(
                path: $path
                title: $title
                content: $content
                description: $description
                tags: $tags
                isPublished: $isPublished
                isPrivate: $isPrivate
                editor: "markdown"
                locale: "en"
            ) {
                responseResult {
                    succeeded
                    errorCode
                    slug
                    message
                }
                page {
                    id
                    path
                    title
                }
            }
        }
    }
    """

    data = await self._execute_query(graphql_query, {
        'path': self._get_full_path(path),
        'title': title,
        'content': content,
        'description': description,
        'tags': tags or [],
        'isPublished': is_published,
        'isPrivate': False,  # Default to not private
    })
    outcome = data.get('pages', {}).get('create', {})
    status = outcome.get('responseResult', {})

    # Surface API-level failures as exceptions rather than empty results.
    if not status.get('succeeded'):
        error_msg = status.get('message', 'Unknown error')
        raise ValueError(f"Failed to create page: {error_msg}")

    return outcome.get('page', {})
async def update_page(
    self,
    page_id: int,
    content: Optional[str] = None,
    title: Optional[str] = None,
    description: Optional[str] = None,
    tags: Optional[List[str]] = None,
    is_published: Optional[bool] = None
) -> Dict[str, Any]:
    """
    Update existing page.

    Only the arguments that are not None are sent to Wiki.js; the rest of
    the page is left untouched.

    Args:
        page_id: Page ID
        content: New content (if changing)
        title: New title (if changing)
        description: New description (if changing)
        tags: New tags (if changing)
        is_published: New publish status (if changing)

    Returns:
        Updated page data

    Raises:
        ValueError: If Wiki.js reports the update did not succeed.
    """
    # Map each optional argument to its GraphQL variable name and type.
    # BUG FIX: the previous implementation derived the declared type from
    # the Python value via type(v).__name__.title(), which produced invalid
    # GraphQL type names ("Str", "Bool", "List") for everything except Int,
    # making the mutation malformed for most fields.
    field_specs = [
        ('content', content, 'String!'),
        ('title', title, 'String!'),
        ('description', description, 'String!'),
        ('tags', tags, '[String]!'),
        ('isPublished', is_published, 'Boolean!'),
    ]

    fields = []          # "name: $name" argument entries for the mutation
    declarations = []    # ", $name: Type" entries for the operation header
    variables: Dict[str, Any] = {'id': page_id}
    for name, value, gql_type in field_specs:
        if value is not None:
            fields.append(f'{name}: ${name}')
            declarations.append(f', ${name}: {gql_type}')
            variables[name] = value

    fields_str = ', '.join(fields)

    graphql_query = f"""
    mutation UpdatePage($id: Int!{''.join(declarations)}) {{
        pages {{
            update(
                id: $id
                {fields_str}
            ) {{
                responseResult {{
                    succeeded
                    errorCode
                    message
                }}
                page {{
                    id
                    path
                    title
                    updatedAt
                }}
            }}
        }}
    }}
    """

    data = await self._execute_query(graphql_query, variables)
    result = data.get('pages', {}).get('update', {})

    if not result.get('responseResult', {}).get('succeeded'):
        error_msg = result.get('responseResult', {}).get('message', 'Unknown error')
        raise ValueError(f"Failed to update page: {error_msg}")

    return result.get('page', {})
async def list_pages(self, path_prefix: str = "") -> List[Dict[str, Any]]:
    """
    List pages under a specific path.

    Args:
        path_prefix: Path prefix to filter (relative to project/base)

    Returns:
        List of pages
    """
    # Resolve the filter prefix according to the configured mode.
    if path_prefix:
        full_path = self._get_full_path(path_prefix)
    elif self.mode == 'project' and self.project:
        # No explicit prefix in project mode: everything in the project.
        full_path = f"{self.base_path}/{self.project}"
    else:
        # Company mode (or no project configured): everything under base.
        full_path = self.base_path

    graphql_query = """
    query ListPages {
        pages {
            list {
                id
                path
                title
                description
                tags
                createdAt
                updatedAt
                isPublished
            }
        }
    }
    """

    data = await self._execute_query(graphql_query)
    all_pages = data.get('pages', {}).get('list', [])

    if not full_path:
        return all_pages
    # Keep only pages whose path falls under the resolved prefix.
    return [page for page in all_pages if page.get('path', '').startswith(full_path)]
async def create_lesson(
    self,
    title: str,
    content: str,
    tags: List[str],
    category: str = "sprints"
) -> Dict[str, Any]:
    """
    Create a lessons learned entry.

    Args:
        title: Lesson title
        content: Lesson content (markdown)
        tags: Tags for categorization
        category: Category (sprints, patterns, etc.)

    Returns:
        Created lesson page data
    """
    # Page lives at lessons-learned/<category>/<title-slug>; the slug only
    # lowercases and replaces spaces/underscores with hyphens.
    slug = title.lower().replace(' ', '-').replace('_', '-')
    return await self.create_page(
        path=f"lessons-learned/{category}/{slug}",
        title=title,
        content=content,
        description=f"Lessons learned: {title}",
        tags=[*tags, 'lesson-learned', category],
        is_published=True
    )
async def search_lessons(
    self,
    query: Optional[List[str]] = None,
    tags: Optional[List[str]] = None,
    limit: int = 20
) -> List[Dict[str, Any]]:
    """
    Search lessons learned entries.

    Args:
        query: Search query (optional; falls back to "lesson")
        tags: Filter by tags
        limit: Maximum results

    Returns:
        List of matching lessons
    """
    # Run a normal page search, then keep only hits under lessons-learned/.
    hits = await self.search_pages(query or "lesson", tags, limit)
    lessons_root = self._get_full_path("lessons-learned")
    return [hit for hit in hits if hit.get('path', '').startswith(lessons_root)]
async def tag_lesson(self, page_id: int, new_tags: List[str]) -> Dict[str, Any]:
    """
    Add tags to a lesson.

    Args:
        page_id: Lesson page ID
        new_tags: Tags to add

    Returns:
        Updated page data
    """
    # Tags are currently replaced wholesale; merging with the lesson's
    # existing tags would require fetching the page first (future work).
    return await self.update_page(page_id=page_id, tags=new_tags)
Reference in New Issue
Block a user