feat: major improvements to projman plugin v1.0.0
- Remove Wiki.js MCP server entirely - Add wiki, milestone, and dependency tools to Gitea MCP server - Add parallel execution support based on dependency graph - Add mandatory pre-planning validations (org check, labels, docs/changes) - Add CLI blocking rules to all agents (API-only) - Add standardized task naming: [Sprint XX] <type>: <description> - Add branch naming convention: feat/, fix/, debug/ prefixes - Add MR body template without subtasks - Add auto-close issues via commit keywords - Create claude-config-maintainer plugin for CLAUDE.md optimization - Update all sprint commands with new tools and workflows - Update documentation to remove Wiki.js references New MCP tools: - Wiki: list_wiki_pages, get_wiki_page, create_wiki_page, create_lesson, search_lessons - Milestones: list_milestones, get_milestone, create_milestone, update_milestone - Dependencies: list_issue_dependencies, create_issue_dependency, get_execution_order - Validation: validate_repo_org, get_branch_protection, create_label Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -6,9 +6,13 @@ Provides synchronous methods for:
|
||||
- Label management
|
||||
- Repository operations
|
||||
- PMO multi-repo aggregation
|
||||
- Wiki operations (lessons learned)
|
||||
- Milestone management
|
||||
- Issue dependencies
|
||||
"""
|
||||
import requests
|
||||
import logging
|
||||
import re
|
||||
from typing import List, Dict, Optional
|
||||
from .config import GiteaConfig
|
||||
|
||||
@@ -209,3 +213,381 @@ class GiteaClient:
|
||||
logger.error(f"Error fetching issues from {repo_name}: {e}")
|
||||
|
||||
return aggregated
|
||||
|
||||
# ========================================
|
||||
# WIKI OPERATIONS (Lessons Learned)
|
||||
# ========================================
|
||||
|
||||
def list_wiki_pages(self, repo: Optional[str] = None) -> List[Dict]:
    """Return every wiki page of the target repository.

    Args:
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        List of wiki page dicts as returned by the Gitea API.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Listing wiki pages from {owner}/{target_repo}")
    resp = self.session.get(f"{self.base_url}/repos/{owner}/{target_repo}/wiki/pages")
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def get_wiki_page(
    self,
    page_name: str,
    repo: Optional[str] = None
) -> Dict:
    """Fetch a single wiki page by name.

    Args:
        page_name: Name/path of the wiki page.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        Wiki page dict (the body arrives base64-encoded from the API).
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Getting wiki page '{page_name}' from {owner}/{target_repo}")
    resp = self.session.get(
        f"{self.base_url}/repos/{owner}/{target_repo}/wiki/page/{page_name}"
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def create_wiki_page(
    self,
    title: str,
    content: str,
    repo: Optional[str] = None
) -> Dict:
    """Create a new wiki page.

    Args:
        title: Page title/name.
        content: Page body (markdown); base64-encoded before sending.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        The created wiki page dict.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Creating wiki page '{title}' in {owner}/{target_repo}")
    # The wiki API transports page bodies base64-encoded.
    payload = {
        'title': title,
        'content_base64': self._encode_base64(content)
    }
    resp = self.session.post(
        f"{self.base_url}/repos/{owner}/{target_repo}/wiki/new",
        json=payload,
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def update_wiki_page(
    self,
    page_name: str,
    content: str,
    repo: Optional[str] = None
) -> Dict:
    """Replace the content of an existing wiki page.

    Args:
        page_name: Name/path of the page to update.
        content: New body (markdown); base64-encoded before sending.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        The updated wiki page dict.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Updating wiki page '{page_name}' in {owner}/{target_repo}")
    # Only the body is replaced; the page keeps its name.
    payload = {'content_base64': self._encode_base64(content)}
    resp = self.session.patch(
        f"{self.base_url}/repos/{owner}/{target_repo}/wiki/page/{page_name}",
        json=payload,
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def delete_wiki_page(
    self,
    page_name: str,
    repo: Optional[str] = None
) -> bool:
    """Delete a wiki page.

    Args:
        page_name: Name/path of the page to delete.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        True on success; an HTTP error raises instead.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Deleting wiki page '{page_name}' from {owner}/{target_repo}")
    resp = self.session.delete(
        f"{self.base_url}/repos/{owner}/{target_repo}/wiki/page/{page_name}"
    )
    resp.raise_for_status()
    return True
|
||||
|
||||
def _encode_base64(self, content: str) -> str:
    """Return *content* as a base64 string (the wiki API's transport format)."""
    import base64
    raw = content.encode('utf-8')
    return base64.b64encode(raw).decode('utf-8')
|
||||
|
||||
def _decode_base64(self, content: str) -> str:
    """Decode a base64 string received from the wiki API back to text."""
    import base64
    decoded = base64.b64decode(content.encode('utf-8'))
    return decoded.decode('utf-8')
|
||||
|
||||
def search_wiki_pages(
    self,
    query: str,
    repo: Optional[str] = None
) -> List[Dict]:
    """Search wiki pages by title (case-insensitive substring match).

    Note: only page *titles* are examined — page bodies are never fetched
    here, despite what the original comment suggested. Use search_lessons
    with tags for content-aware filtering.

    Args:
        query: Substring to look for in page titles.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        Wiki page dicts whose title contains *query*.
    """
    query_lower = query.lower()
    return [
        page for page in self.list_wiki_pages(repo)
        if query_lower in page.get('title', '').lower()
    ]
|
||||
|
||||
def create_lesson(
    self,
    title: str,
    content: str,
    tags: List[str],
    category: str = "sprints",
    repo: Optional[str] = None
) -> Dict:
    """Store a lessons-learned entry as a wiki page under ``lessons/<category>/``.

    The title is sanitized into a wiki-safe page name, and the tags are
    appended to the page body as a trailing metadata footer.

    Args:
        title: Human-readable lesson title.
        content: Lesson body (markdown).
        tags: Tag list appended as footer metadata.
        category: Sub-folder under ``lessons/`` (default ``sprints``).
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        The created wiki page dict.
    """
    page_name = f"lessons/{category}/{self._sanitize_page_name(title)}"
    full_content = f"{content}\n\n---\n**Tags:** {', '.join(tags)}"
    return self.create_wiki_page(page_name, full_content, repo)
|
||||
|
||||
def search_lessons(
    self,
    query: Optional[str] = None,
    tags: Optional[List[str]] = None,
    repo: Optional[str] = None
) -> List[Dict]:
    """Search lessons-learned wiki pages.

    A page qualifies when its title starts with ``lessons/``, contains
    *query* (case-insensitive) if one is given, and — when *tags* are
    given — its decoded body contains at least one of the tags. Pages
    whose body cannot be fetched or decoded are skipped silently (the
    search is best-effort).

    Args:
        query: Optional substring to match against page titles.
        tags: Optional tag list; at least one must appear in the body.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        Matching wiki page dicts (titles only; bodies are not attached).
    """
    matches = []
    for page in self.list_wiki_pages(repo):
        title = page.get('title', '')
        # Lessons live under the lessons/ prefix; ignore everything else.
        if not title.startswith('lessons/'):
            continue
        if query and query.lower() not in title.lower():
            continue
        if tags:
            # Tag matching requires the full body, which costs one extra
            # request per candidate page.
            try:
                full_page = self.get_wiki_page(title, repo)
                body = self._decode_base64(full_page.get('content_base64', ''))
                if not any(tag.lower() in body.lower() for tag in tags):
                    continue
            except Exception:
                continue
        matches.append(page)
    return matches
|
||||
|
||||
def _sanitize_page_name(self, title: str) -> str:
    """Turn *title* into a wiki-safe page name.

    Drops everything except word characters, whitespace and hyphens,
    collapses whitespace runs into single hyphens, then lowercases.
    """
    stripped = re.sub(r'[^\w\s-]', '', title)
    hyphenated = re.sub(r'[\s]+', '-', stripped)
    return hyphenated.lower()
|
||||
|
||||
# ========================================
|
||||
# MILESTONE OPERATIONS
|
||||
# ========================================
|
||||
|
||||
def list_milestones(
    self,
    state: str = 'open',
    repo: Optional[str] = None
) -> List[Dict]:
    """List milestones filtered by state.

    Args:
        state: Milestone state filter ('open', 'closed' or 'all').
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        List of milestone dicts.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Listing milestones from {owner}/{target_repo}")
    resp = self.session.get(
        f"{self.base_url}/repos/{owner}/{target_repo}/milestones",
        params={'state': state},
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def get_milestone(
    self,
    milestone_id: int,
    repo: Optional[str] = None
) -> Dict:
    """Fetch a single milestone by its numeric ID.

    Args:
        milestone_id: Milestone ID.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        Milestone dict.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Getting milestone #{milestone_id} from {owner}/{target_repo}")
    resp = self.session.get(
        f"{self.base_url}/repos/{owner}/{target_repo}/milestones/{milestone_id}"
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def create_milestone(
    self,
    title: str,
    description: Optional[str] = None,
    due_on: Optional[str] = None,
    repo: Optional[str] = None
) -> Dict:
    """Create a new milestone.

    Args:
        title: Milestone title.
        description: Optional description; omitted from the request when falsy.
        due_on: Optional due date (ISO 8601); omitted when falsy.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        The created milestone dict.
    """
    owner, target_repo = self._parse_repo(repo)
    # Optional fields are left out of the payload entirely rather than
    # sent as null/empty values.
    payload: Dict = {'title': title}
    if description:
        payload['description'] = description
    if due_on:
        payload['due_on'] = due_on
    logger.info(f"Creating milestone '{title}' in {owner}/{target_repo}")
    resp = self.session.post(
        f"{self.base_url}/repos/{owner}/{target_repo}/milestones",
        json=payload,
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def update_milestone(
    self,
    milestone_id: int,
    title: Optional[str] = None,
    description: Optional[str] = None,
    state: Optional[str] = None,
    due_on: Optional[str] = None,
    repo: Optional[str] = None
) -> Dict:
    """Patch a milestone; only explicitly provided (non-None) fields are sent.

    Args:
        milestone_id: Milestone ID.
        title: New title, if given.
        description: New description, if given.
        state: New state ('open' or 'closed'), if given.
        due_on: New due date, if given.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        The updated milestone dict.
    """
    owner, target_repo = self._parse_repo(repo)
    # None means "leave unchanged"; empty strings are sent deliberately.
    candidates = {
        'title': title,
        'description': description,
        'state': state,
        'due_on': due_on,
    }
    payload = {key: val for key, val in candidates.items() if val is not None}
    logger.info(f"Updating milestone #{milestone_id} in {owner}/{target_repo}")
    resp = self.session.patch(
        f"{self.base_url}/repos/{owner}/{target_repo}/milestones/{milestone_id}",
        json=payload,
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def delete_milestone(
    self,
    milestone_id: int,
    repo: Optional[str] = None
) -> bool:
    """Delete a milestone.

    Args:
        milestone_id: Milestone ID.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        True on success; an HTTP error raises instead.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Deleting milestone #{milestone_id} from {owner}/{target_repo}")
    resp = self.session.delete(
        f"{self.base_url}/repos/{owner}/{target_repo}/milestones/{milestone_id}"
    )
    resp.raise_for_status()
    return True
|
||||
|
||||
# ========================================
|
||||
# ISSUE DEPENDENCY OPERATIONS
|
||||
# ========================================
|
||||
|
||||
def list_issue_dependencies(
    self,
    issue_number: int,
    repo: Optional[str] = None
) -> List[Dict]:
    """List the issues that block the given issue (its dependencies).

    Args:
        issue_number: Issue number.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        List of blocking issue dicts.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Listing dependencies for issue #{issue_number} in {owner}/{target_repo}")
    resp = self.session.get(
        f"{self.base_url}/repos/{owner}/{target_repo}/issues/{issue_number}/dependencies"
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def create_issue_dependency(
    self,
    issue_number: int,
    depends_on: int,
    repo: Optional[str] = None
) -> Dict:
    """Mark *issue_number* as blocked by *depends_on* (same repository).

    Args:
        issue_number: The issue that will depend on another.
        depends_on: The issue that blocks *issue_number*.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        Created dependency information from the API.
    """
    owner, target_repo = self._parse_repo(repo)
    # NOTE(review): confirm the 'dependentIssue' body shape against the
    # target Gitea version's dependency API.
    payload = {
        'dependentIssue': {
            'owner': owner,
            'repo': target_repo,
            'index': depends_on
        }
    }
    logger.info(f"Creating dependency: #{issue_number} depends on #{depends_on} in {owner}/{target_repo}")
    resp = self.session.post(
        f"{self.base_url}/repos/{owner}/{target_repo}/issues/{issue_number}/dependencies",
        json=payload,
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def remove_issue_dependency(
    self,
    issue_number: int,
    depends_on: int,
    repo: Optional[str] = None
) -> bool:
    """Remove the "blocked by" link between *issue_number* and *depends_on*.

    Args:
        issue_number: The issue that currently depends on another.
        depends_on: The issue being depended on.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        True on success; an HTTP error raises instead.
    """
    owner, target_repo = self._parse_repo(repo)
    # Body mirrors create_issue_dependency; the DELETE carries the same
    # dependentIssue descriptor.
    payload = {
        'dependentIssue': {
            'owner': owner,
            'repo': target_repo,
            'index': depends_on
        }
    }
    logger.info(f"Removing dependency: #{issue_number} no longer depends on #{depends_on}")
    resp = self.session.delete(
        f"{self.base_url}/repos/{owner}/{target_repo}/issues/{issue_number}/dependencies",
        json=payload,
    )
    resp.raise_for_status()
    return True
|
||||
|
||||
def list_issue_blocks(
    self,
    issue_number: int,
    repo: Optional[str] = None
) -> List[Dict]:
    """List the issues that this issue blocks (the reverse of dependencies).

    Args:
        issue_number: Issue number.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        List of blocked issue dicts.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Listing issues blocked by #{issue_number} in {owner}/{target_repo}")
    resp = self.session.get(
        f"{self.base_url}/repos/{owner}/{target_repo}/issues/{issue_number}/blocks"
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
# ========================================
|
||||
# REPOSITORY VALIDATION
|
||||
# ========================================
|
||||
|
||||
def get_repo_info(self, repo: Optional[str] = None) -> Dict:
    """Fetch repository metadata (includes the owner record with its type).

    Args:
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        Repository dict from the Gitea API.
    """
    owner, target_repo = self._parse_repo(repo)
    logger.info(f"Getting repo info for {owner}/{target_repo}")
    resp = self.session.get(f"{self.base_url}/repos/{owner}/{target_repo}")
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
def is_org_repo(self, repo: Optional[str] = None) -> bool:
    """Return True when the repository owner is an organization, not a user.

    The check is case-insensitive on the API's ``owner.type`` field; a
    missing owner record yields False.
    """
    owner_record = self.get_repo_info(repo).get('owner', {})
    return owner_record.get('type', '').lower() == 'organization'
|
||||
|
||||
def get_branch_protection(
    self,
    branch: str,
    repo: Optional[str] = None
) -> Optional[Dict]:
    """Return protection rules for *branch*, or None when none exist.

    A 404 from the API is interpreted as "no protection configured";
    any other HTTP error propagates to the caller.

    Args:
        branch: Branch name.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.
    """
    owner, target_repo = self._parse_repo(repo)
    url = f"{self.base_url}/repos/{owner}/{target_repo}/branch_protections/{branch}"
    logger.info(f"Getting branch protection for {branch} in {owner}/{target_repo}")
    try:
        resp = self.session.get(url)
        resp.raise_for_status()
        return resp.json()
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            return None  # No protection rules
        raise
|
||||
|
||||
def create_label(
    self,
    name: str,
    color: str,
    description: Optional[str] = None,
    repo: Optional[str] = None
) -> Dict:
    """Create a repository label.

    Args:
        name: Label name.
        color: Hex color; any leading '#' characters are stripped.
        description: Optional description; omitted from the request when falsy.
        repo: Repository in ``owner/repo`` form; defaults to the configured one.

    Returns:
        The created label dict.
    """
    owner, target_repo = self._parse_repo(repo)
    payload: Dict = {
        'name': name,
        'color': color.lstrip('#')  # API wants the bare hex value
    }
    if description:
        payload['description'] = description
    logger.info(f"Creating label '{name}' in {owner}/{target_repo}")
    resp = self.session.post(
        f"{self.base_url}/repos/{owner}/{target_repo}/labels",
        json=payload,
    )
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
@@ -14,6 +14,9 @@ from .config import GiteaConfig
|
||||
from .gitea_client import GiteaClient
|
||||
from .tools.issues import IssueTools
|
||||
from .tools.labels import LabelTools
|
||||
from .tools.wiki import WikiTools
|
||||
from .tools.milestones import MilestoneTools
|
||||
from .tools.dependencies import DependencyTools
|
||||
|
||||
# Suppress noisy MCP validation warnings on stderr
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
@@ -31,6 +34,9 @@ class GiteaMCPServer:
|
||||
self.client = None
|
||||
self.issue_tools = None
|
||||
self.label_tools = None
|
||||
self.wiki_tools = None
|
||||
self.milestone_tools = None
|
||||
self.dependency_tools = None
|
||||
|
||||
async def initialize(self):
|
||||
"""
|
||||
@@ -46,6 +52,9 @@ class GiteaMCPServer:
|
||||
self.client = GiteaClient()
|
||||
self.issue_tools = IssueTools(self.client)
|
||||
self.label_tools = LabelTools(self.client)
|
||||
self.wiki_tools = WikiTools(self.client)
|
||||
self.milestone_tools = MilestoneTools(self.client)
|
||||
self.dependency_tools = DependencyTools(self.client)
|
||||
|
||||
logger.info(f"Gitea MCP Server initialized in {self.config['mode']} mode")
|
||||
except Exception as e:
|
||||
@@ -237,6 +246,398 @@ class GiteaMCPServer:
|
||||
},
|
||||
"required": ["org"]
|
||||
}
|
||||
),
|
||||
# Wiki Tools (Lessons Learned)
|
||||
Tool(
|
||||
name="list_wiki_pages",
|
||||
description="List all wiki pages in repository",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="get_wiki_page",
|
||||
description="Get a specific wiki page by name",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"page_name": {
|
||||
"type": "string",
|
||||
"description": "Wiki page name/path"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["page_name"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="create_wiki_page",
|
||||
description="Create a new wiki page",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "Page title/name"
|
||||
},
|
||||
"content": {
|
||||
"type": "string",
|
||||
"description": "Page content (markdown)"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["title", "content"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="update_wiki_page",
|
||||
description="Update an existing wiki page",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"page_name": {
|
||||
"type": "string",
|
||||
"description": "Wiki page name/path"
|
||||
},
|
||||
"content": {
|
||||
"type": "string",
|
||||
"description": "New page content (markdown)"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["page_name", "content"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="create_lesson",
|
||||
description="Create a lessons learned entry in the wiki",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "Lesson title (e.g., 'Sprint 16 - Prevent Infinite Loops')"
|
||||
},
|
||||
"content": {
|
||||
"type": "string",
|
||||
"description": "Lesson content (markdown with context, problem, solution, prevention)"
|
||||
},
|
||||
"tags": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"description": "Tags for categorization"
|
||||
},
|
||||
"category": {
|
||||
"type": "string",
|
||||
"default": "sprints",
|
||||
"description": "Category (sprints, patterns, architecture, etc.)"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["title", "content", "tags"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="search_lessons",
|
||||
description="Search lessons learned from previous sprints",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "Search query (optional)"
|
||||
},
|
||||
"tags": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"description": "Tags to filter by (optional)"
|
||||
},
|
||||
"limit": {
|
||||
"type": "integer",
|
||||
"default": 20,
|
||||
"description": "Maximum results"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
# Milestone Tools
|
||||
Tool(
|
||||
name="list_milestones",
|
||||
description="List all milestones in repository",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": ["open", "closed", "all"],
|
||||
"default": "open",
|
||||
"description": "Milestone state filter"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="get_milestone",
|
||||
description="Get a specific milestone by ID",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"milestone_id": {
|
||||
"type": "integer",
|
||||
"description": "Milestone ID"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["milestone_id"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="create_milestone",
|
||||
description="Create a new milestone",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "Milestone title"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "Milestone description"
|
||||
},
|
||||
"due_on": {
|
||||
"type": "string",
|
||||
"description": "Due date (ISO 8601 format)"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["title"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="update_milestone",
|
||||
description="Update an existing milestone",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"milestone_id": {
|
||||
"type": "integer",
|
||||
"description": "Milestone ID"
|
||||
},
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "New title"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "New description"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": ["open", "closed"],
|
||||
"description": "New state"
|
||||
},
|
||||
"due_on": {
|
||||
"type": "string",
|
||||
"description": "New due date"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["milestone_id"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="delete_milestone",
|
||||
description="Delete a milestone",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"milestone_id": {
|
||||
"type": "integer",
|
||||
"description": "Milestone ID"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["milestone_id"]
|
||||
}
|
||||
),
|
||||
# Dependency Tools
|
||||
Tool(
|
||||
name="list_issue_dependencies",
|
||||
description="List all dependencies for an issue (issues that block this one)",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"issue_number": {
|
||||
"type": "integer",
|
||||
"description": "Issue number"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["issue_number"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="create_issue_dependency",
|
||||
description="Create a dependency (issue depends on another issue)",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"issue_number": {
|
||||
"type": "integer",
|
||||
"description": "Issue that will depend on another"
|
||||
},
|
||||
"depends_on": {
|
||||
"type": "integer",
|
||||
"description": "Issue that blocks issue_number"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["issue_number", "depends_on"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="remove_issue_dependency",
|
||||
description="Remove a dependency between issues",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"issue_number": {
|
||||
"type": "integer",
|
||||
"description": "Issue that depends on another"
|
||||
},
|
||||
"depends_on": {
|
||||
"type": "integer",
|
||||
"description": "Issue being depended on"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["issue_number", "depends_on"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="get_execution_order",
|
||||
description="Get parallelizable execution order for issues based on dependencies",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"issue_numbers": {
|
||||
"type": "array",
|
||||
"items": {"type": "integer"},
|
||||
"description": "List of issue numbers to analyze"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["issue_numbers"]
|
||||
}
|
||||
),
|
||||
# Validation Tools
|
||||
Tool(
|
||||
name="validate_repo_org",
|
||||
description="Check if repository belongs to an organization",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="get_branch_protection",
|
||||
description="Get branch protection rules",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"branch": {
|
||||
"type": "string",
|
||||
"description": "Branch name"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["branch"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="create_label",
|
||||
description="Create a new label in the repository",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Label name"
|
||||
},
|
||||
"color": {
|
||||
"type": "string",
|
||||
"description": "Label color (hex code)"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "Label description"
|
||||
},
|
||||
"repo": {
|
||||
"type": "string",
|
||||
"description": "Repository name (owner/repo format)"
|
||||
}
|
||||
},
|
||||
"required": ["name", "color"]
|
||||
}
|
||||
)
|
||||
]
|
||||
|
||||
@@ -270,6 +671,61 @@ class GiteaMCPServer:
|
||||
result = await self.label_tools.suggest_labels(**arguments)
|
||||
elif name == "aggregate_issues":
|
||||
result = await self.issue_tools.aggregate_issues(**arguments)
|
||||
# Wiki tools
|
||||
elif name == "list_wiki_pages":
|
||||
result = await self.wiki_tools.list_wiki_pages(**arguments)
|
||||
elif name == "get_wiki_page":
|
||||
result = await self.wiki_tools.get_wiki_page(**arguments)
|
||||
elif name == "create_wiki_page":
|
||||
result = await self.wiki_tools.create_wiki_page(**arguments)
|
||||
elif name == "update_wiki_page":
|
||||
result = await self.wiki_tools.update_wiki_page(**arguments)
|
||||
elif name == "create_lesson":
|
||||
result = await self.wiki_tools.create_lesson(**arguments)
|
||||
elif name == "search_lessons":
|
||||
tags = arguments.get('tags')
|
||||
result = await self.wiki_tools.search_lessons(
|
||||
query=arguments.get('query'),
|
||||
tags=tags,
|
||||
limit=arguments.get('limit', 20),
|
||||
repo=arguments.get('repo')
|
||||
)
|
||||
# Milestone tools
|
||||
elif name == "list_milestones":
|
||||
result = await self.milestone_tools.list_milestones(**arguments)
|
||||
elif name == "get_milestone":
|
||||
result = await self.milestone_tools.get_milestone(**arguments)
|
||||
elif name == "create_milestone":
|
||||
result = await self.milestone_tools.create_milestone(**arguments)
|
||||
elif name == "update_milestone":
|
||||
result = await self.milestone_tools.update_milestone(**arguments)
|
||||
elif name == "delete_milestone":
|
||||
result = await self.milestone_tools.delete_milestone(**arguments)
|
||||
# Dependency tools
|
||||
elif name == "list_issue_dependencies":
|
||||
result = await self.dependency_tools.list_issue_dependencies(**arguments)
|
||||
elif name == "create_issue_dependency":
|
||||
result = await self.dependency_tools.create_issue_dependency(**arguments)
|
||||
elif name == "remove_issue_dependency":
|
||||
result = await self.dependency_tools.remove_issue_dependency(**arguments)
|
||||
elif name == "get_execution_order":
|
||||
result = await self.dependency_tools.get_execution_order(**arguments)
|
||||
# Validation tools
|
||||
elif name == "validate_repo_org":
|
||||
is_org = self.client.is_org_repo(arguments.get('repo'))
|
||||
result = {'is_organization': is_org}
|
||||
elif name == "get_branch_protection":
|
||||
result = self.client.get_branch_protection(
|
||||
arguments['branch'],
|
||||
arguments.get('repo')
|
||||
)
|
||||
elif name == "create_label":
|
||||
result = self.client.create_label(
|
||||
arguments['name'],
|
||||
arguments['color'],
|
||||
arguments.get('description'),
|
||||
arguments.get('repo')
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Unknown tool: {name}")
|
||||
|
||||
|
||||
@@ -0,0 +1,216 @@
|
||||
"""
|
||||
Issue dependency management tools for MCP server.
|
||||
|
||||
Provides async wrappers for issue dependency operations:
|
||||
- List/create/remove dependencies
|
||||
- Build dependency graphs for parallel execution
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import List, Dict, Optional, Set, Tuple
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DependencyTools:
|
||||
"""Async wrappers for Gitea issue dependency operations"""
|
||||
|
||||
def __init__(self, gitea_client):
    """
    Initialize dependency tools.

    The client is stored as-is; every async method delegates to it through
    a thread-pool executor, so any object exposing the synchronous
    GiteaClient dependency methods works here.

    Args:
        gitea_client: GiteaClient instance (synchronous HTTP client)
    """
    self.gitea = gitea_client
|
||||
|
||||
async def list_issue_dependencies(
    self,
    issue_number: int,
    repo: Optional[str] = None
) -> List[Dict]:
    """
    List all dependencies for an issue (issues that block this one).

    The blocking synchronous client call is offloaded to the default
    thread-pool executor so the event loop stays responsive.

    Args:
        issue_number: Issue number
        repo: Repository in owner/repo format

    Returns:
        List of issues that this issue depends on
    """
    # get_running_loop() is the correct call inside a coroutine;
    # get_event_loop() is deprecated in this context since Python 3.10.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        None,
        lambda: self.gitea.list_issue_dependencies(issue_number, repo)
    )
|
||||
|
||||
async def create_issue_dependency(
    self,
    issue_number: int,
    depends_on: int,
    repo: Optional[str] = None
) -> Dict:
    """
    Create a dependency between issues.

    The blocking synchronous client call is offloaded to the default
    thread-pool executor so the event loop stays responsive.

    Args:
        issue_number: The issue that will depend on another
        depends_on: The issue that blocks issue_number
        repo: Repository in owner/repo format

    Returns:
        Created dependency information
    """
    # get_running_loop() replaces the deprecated get_event_loop() here.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        None,
        lambda: self.gitea.create_issue_dependency(issue_number, depends_on, repo)
    )
|
||||
|
||||
async def remove_issue_dependency(
    self,
    issue_number: int,
    depends_on: int,
    repo: Optional[str] = None
) -> bool:
    """
    Remove a dependency between issues.

    The blocking synchronous client call is offloaded to the default
    thread-pool executor so the event loop stays responsive.

    Args:
        issue_number: The issue that currently depends on another
        depends_on: The issue being depended on
        repo: Repository in owner/repo format

    Returns:
        True if removed successfully
    """
    # get_running_loop() replaces the deprecated get_event_loop() here.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        None,
        lambda: self.gitea.remove_issue_dependency(issue_number, depends_on, repo)
    )
|
||||
|
||||
async def list_issue_blocks(
    self,
    issue_number: int,
    repo: Optional[str] = None
) -> List[Dict]:
    """
    List all issues that this issue blocks.

    Args:
        issue_number: Issue number
        repo: Repository in owner/repo format

    Returns:
        List of issues blocked by this issue
    """
    # get_running_loop() instead of the deprecated get_event_loop()
    # (deprecated inside coroutines since Python 3.10).
    loop = asyncio.get_running_loop()
    # Offload the synchronous GiteaClient call to the default executor.
    return await loop.run_in_executor(
        None,
        lambda: self.gitea.list_issue_blocks(issue_number, repo)
    )
|
||||
|
||||
async def build_dependency_graph(
    self,
    issue_numbers: List[int],
    repo: Optional[str] = None
) -> Dict[int, List[int]]:
    """
    Build a dependency graph for a list of issues.

    Dependency lists are fetched concurrently (one request per issue);
    an issue whose dependencies cannot be fetched is treated as having
    none, matching the previous best-effort behavior.

    Args:
        issue_numbers: List of issue numbers to analyze
        repo: Repository in owner/repo format

    Returns:
        Dictionary mapping issue_number -> list of issues it depends on,
        restricted to issues that appear in issue_numbers
    """
    # Fetch all dependency lists concurrently instead of awaiting them
    # one at a time (was O(n) sequential round-trips).
    results = await asyncio.gather(
        *(self.list_issue_dependencies(num, repo) for num in issue_numbers),
        return_exceptions=True,
    )
    wanted = set(issue_numbers)  # O(1) membership instead of list scans
    graph: Dict[int, List[int]] = {}
    for issue_num, deps in zip(issue_numbers, results):
        if isinstance(deps, BaseException):
            # Same best-effort fallback as before: log and record no deps.
            logger.warning(f"Could not fetch dependencies for #{issue_num}: {deps}")
            graph[issue_num] = []
            continue
        dep_ids = []
        for d in deps:
            # Gitea payloads may use 'number' or 'index' for the issue id;
            # look it up once. NOTE: a 0 id would be treated as missing
            # (same as the original code); issue numbers start at 1.
            dep_id = d.get('number') or d.get('index')
            if dep_id in wanted:
                dep_ids.append(dep_id)
        graph[issue_num] = dep_ids
    return graph
|
||||
|
||||
async def get_ready_tasks(
    self,
    issue_numbers: List[int],
    completed: Set[int],
    repo: Optional[str] = None
) -> List[int]:
    """
    Return the issues that can be worked on right now.

    An issue is "ready" when it is not yet completed and every issue it
    depends on is already in the completed set.

    Args:
        issue_numbers: List of all issue numbers in sprint
        completed: Set of already completed issue numbers
        repo: Repository in owner/repo format

    Returns:
        List of issue numbers that can be executed now
    """
    dependency_map = await self.build_dependency_graph(issue_numbers, repo)
    # Keep the original ordering of issue_numbers while filtering.
    return [
        num
        for num in issue_numbers
        if num not in completed
        and all(dep in completed for dep in dependency_map.get(num, []))
    ]
|
||||
|
||||
async def get_execution_order(
    self,
    issue_numbers: List[int],
    repo: Optional[str] = None
) -> List[List[int]]:
    """
    Compute a parallelizable execution order for the given issues.

    Produces successive batches; every issue in a batch has all of its
    dependencies satisfied by earlier batches, so the issues within a
    batch may run concurrently. If no issue becomes ready (a dependency
    cycle), the remaining issues are forced into a final batch so the
    loop always terminates.

    Args:
        issue_numbers: List of all issue numbers
        repo: Repository in owner/repo format

    Returns:
        List of batches, where each batch can be executed in parallel
    """
    graph = await self.build_dependency_graph(issue_numbers, repo)
    done: Set[int] = set()
    pending = set(issue_numbers)
    batches: List[List[int]] = []

    while pending:
        # Everything whose dependencies are all satisfied goes in this batch.
        batch = [
            num for num in pending
            if all(dep in done for dep in graph.get(num, []))
        ]
        if not batch:
            # Circular dependency detected
            logger.error(f"Circular dependency detected! Remaining: {pending}")
            batch = list(pending)  # Force include remaining to avoid infinite loop
        batches.append(batch)
        done.update(batch)
        pending.difference_update(batch)

    return batches
|
||||
145
plugins/projman/mcp-servers/gitea/mcp_server/tools/milestones.py
Normal file
145
plugins/projman/mcp-servers/gitea/mcp_server/tools/milestones.py
Normal file
@@ -0,0 +1,145 @@
|
||||
"""
|
||||
Milestone management tools for MCP server.
|
||||
|
||||
Provides async wrappers for milestone operations:
|
||||
- CRUD operations for milestones
|
||||
- Milestone-sprint relationship tracking
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MilestoneTools:
    """Async wrappers for Gitea milestone operations.

    Every public coroutine delegates to the matching synchronous
    GiteaClient method, executed in the default thread-pool executor so
    the event loop is never blocked by HTTP I/O.
    """

    def __init__(self, gitea_client):
        """
        Initialize milestone tools.

        Args:
            gitea_client: GiteaClient instance (synchronous API client)
        """
        self.gitea = gitea_client

    async def _run(self, func):
        """Run a blocking zero-argument callable in the default executor.

        Uses get_running_loop() — the supported call inside a coroutine —
        instead of the deprecated get_event_loop() (Python 3.10+).
        """
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, func)

    async def list_milestones(
        self,
        state: str = 'open',
        repo: Optional[str] = None
    ) -> List[Dict]:
        """
        List all milestones in repository.

        Args:
            state: Milestone state (open, closed, all)
            repo: Repository in owner/repo format

        Returns:
            List of milestone dictionaries
        """
        return await self._run(lambda: self.gitea.list_milestones(state, repo))

    async def get_milestone(
        self,
        milestone_id: int,
        repo: Optional[str] = None
    ) -> Dict:
        """
        Get a specific milestone by ID.

        Args:
            milestone_id: Milestone ID
            repo: Repository in owner/repo format

        Returns:
            Milestone dictionary
        """
        return await self._run(lambda: self.gitea.get_milestone(milestone_id, repo))

    async def create_milestone(
        self,
        title: str,
        description: Optional[str] = None,
        due_on: Optional[str] = None,
        repo: Optional[str] = None
    ) -> Dict:
        """
        Create a new milestone.

        Args:
            title: Milestone title (e.g., "v2.0 Release", "Sprint 17")
            description: Milestone description
            due_on: Due date in ISO 8601 format (e.g., "2025-02-01T00:00:00Z")
            repo: Repository in owner/repo format

        Returns:
            Created milestone dictionary
        """
        return await self._run(
            lambda: self.gitea.create_milestone(title, description, due_on, repo)
        )

    async def update_milestone(
        self,
        milestone_id: int,
        title: Optional[str] = None,
        description: Optional[str] = None,
        state: Optional[str] = None,
        due_on: Optional[str] = None,
        repo: Optional[str] = None
    ) -> Dict:
        """
        Update an existing milestone.

        Args:
            milestone_id: Milestone ID
            title: New title (optional)
            description: New description (optional)
            state: New state - 'open' or 'closed' (optional)
            due_on: New due date (optional)
            repo: Repository in owner/repo format

        Returns:
            Updated milestone dictionary
        """
        return await self._run(
            lambda: self.gitea.update_milestone(
                milestone_id, title, description, state, due_on, repo
            )
        )

    async def delete_milestone(
        self,
        milestone_id: int,
        repo: Optional[str] = None
    ) -> bool:
        """
        Delete a milestone.

        Args:
            milestone_id: Milestone ID
            repo: Repository in owner/repo format

        Returns:
            True if deleted successfully
        """
        return await self._run(lambda: self.gitea.delete_milestone(milestone_id, repo))
|
||||
149
plugins/projman/mcp-servers/gitea/mcp_server/tools/wiki.py
Normal file
149
plugins/projman/mcp-servers/gitea/mcp_server/tools/wiki.py
Normal file
@@ -0,0 +1,149 @@
|
||||
"""
|
||||
Wiki management tools for MCP server.
|
||||
|
||||
Provides async wrappers for wiki operations to support lessons learned:
|
||||
- Page CRUD operations
|
||||
- Lessons learned creation and search
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WikiTools:
    """Async wrappers for Gitea wiki operations (lessons learned support).

    Every public coroutine delegates to the matching synchronous
    GiteaClient method, executed in the default thread-pool executor so
    the event loop is never blocked by HTTP I/O.
    """

    def __init__(self, gitea_client):
        """
        Initialize wiki tools.

        Args:
            gitea_client: GiteaClient instance (synchronous API client)
        """
        self.gitea = gitea_client

    async def _run(self, func):
        """Run a blocking zero-argument callable in the default executor.

        Uses get_running_loop() — the supported call inside a coroutine —
        instead of the deprecated get_event_loop() (Python 3.10+).
        """
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, func)

    async def list_wiki_pages(self, repo: Optional[str] = None) -> List[Dict]:
        """List all wiki pages in repository."""
        return await self._run(lambda: self.gitea.list_wiki_pages(repo))

    async def get_wiki_page(
        self,
        page_name: str,
        repo: Optional[str] = None
    ) -> Dict:
        """Get a specific wiki page by name."""
        return await self._run(lambda: self.gitea.get_wiki_page(page_name, repo))

    async def create_wiki_page(
        self,
        title: str,
        content: str,
        repo: Optional[str] = None
    ) -> Dict:
        """Create a new wiki page."""
        return await self._run(lambda: self.gitea.create_wiki_page(title, content, repo))

    async def update_wiki_page(
        self,
        page_name: str,
        content: str,
        repo: Optional[str] = None
    ) -> Dict:
        """Update an existing wiki page."""
        return await self._run(lambda: self.gitea.update_wiki_page(page_name, content, repo))

    async def delete_wiki_page(
        self,
        page_name: str,
        repo: Optional[str] = None
    ) -> bool:
        """Delete a wiki page."""
        return await self._run(lambda: self.gitea.delete_wiki_page(page_name, repo))

    async def search_wiki_pages(
        self,
        query: str,
        repo: Optional[str] = None
    ) -> List[Dict]:
        """Search wiki pages by title."""
        return await self._run(lambda: self.gitea.search_wiki_pages(query, repo))

    async def create_lesson(
        self,
        title: str,
        content: str,
        tags: List[str],
        category: str = "sprints",
        repo: Optional[str] = None
    ) -> Dict:
        """
        Create a lessons learned entry in the wiki.

        Args:
            title: Lesson title (e.g., "Sprint 16 - Prevent Infinite Loops")
            content: Lesson content in markdown
            tags: List of tags for categorization
            category: Category (sprints, patterns, architecture, etc.)
            repo: Repository in owner/repo format

        Returns:
            Created wiki page
        """
        return await self._run(
            lambda: self.gitea.create_lesson(title, content, tags, category, repo)
        )

    async def search_lessons(
        self,
        query: Optional[str] = None,
        tags: Optional[List[str]] = None,
        limit: int = 20,
        repo: Optional[str] = None
    ) -> List[Dict]:
        """
        Search lessons learned from previous sprints.

        Args:
            query: Search query (optional)
            tags: Tags to filter by (optional)
            limit: Maximum results (default 20)
            repo: Repository in owner/repo format

        Returns:
            List of matching lessons, truncated to at most `limit` entries
        """
        results = await self._run(lambda: self.gitea.search_lessons(query, tags, repo))
        # Truncation happens client-side; the underlying search has no limit arg.
        return results[:limit]
|
||||
@@ -1,413 +0,0 @@
|
||||
# Wiki.js MCP Server
|
||||
|
||||
Model Context Protocol (MCP) server for Wiki.js integration with Claude Code.
|
||||
|
||||
## Overview
|
||||
|
||||
The Wiki.js MCP Server provides Claude Code with direct access to Wiki.js for documentation management, lessons learned capture, and knowledge base operations. It supports both single-project (project mode) and company-wide (PMO mode) operations.
|
||||
|
||||
**Status**: ✅ Phase 1.1b Complete - Fully functional and tested
|
||||
|
||||
## Features
|
||||
|
||||
### Core Functionality
|
||||
|
||||
- **Page Management**: CRUD operations for Wiki.js pages with markdown content
|
||||
- **Lessons Learned**: Systematic capture and searchable repository of sprint insights
|
||||
- **Mode Detection**: Automatic project vs company-wide mode detection
|
||||
- **Hybrid Configuration**: System-level credentials + project-level paths
|
||||
- **PMO Support**: Company-wide documentation and cross-project lesson search
|
||||
|
||||
### Tools Provided
|
||||
|
||||
| Tool | Description | Mode |
|
||||
|------|-------------|------|
|
||||
| `search_pages` | Search pages by keywords and tags | Both |
|
||||
| `get_page` | Get specific page content | Both |
|
||||
| `create_page` | Create new page with markdown content | Both |
|
||||
| `update_page` | Update existing page | Both |
|
||||
| `list_pages` | List pages under a path | Both |
|
||||
| `create_lesson` | Create lessons learned entry | Both |
|
||||
| `search_lessons` | Search lessons from previous sprints | Both |
|
||||
| `tag_lesson` | Add/update tags on lessons | Both |
|
||||
|
||||
## Architecture
|
||||
|
||||
### Directory Structure
|
||||
|
||||
```
|
||||
mcp-servers/wikijs/
|
||||
├── .venv/ # Python virtual environment
|
||||
├── requirements.txt # Python dependencies
|
||||
├── mcp_server/
|
||||
│ ├── __init__.py
|
||||
│ ├── server.py # MCP server entry point
|
||||
│ ├── config.py # Configuration loader
|
||||
│ ├── wikijs_client.py # Wiki.js GraphQL client
|
||||
│ └── tools/
|
||||
│ ├── __init__.py
|
||||
│ ├── pages.py # Page management tools
|
||||
│ └── lessons_learned.py # Lessons learned tools
|
||||
├── tests/
|
||||
│ ├── __init__.py
|
||||
│ ├── test_config.py
|
||||
│ └── test_wikijs_client.py
|
||||
├── README.md # This file
|
||||
└── TESTING.md # Testing instructions
|
||||
```
|
||||
|
||||
### Mode Detection
|
||||
|
||||
The server operates in two modes based on environment variables:
|
||||
|
||||
**Project Mode** (Single Project):
|
||||
- When `WIKIJS_PROJECT` is set
|
||||
- Operates on single project path
|
||||
- Used by `projman` plugin
|
||||
- Pages scoped to `/base_path/project/`
|
||||
|
||||
**Company Mode** (Multi-Project / PMO):
|
||||
- When `WIKIJS_PROJECT` is NOT set
|
||||
- Operates on all projects in organization
|
||||
- Used by `projman-pmo` plugin
|
||||
- Pages scoped to `/base_path/`
|
||||
|
||||
### GraphQL Integration
|
||||
|
||||
The server uses Wiki.js GraphQL API for all operations:
|
||||
- **Pages API**: Create, read, update, list, search pages
|
||||
- **Tags**: Categorize and filter content
|
||||
- **Search**: Full-text search with tag filtering
|
||||
- **Lessons Learned**: Specialized workflow for sprint insights
|
||||
|
||||
## Installation
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.10 or higher
|
||||
- Access to Wiki.js instance with API token
|
||||
- GraphQL API enabled on Wiki.js
|
||||
|
||||
### Step 1: Install Dependencies
|
||||
|
||||
```bash
|
||||
cd mcp-servers/wikijs
|
||||
python3 -m venv .venv
|
||||
source .venv/bin/activate # Linux/Mac
|
||||
# or .venv\Scripts\activate # Windows
|
||||
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
### Step 2: System Configuration
|
||||
|
||||
Create system-level configuration with credentials:
|
||||
|
||||
```bash
|
||||
mkdir -p ~/.config/claude
|
||||
|
||||
cat > ~/.config/claude/wikijs.env << 'EOF'
|
||||
# Wiki.js API Configuration
|
||||
WIKIJS_API_URL=http://wikijs.hotport/graphql
|
||||
WIKIJS_API_TOKEN=your_api_token_here
|
||||
WIKIJS_BASE_PATH=/your-org
|
||||
EOF
|
||||
|
||||
chmod 600 ~/.config/claude/wikijs.env
|
||||
```
|
||||
|
||||
**Obtaining Wiki.js API Token:**
|
||||
1. Log in to Wiki.js as administrator
|
||||
2. Navigate to Administration → API Access
|
||||
3. Click "New API Key"
|
||||
4. Set permissions: Pages (read/write), Search (read)
|
||||
5. Copy the generated JWT token
|
||||
|
||||
### Step 3: Project Configuration (Optional)
|
||||
|
||||
For project-scoped operations, create `.env` in project root:
|
||||
|
||||
```bash
|
||||
# In your project directory
|
||||
cat > .env << 'EOF'
|
||||
# Wiki.js project path
|
||||
WIKIJS_PROJECT=projects/your-project-name
|
||||
EOF
|
||||
|
||||
# Add to .gitignore
|
||||
echo ".env" >> .gitignore
|
||||
```
|
||||
|
||||
**Note:** Omit `.env` for company-wide (PMO) mode.
|
||||
|
||||
## Usage
|
||||
|
||||
### Running the MCP Server
|
||||
|
||||
```bash
|
||||
cd mcp-servers/wikijs
|
||||
source .venv/bin/activate
|
||||
python -m mcp_server.server
|
||||
```
|
||||
|
||||
The server runs as a stdio-based MCP server and communicates via JSON-RPC 2.0.
|
||||
|
||||
### Integration with Claude Code
|
||||
|
||||
The MCP server is referenced in plugin `.mcp.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"wikijs": {
|
||||
"command": "python",
|
||||
"args": ["-m", "mcp_server.server"],
|
||||
"cwd": "${CLAUDE_PLUGIN_ROOT}/../mcp-servers/wikijs",
|
||||
"env": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Example Tool Calls
|
||||
|
||||
**Search Pages:**
|
||||
```json
|
||||
{
|
||||
"name": "search_pages",
|
||||
"arguments": {
|
||||
"query": "API documentation",
|
||||
"tags": "backend,api",
|
||||
"limit": 10
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Create Lesson Learned:**
|
||||
```json
|
||||
{
|
||||
"name": "create_lesson",
|
||||
"arguments": {
|
||||
"title": "Sprint 16 - Prevent Claude Code Infinite Loops",
|
||||
"content": "## Problem\\n\\nClaude Code entered infinite loop...\\n\\n## Solution\\n\\n...",
|
||||
"tags": "claude-code,testing,validation",
|
||||
"category": "sprints"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Search Lessons:**
|
||||
```json
|
||||
{
|
||||
"name": "search_lessons",
|
||||
"arguments": {
|
||||
"query": "validation",
|
||||
"tags": "testing,claude-code",
|
||||
"limit": 20
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration Reference
|
||||
|
||||
### Required Variables
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `WIKIJS_API_URL` | Wiki.js GraphQL endpoint | `http://wiki.example.com/graphql` |
|
||||
| `WIKIJS_API_TOKEN` | API authentication token (JWT) | `eyJhbGciOiJSUzI1...` |
|
||||
| `WIKIJS_BASE_PATH` | Base path in Wiki.js | `/your-org` |
|
||||
|
||||
### Optional Variables
|
||||
|
||||
| Variable | Description | Mode |
|
||||
|----------|-------------|------|
|
||||
| `WIKIJS_PROJECT` | Project-specific path | Project mode only |
|
||||
|
||||
### Configuration Priority
|
||||
|
||||
1. Project-level `.env` (overrides system)
|
||||
2. System-level `~/.config/claude/wikijs.env`
|
||||
|
||||
## Wiki.js Structure
|
||||
|
||||
### Recommended Organization
|
||||
|
||||
```
|
||||
/your-org/ # Base path
|
||||
├── projects/ # Project-specific
|
||||
│ ├── your-project/
|
||||
│ │ ├── lessons-learned/
|
||||
│ │ │ ├── sprints/
|
||||
│ │ │ ├── patterns/
|
||||
│ │ │ └── INDEX.md
|
||||
│ │ └── documentation/
|
||||
│ ├── another-project/
|
||||
│ └── shared-library/
|
||||
├── company/ # Company-wide
|
||||
│ ├── processes/
|
||||
│ ├── standards/
|
||||
│ └── tools/
|
||||
└── shared/ # Cross-project
|
||||
├── architecture-patterns/
|
||||
├── best-practices/
|
||||
└── tech-stack/
|
||||
```
|
||||
|
||||
### Lessons Learned Categories
|
||||
|
||||
- **sprints/**: Sprint-specific lessons and retrospectives
|
||||
- **patterns/**: Recurring patterns and solutions
|
||||
- **architecture/**: Architectural decisions and outcomes
|
||||
- **tools/**: Tool-specific tips and gotchas
|
||||
|
||||
## Testing
|
||||
|
||||
See [TESTING.md](./TESTING.md) for comprehensive testing instructions.
|
||||
|
||||
**Quick Test:**
|
||||
```bash
|
||||
source .venv/bin/activate
|
||||
pytest -v
|
||||
```
|
||||
|
||||
**Test Coverage:**
|
||||
- 18 tests covering all major functionality
|
||||
- Mock-based unit tests (fast)
|
||||
- Integration tests with real Wiki.js instance
|
||||
- Configuration validation
|
||||
- Mode detection
|
||||
- Error handling
|
||||
|
||||
## Lessons Learned System
|
||||
|
||||
### Why This Matters
|
||||
|
||||
After 15 sprints without systematic lesson capture, repeated mistakes occurred:
|
||||
- Claude Code infinite loops on similar issues: 2-3 times
|
||||
- Same architectural mistakes: Multiple occurrences
|
||||
- Forgotten optimizations: Re-discovered each time
|
||||
|
||||
**Solution:** Mandatory lessons learned capture at sprint close, searchable at sprint start.
|
||||
|
||||
### Workflow
|
||||
|
||||
**Sprint Close (Orchestrator):**
|
||||
1. Capture what went wrong
|
||||
2. Document what went right
|
||||
3. Note preventable repetitions
|
||||
4. Tag for discoverability
|
||||
|
||||
**Sprint Start (Planner):**
|
||||
1. Search relevant lessons by tags/keywords
|
||||
2. Review applicable patterns
|
||||
3. Apply preventive measures
|
||||
4. Avoid known pitfalls
|
||||
|
||||
### Lesson Structure
|
||||
|
||||
```markdown
|
||||
# Sprint X - [Lesson Title]
|
||||
|
||||
## Context
|
||||
[What were you trying to do?]
|
||||
|
||||
## Problem
|
||||
[What went wrong or what insight emerged?]
|
||||
|
||||
## Solution
|
||||
[How did you solve it?]
|
||||
|
||||
## Prevention
|
||||
[How can this be avoided or optimized in the future?]
|
||||
|
||||
## Tags
|
||||
[Comma-separated tags for search]
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Connection Errors
|
||||
|
||||
**Error:** `Failed to connect to Wiki.js GraphQL endpoint`
|
||||
|
||||
**Solutions:**
|
||||
- Verify `WIKIJS_API_URL` is correct and includes `/graphql`
|
||||
- Check Wiki.js is running and accessible
|
||||
- Ensure GraphQL API is enabled in Wiki.js admin settings
|
||||
|
||||
### Authentication Errors
|
||||
|
||||
**Error:** `Unauthorized` or `Invalid token`
|
||||
|
||||
**Solutions:**
|
||||
- Verify API token is correct and not expired
|
||||
- Check token has required permissions (Pages: read/write, Search: read)
|
||||
- Regenerate token in Wiki.js admin if needed
|
||||
|
||||
### Permission Errors
|
||||
|
||||
**Error:** `Page creation failed: Permission denied`
|
||||
|
||||
**Solutions:**
|
||||
- Verify API key has write permissions
|
||||
- Check user/group permissions in Wiki.js
|
||||
- Ensure base path exists and is accessible
|
||||
|
||||
### Mode Detection Issues
|
||||
|
||||
**Error:** Operating in wrong mode
|
||||
|
||||
**Solutions:**
|
||||
- Check `WIKIJS_PROJECT` environment variable
|
||||
- Clear project `.env` for company mode
|
||||
- Verify configuration loading order (project overrides system)
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Never commit tokens**: Keep `~/.config/claude/wikijs.env` and `.env` out of git
|
||||
2. **Token scope**: Use minimum required permissions (Pages + Search)
|
||||
3. **Token rotation**: Regenerate tokens periodically
|
||||
4. **Access control**: Use Wiki.js groups/permissions for sensitive docs
|
||||
5. **Audit logs**: Review Wiki.js audit logs for unexpected operations
|
||||
|
||||
## Performance
|
||||
|
||||
- **GraphQL queries**: Optimized for minimal data transfer
|
||||
- **Search**: Indexed by Wiki.js for fast results
|
||||
- **Pagination**: Configurable result limits (default: 20)
|
||||
- **Caching**: Wiki.js handles internal caching
|
||||
|
||||
## Development
|
||||
|
||||
### Running Tests
|
||||
|
||||
```bash
|
||||
# All tests
|
||||
pytest -v
|
||||
|
||||
# Specific test file
|
||||
pytest tests/test_config.py -v
|
||||
|
||||
# Integration tests only
|
||||
pytest tests/test_wikijs_client.py -v -k integration
|
||||
```
|
||||
|
||||
### Code Structure
|
||||
|
||||
- `config.py`: Configuration loading and validation
|
||||
- `wikijs_client.py`: GraphQL client implementation
|
||||
- `server.py`: MCP server setup and tool routing
|
||||
- `tools/pages.py`: Page management MCP tools
|
||||
- `tools/lessons_learned.py`: Lessons learned MCP tools
|
||||
|
||||
## License
|
||||
|
||||
MIT License - See repository root for details
|
||||
|
||||
## Support
|
||||
|
||||
For issues and questions:
|
||||
- **Repository**: `ssh://git@hotserv.tailc9b278.ts.net:2222/bandit/support-claude-mktplace.git`
|
||||
- **Issues**: Contact repository maintainer
|
||||
- **Documentation**: `/docs/references/MCP-WIKIJS.md`
|
||||
@@ -1,503 +0,0 @@
|
||||
# Testing Guide - Wiki.js MCP Server
|
||||
|
||||
This document provides comprehensive testing instructions for the Wiki.js MCP Server.
|
||||
|
||||
## Test Suite Overview
|
||||
|
||||
The test suite includes:
|
||||
- **18 unit tests** with mocks (fast, no external dependencies)
|
||||
- **Integration tests** with real Wiki.js instance (requires live Wiki.js)
|
||||
- **Configuration validation** tests
|
||||
- **Mode detection** tests
|
||||
- **GraphQL client** tests
|
||||
- **Error handling** tests
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### For Unit Tests (Mocked)
|
||||
- Python 3.10+
|
||||
- Virtual environment with dependencies installed
|
||||
- No external services required
|
||||
|
||||
### For Integration Tests
|
||||
- Everything from unit tests, plus:
|
||||
- Running Wiki.js instance
|
||||
- Valid API token with permissions
|
||||
- System configuration file (`~/.config/claude/wikijs.env`)
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Run All Unit Tests
|
||||
|
||||
```bash
|
||||
cd mcp-servers/wikijs
|
||||
source .venv/bin/activate
|
||||
pytest -v
|
||||
```
|
||||
|
||||
**Expected Output:**
|
||||
```
|
||||
==================== test session starts ====================
|
||||
tests/test_config.py::test_load_system_config PASSED [ 5%]
|
||||
tests/test_config.py::test_project_config_override PASSED [ 11%]
|
||||
...
|
||||
==================== 18 passed in 0.40s ====================
|
||||
```
|
||||
|
||||
### Run Integration Tests
|
||||
|
||||
```bash
|
||||
# Set up system configuration first
|
||||
mkdir -p ~/.config/claude
|
||||
cat > ~/.config/claude/wikijs.env << 'EOF'
|
||||
WIKIJS_API_URL=http://wikijs.hotport/graphql
|
||||
WIKIJS_API_TOKEN=your_real_token_here
|
||||
WIKIJS_BASE_PATH=/your-org
|
||||
EOF
|
||||
|
||||
# Run integration tests
|
||||
pytest -v -m integration
|
||||
```
|
||||
|
||||
## Test Categories
|
||||
|
||||
### 1. Configuration Tests (`test_config.py`)
|
||||
|
||||
Tests the hybrid configuration system and mode detection.
|
||||
|
||||
**Tests:**
|
||||
- `test_load_system_config`: System-level config loading
|
||||
- `test_project_config_override`: Project overrides system
|
||||
- `test_missing_system_config`: Error when config missing
|
||||
- `test_missing_required_config`: Validation of required vars
|
||||
- `test_mode_detection_project`: Project mode detection
|
||||
- `test_mode_detection_company`: Company mode detection
|
||||
|
||||
**Run:**
|
||||
```bash
|
||||
pytest tests/test_config.py -v
|
||||
```
|
||||
|
||||
### 2. Wiki.js Client Tests (`test_wikijs_client.py`)
|
||||
|
||||
Tests the GraphQL client and all Wiki.js operations.
|
||||
|
||||
**Tests:**
|
||||
- `test_client_initialization`: Client setup
|
||||
- `test_company_mode_initialization`: Company mode setup
|
||||
- `test_get_full_path_project_mode`: Path construction (project)
|
||||
- `test_get_full_path_company_mode`: Path construction (company)
|
||||
- `test_search_pages`: Page search
|
||||
- `test_get_page`: Single page retrieval
|
||||
- `test_create_page`: Page creation
|
||||
- `test_update_page`: Page updates
|
||||
- `test_list_pages`: List pages with filtering
|
||||
- `test_create_lesson`: Lessons learned creation
|
||||
- `test_search_lessons`: Lesson search
|
||||
- `test_graphql_error_handling`: Error handling
|
||||
|
||||
**Run:**
|
||||
```bash
|
||||
pytest tests/test_wikijs_client.py -v
|
||||
```
|
||||
|
||||
## Integration Testing
|
||||
|
||||
### Setup Integration Environment
|
||||
|
||||
**Step 1: Configure Wiki.js**
|
||||
|
||||
Create a test namespace in Wiki.js:
|
||||
```
|
||||
/test-integration/
|
||||
├── projects/
|
||||
│ └── test-project/
|
||||
│ ├── documentation/
|
||||
│ └── lessons-learned/
|
||||
└── shared/
|
||||
```
|
||||
|
||||
**Step 2: Configure System**
|
||||
|
||||
```bash
|
||||
cat > ~/.config/claude/wikijs.env << 'EOF'
|
||||
WIKIJS_API_URL=http://wikijs.hotport/graphql
|
||||
WIKIJS_API_TOKEN=your_token_here
|
||||
WIKIJS_BASE_PATH=/test-integration
|
||||
EOF
|
||||
```
|
||||
|
||||
**Step 3: Configure Project**
|
||||
|
||||
```bash
|
||||
# In test directory
|
||||
cat > .env << 'EOF'
|
||||
WIKIJS_PROJECT=projects/test-project
|
||||
EOF
|
||||
```
|
||||
|
||||
### Run Integration Tests
|
||||
|
||||
```bash
|
||||
# Mark tests for integration
|
||||
pytest -v -m integration
|
||||
|
||||
# Run specific integration test
|
||||
pytest tests/test_wikijs_client.py::test_create_page -v -m integration
|
||||
```
|
||||
|
||||
### Integration Test Scenarios
|
||||
|
||||
**Scenario 1: Page Lifecycle**
|
||||
1. Create page with `create_page`
|
||||
2. Retrieve with `get_page`
|
||||
3. Update with `update_page`
|
||||
4. Search for page with `search_pages`
|
||||
5. Cleanup (manual via Wiki.js UI)
|
||||
|
||||
**Scenario 2: Lessons Learned Workflow**
|
||||
1. Create lesson with `create_lesson`
|
||||
2. Search lessons with `search_lessons`
|
||||
3. Add tags with `tag_lesson`
|
||||
4. Verify searchability
|
||||
|
||||
**Scenario 3: Mode Detection**
|
||||
1. Test in project mode (with `WIKIJS_PROJECT`)
|
||||
2. Test in company mode (without `WIKIJS_PROJECT`)
|
||||
3. Verify path scoping
|
||||
|
||||
## Manual Testing
|
||||
|
||||
### Test 1: Create and Retrieve Page
|
||||
|
||||
```bash
|
||||
# Start MCP server
|
||||
python -m mcp_server.server
|
||||
|
||||
# In another terminal, send MCP request
|
||||
echo '{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 1,
|
||||
"method": "tools/call",
|
||||
"params": {
|
||||
"name": "create_page",
|
||||
"arguments": {
|
||||
"path": "documentation/test-api",
|
||||
"title": "Test API Documentation",
|
||||
"content": "# Test API\\n\\nThis is a test page.",
|
||||
"tags": "api,testing",
|
||||
"publish": true
|
||||
}
|
||||
}
|
||||
}' | python -m mcp_server.server
|
||||
```
|
||||
|
||||
**Expected Result:**
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"page": {
|
||||
"id": 123,
|
||||
"path": "/your-org/projects/test-project/documentation/test-api",
|
||||
"title": "Test API Documentation"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Test 2: Search Lessons
|
||||
|
||||
```bash
|
||||
echo '{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 2,
|
||||
"method": "tools/call",
|
||||
"params": {
|
||||
"name": "search_lessons",
|
||||
"arguments": {
|
||||
"query": "validation",
|
||||
"tags": "testing,claude-code",
|
||||
"limit": 10
|
||||
}
|
||||
}
|
||||
}' | python -m mcp_server.server
|
||||
```
|
||||
|
||||
**Expected Result:**
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"count": 2,
|
||||
"lessons": [...]
|
||||
}
|
||||
```
|
||||
|
||||
### Test 3: Mode Detection
|
||||
|
||||
**Project Mode:**
|
||||
```bash
|
||||
# Create .env with WIKIJS_PROJECT
|
||||
echo "WIKIJS_PROJECT=projects/test-project" > .env
|
||||
|
||||
# Start server and check logs
|
||||
python -m mcp_server.server 2>&1 | grep "mode"
|
||||
```
|
||||
|
||||
**Expected Log:**
|
||||
```
|
||||
INFO:Running in project mode: projects/test-project
|
||||
```
|
||||
|
||||
**Company Mode:**
|
||||
```bash
|
||||
# Remove .env
|
||||
rm .env
|
||||
|
||||
# Start server and check logs
|
||||
python -m mcp_server.server 2>&1 | grep "mode"
|
||||
```
|
||||
|
||||
**Expected Log:**
|
||||
```
|
||||
INFO:Running in company-wide mode (PMO)
|
||||
```
|
||||
|
||||
## Test Data Management
|
||||
|
||||
### Cleanup Test Data
|
||||
|
||||
After integration tests, clean up test pages in Wiki.js:
|
||||
|
||||
```bash
|
||||
# Via Wiki.js UI
|
||||
1. Navigate to /test-integration/
|
||||
2. Select test pages
|
||||
3. Delete
|
||||
|
||||
# Or via GraphQL (advanced)
|
||||
curl -X POST http://wikijs.hotport/graphql \
|
||||
-H "Authorization: Bearer $WIKIJS_API_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"query": "mutation { pages { delete(id: 123) { responseResult { succeeded } } } }"
|
||||
}'
|
||||
```
|
||||
|
||||
### Test Data Fixtures
|
||||
|
||||
For repeatable testing, create fixtures:
|
||||
|
||||
```python
|
||||
# tests/conftest.py
|
||||
import pytest
|
||||
|
||||
@pytest.fixture
|
||||
async def test_page():
|
||||
"""Create a test page and clean up after"""
|
||||
client = WikiJSClient(...)
|
||||
page = await client.create_page(
|
||||
path="test/fixture-page",
|
||||
title="Test Fixture",
|
||||
content="# Test"
|
||||
)
|
||||
yield page
|
||||
# Cleanup after test
|
||||
await client.delete_page(page['id'])
|
||||
```
|
||||
|
||||
## Continuous Integration
|
||||
|
||||
### GitHub Actions / Gitea Actions
|
||||
|
||||
```yaml
|
||||
name: Test Wiki.js MCP Server
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: mcp-servers/wikijs
|
||||
run: |
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install -r requirements.txt
|
||||
|
||||
- name: Run unit tests
|
||||
working-directory: mcp-servers/wikijs
|
||||
run: |
|
||||
source .venv/bin/activate
|
||||
pytest -v
|
||||
|
||||
# Integration tests (optional, requires Wiki.js instance)
|
||||
- name: Run integration tests
|
||||
if: env.WIKIJS_API_TOKEN != ''
|
||||
working-directory: mcp-servers/wikijs
|
||||
env:
|
||||
WIKIJS_API_URL: ${{ secrets.WIKIJS_API_URL }}
|
||||
WIKIJS_API_TOKEN: ${{ secrets.WIKIJS_API_TOKEN }}
|
||||
WIKIJS_BASE_PATH: /test-integration
|
||||
run: |
|
||||
source .venv/bin/activate
|
||||
pytest -v -m integration
|
||||
```
|
||||
|
||||
## Debugging Tests
|
||||
|
||||
### Enable Verbose Logging
|
||||
|
||||
```bash
|
||||
# Set log level to DEBUG
|
||||
export PYTHONLOG=DEBUG
|
||||
pytest -v -s
|
||||
```
|
||||
|
||||
### Run Single Test with Debugging
|
||||
|
||||
```bash
|
||||
# Run specific test with print statements visible
|
||||
pytest tests/test_config.py::test_load_system_config -v -s
|
||||
|
||||
# Use pytest debugger
|
||||
pytest tests/test_config.py::test_load_system_config --pdb
|
||||
```
|
||||
|
||||
### Inspect GraphQL Queries
|
||||
|
||||
Add logging to see actual GraphQL queries:
|
||||
|
||||
```python
|
||||
# In wikijs_client.py
|
||||
async def _execute_query(self, query: str, variables: Optional[Dict[str, Any]] = None):
|
||||
logger.info(f"GraphQL Query: {query}")
|
||||
logger.info(f"Variables: {variables}")
|
||||
# ... rest of method
|
||||
```
|
||||
|
||||
## Test Coverage
|
||||
|
||||
### Generate Coverage Report
|
||||
|
||||
```bash
|
||||
pip install pytest-cov
|
||||
|
||||
# Run with coverage
|
||||
pytest --cov=mcp_server --cov-report=html
|
||||
|
||||
# Open report
|
||||
open htmlcov/index.html
|
||||
```
|
||||
|
||||
**Target Coverage:** 90%+ for all modules
|
||||
|
||||
## Performance Testing
|
||||
|
||||
### Benchmark GraphQL Operations
|
||||
|
||||
```python
|
||||
import time
|
||||
|
||||
async def benchmark_search():
|
||||
client = WikiJSClient(...)
|
||||
start = time.time()
|
||||
results = await client.search_pages("test")
|
||||
elapsed = time.time() - start
|
||||
print(f"Search took {elapsed:.3f}s")
|
||||
```
|
||||
|
||||
**Expected Performance:**
|
||||
- Search: < 500ms
|
||||
- Get page: < 200ms
|
||||
- Create page: < 1s
|
||||
- Update page: < 500ms
|
||||
|
||||
## Common Test Failures
|
||||
|
||||
### 1. Configuration Not Found
|
||||
|
||||
**Error:**
|
||||
```
|
||||
FileNotFoundError: System config not found: ~/.config/claude/wikijs.env
|
||||
```
|
||||
|
||||
**Solution:**
|
||||
```bash
|
||||
mkdir -p ~/.config/claude
|
||||
cat > ~/.config/claude/wikijs.env << 'EOF'
|
||||
WIKIJS_API_URL=http://wikijs.hotport/graphql
|
||||
WIKIJS_API_TOKEN=test_token
|
||||
WIKIJS_BASE_PATH=/test
|
||||
EOF
|
||||
```
|
||||
|
||||
### 2. GraphQL Connection Error
|
||||
|
||||
**Error:**
|
||||
```
|
||||
httpx.ConnectError: Connection refused
|
||||
```
|
||||
|
||||
**Solution:**
|
||||
- Verify Wiki.js is running
|
||||
- Check `WIKIJS_API_URL` is correct
|
||||
- Ensure `/graphql` endpoint is accessible
|
||||
|
||||
### 3. Permission Denied
|
||||
|
||||
**Error:**
|
||||
```
|
||||
ValueError: Failed to create page: Permission denied
|
||||
```
|
||||
|
||||
**Solution:**
|
||||
- Regenerate API token with write permissions
|
||||
- Check Wiki.js user/group permissions
|
||||
- Verify base path exists and is accessible
|
||||
|
||||
### 4. Environment Variable Pollution
|
||||
|
||||
**Error:**
|
||||
```
|
||||
AssertionError: assert 'project' == 'company'
|
||||
```
|
||||
|
||||
**Solution:**
|
||||
```python
|
||||
# In test, clear environment
|
||||
monkeypatch.delenv('WIKIJS_PROJECT', raising=False)
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Isolate Tests**: Each test should be independent
|
||||
2. **Mock External Calls**: Use mocks for unit tests
|
||||
3. **Clean Up Resources**: Delete test pages after integration tests
|
||||
4. **Use Fixtures**: Reuse common setup/teardown
|
||||
5. **Test Error Cases**: Not just happy paths
|
||||
6. **Document Assumptions**: Comment what tests expect
|
||||
7. **Consistent Naming**: Follow `test_<what>_<scenario>` pattern
|
||||
|
||||
## Next Steps
|
||||
|
||||
After testing passes:
|
||||
1. Review code coverage report
|
||||
2. Add integration tests for edge cases
|
||||
3. Document any new test scenarios
|
||||
4. Update CI/CD pipeline
|
||||
5. Create test data fixtures for common scenarios
|
||||
|
||||
## Support
|
||||
|
||||
For testing issues:
|
||||
- Check test logs: `pytest -v -s`
|
||||
- Review Wiki.js logs
|
||||
- Verify configuration files
|
||||
- See main README.md troubleshooting section
|
||||
@@ -1,3 +0,0 @@
|
||||
"""Wiki.js MCP Server for Claude Code."""
|
||||
|
||||
__version__ = "0.1.0"
|
||||
@@ -1,102 +0,0 @@
|
||||
"""
|
||||
Configuration loader for Wiki.js MCP Server.
|
||||
|
||||
Implements hybrid configuration system:
|
||||
- System-level: ~/.config/claude/wikijs.env (credentials)
|
||||
- Project-level: .env (project path specification)
|
||||
"""
|
||||
from pathlib import Path
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
import logging
|
||||
from typing import Dict, Optional
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WikiJSConfig:
|
||||
"""Hybrid configuration loader with mode detection"""
|
||||
|
||||
def __init__(self):
|
||||
self.api_url: Optional[str] = None
|
||||
self.api_token: Optional[str] = None
|
||||
self.base_path: Optional[str] = None
|
||||
self.project: Optional[str] = None
|
||||
self.mode: str = 'project'
|
||||
|
||||
def load(self) -> Dict[str, Optional[str]]:
|
||||
"""
|
||||
Load configuration from system and project levels.
|
||||
Project-level configuration overrides system-level.
|
||||
|
||||
Returns:
|
||||
Dict containing api_url, api_token, base_path, project, mode
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If system config is missing
|
||||
ValueError: If required configuration is missing
|
||||
"""
|
||||
# Load system config
|
||||
system_config = Path.home() / '.config' / 'claude' / 'wikijs.env'
|
||||
if system_config.exists():
|
||||
load_dotenv(system_config)
|
||||
logger.info(f"Loaded system configuration from {system_config}")
|
||||
else:
|
||||
raise FileNotFoundError(
|
||||
f"System config not found: {system_config}\n"
|
||||
"Create it with: mkdir -p ~/.config/claude && "
|
||||
"cat > ~/.config/claude/wikijs.env"
|
||||
)
|
||||
|
||||
# Load project config (overrides system)
|
||||
project_config = Path.cwd() / '.env'
|
||||
if project_config.exists():
|
||||
load_dotenv(project_config, override=True)
|
||||
logger.info(f"Loaded project configuration from {project_config}")
|
||||
|
||||
# Extract values
|
||||
self.api_url = os.getenv('WIKIJS_API_URL')
|
||||
self.api_token = os.getenv('WIKIJS_API_TOKEN')
|
||||
self.base_path = os.getenv('WIKIJS_BASE_PATH')
|
||||
self.project = os.getenv('WIKIJS_PROJECT') # Optional for PMO
|
||||
|
||||
# Detect mode
|
||||
if self.project:
|
||||
self.mode = 'project'
|
||||
logger.info(f"Running in project mode: {self.project}")
|
||||
else:
|
||||
self.mode = 'company'
|
||||
logger.info("Running in company-wide mode (PMO)")
|
||||
|
||||
# Validate required variables
|
||||
self._validate()
|
||||
|
||||
return {
|
||||
'api_url': self.api_url,
|
||||
'api_token': self.api_token,
|
||||
'base_path': self.base_path,
|
||||
'project': self.project,
|
||||
'mode': self.mode
|
||||
}
|
||||
|
||||
def _validate(self) -> None:
|
||||
"""
|
||||
Validate that required configuration is present.
|
||||
|
||||
Raises:
|
||||
ValueError: If required configuration is missing
|
||||
"""
|
||||
required = {
|
||||
'WIKIJS_API_URL': self.api_url,
|
||||
'WIKIJS_API_TOKEN': self.api_token,
|
||||
'WIKIJS_BASE_PATH': self.base_path
|
||||
}
|
||||
|
||||
missing = [key for key, value in required.items() if not value]
|
||||
|
||||
if missing:
|
||||
raise ValueError(
|
||||
f"Missing required configuration: {', '.join(missing)}\n"
|
||||
"Check your ~/.config/claude/wikijs.env file"
|
||||
)
|
||||
@@ -1,385 +0,0 @@
|
||||
"""
|
||||
MCP Server entry point for Wiki.js integration.
|
||||
|
||||
Provides Wiki.js tools to Claude Code via JSON-RPC 2.0 over stdio.
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
import json
|
||||
from mcp.server import Server
|
||||
from mcp.server.stdio import stdio_server
|
||||
from mcp.types import Tool, TextContent
|
||||
|
||||
from .config import WikiJSConfig
|
||||
from .wikijs_client import WikiJSClient
|
||||
|
||||
# Suppress noisy MCP validation warnings on stderr
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logging.getLogger("root").setLevel(logging.ERROR)
|
||||
logging.getLogger("mcp").setLevel(logging.ERROR)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WikiJSMCPServer:
|
||||
"""MCP Server for Wiki.js integration"""
|
||||
|
||||
def __init__(self):
|
||||
self.server = Server("wikijs-mcp")
|
||||
self.config = None
|
||||
self.client = None
|
||||
|
||||
async def initialize(self):
|
||||
"""
|
||||
Initialize server and load configuration.
|
||||
|
||||
Raises:
|
||||
Exception: If initialization fails
|
||||
"""
|
||||
try:
|
||||
config_loader = WikiJSConfig()
|
||||
self.config = config_loader.load()
|
||||
|
||||
self.client = WikiJSClient(
|
||||
api_url=self.config['api_url'],
|
||||
api_token=self.config['api_token'],
|
||||
base_path=self.config['base_path'],
|
||||
project=self.config.get('project')
|
||||
)
|
||||
|
||||
logger.info(f"Wiki.js MCP Server initialized in {self.config['mode']} mode")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to initialize: {e}")
|
||||
raise
|
||||
|
||||
def setup_tools(self):
|
||||
"""Register all available tools with the MCP server"""
|
||||
|
||||
@self.server.list_tools()
|
||||
async def list_tools() -> list[Tool]:
|
||||
"""Return list of available tools"""
|
||||
return [
|
||||
Tool(
|
||||
name="search_pages",
|
||||
description="Search Wiki.js pages by keywords and tags",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "Search query string"
|
||||
},
|
||||
"tags": {
|
||||
"type": "string",
|
||||
"description": "Comma-separated tags to filter by (optional)"
|
||||
},
|
||||
"limit": {
|
||||
"type": "integer",
|
||||
"default": 20,
|
||||
"description": "Maximum results to return"
|
||||
}
|
||||
},
|
||||
"required": ["query"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="get_page",
|
||||
description="Get a specific page by path",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "string",
|
||||
"description": "Page path (relative or absolute)"
|
||||
}
|
||||
},
|
||||
"required": ["path"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="create_page",
|
||||
description="Create a new Wiki.js page",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "string",
|
||||
"description": "Page path relative to project/base"
|
||||
},
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "Page title"
|
||||
},
|
||||
"content": {
|
||||
"type": "string",
|
||||
"description": "Page content (markdown)"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "Page description (optional)"
|
||||
},
|
||||
"tags": {
|
||||
"type": "string",
|
||||
"description": "Comma-separated tags (optional)"
|
||||
},
|
||||
"publish": {
|
||||
"type": "boolean",
|
||||
"default": True,
|
||||
"description": "Publish immediately"
|
||||
}
|
||||
},
|
||||
"required": ["path", "title", "content"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="update_page",
|
||||
description="Update an existing Wiki.js page",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"page_id": {
|
||||
"type": "integer",
|
||||
"description": "Page ID"
|
||||
},
|
||||
"content": {
|
||||
"type": "string",
|
||||
"description": "New content (optional)"
|
||||
},
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "New title (optional)"
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "New description (optional)"
|
||||
},
|
||||
"tags": {
|
||||
"type": "string",
|
||||
"description": "New comma-separated tags (optional)"
|
||||
},
|
||||
"publish": {
|
||||
"type": "boolean",
|
||||
"description": "New publish status (optional)"
|
||||
}
|
||||
},
|
||||
"required": ["page_id"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="list_pages",
|
||||
description="List pages under a specific path",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path_prefix": {
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"description": "Path prefix to filter by"
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="create_lesson",
|
||||
description="Create a lessons learned entry to prevent repeating mistakes",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string",
|
||||
"description": "Lesson title (e.g., 'Sprint 16 - Prevent Infinite Loops')"
|
||||
},
|
||||
"content": {
|
||||
"type": "string",
|
||||
"description": "Lesson content (markdown with problem, solution, prevention)"
|
||||
},
|
||||
"tags": {
|
||||
"type": "string",
|
||||
"description": "Comma-separated tags for categorization"
|
||||
},
|
||||
"category": {
|
||||
"type": "string",
|
||||
"default": "sprints",
|
||||
"description": "Category (sprints, patterns, architecture, etc.)"
|
||||
}
|
||||
},
|
||||
"required": ["title", "content", "tags"]
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="search_lessons",
|
||||
description="Search lessons learned from previous sprints to avoid known pitfalls",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "Search query (optional)"
|
||||
},
|
||||
"tags": {
|
||||
"type": "string",
|
||||
"description": "Comma-separated tags to filter by (optional)"
|
||||
},
|
||||
"limit": {
|
||||
"type": "integer",
|
||||
"default": 20,
|
||||
"description": "Maximum results"
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
Tool(
|
||||
name="tag_lesson",
|
||||
description="Add or update tags on a lessons learned entry",
|
||||
inputSchema={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"page_id": {
|
||||
"type": "integer",
|
||||
"description": "Lesson page ID"
|
||||
},
|
||||
"tags": {
|
||||
"type": "string",
|
||||
"description": "Comma-separated tags"
|
||||
}
|
||||
},
|
||||
"required": ["page_id", "tags"]
|
||||
}
|
||||
)
|
||||
]
|
||||
|
||||
@self.server.call_tool()
|
||||
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
|
||||
"""
|
||||
Handle tool invocation.
|
||||
|
||||
Args:
|
||||
name: Tool name
|
||||
arguments: Tool arguments
|
||||
|
||||
Returns:
|
||||
List of TextContent with results
|
||||
"""
|
||||
try:
|
||||
# Route to appropriate client method
|
||||
if name == "search_pages":
|
||||
tags = arguments.get('tags')
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else None
|
||||
results = await self.client.search_pages(
|
||||
query=arguments['query'],
|
||||
tags=tag_list,
|
||||
limit=arguments.get('limit', 20)
|
||||
)
|
||||
result = {'success': True, 'count': len(results), 'pages': results}
|
||||
|
||||
elif name == "get_page":
|
||||
page = await self.client.get_page(arguments['path'])
|
||||
if page:
|
||||
result = {'success': True, 'page': page}
|
||||
else:
|
||||
result = {'success': False, 'error': f"Page not found: {arguments['path']}"}
|
||||
|
||||
elif name == "create_page":
|
||||
tags = arguments.get('tags')
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else []
|
||||
page = await self.client.create_page(
|
||||
path=arguments['path'],
|
||||
title=arguments['title'],
|
||||
content=arguments['content'],
|
||||
description=arguments.get('description', ''),
|
||||
tags=tag_list,
|
||||
is_published=arguments.get('publish', True)
|
||||
)
|
||||
result = {'success': True, 'page': page}
|
||||
|
||||
elif name == "update_page":
|
||||
tags = arguments.get('tags')
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else None
|
||||
page = await self.client.update_page(
|
||||
page_id=arguments['page_id'],
|
||||
content=arguments.get('content'),
|
||||
title=arguments.get('title'),
|
||||
description=arguments.get('description'),
|
||||
tags=tag_list,
|
||||
is_published=arguments.get('publish')
|
||||
)
|
||||
result = {'success': True, 'page': page}
|
||||
|
||||
elif name == "list_pages":
|
||||
pages = await self.client.list_pages(
|
||||
path_prefix=arguments.get('path_prefix', '')
|
||||
)
|
||||
result = {'success': True, 'count': len(pages), 'pages': pages}
|
||||
|
||||
elif name == "create_lesson":
|
||||
tag_list = [t.strip() for t in arguments['tags'].split(',')]
|
||||
lesson = await self.client.create_lesson(
|
||||
title=arguments['title'],
|
||||
content=arguments['content'],
|
||||
tags=tag_list,
|
||||
category=arguments.get('category', 'sprints')
|
||||
)
|
||||
result = {
|
||||
'success': True,
|
||||
'lesson': lesson,
|
||||
'message': f"Lesson learned captured: {arguments['title']}"
|
||||
}
|
||||
|
||||
elif name == "search_lessons":
|
||||
tags = arguments.get('tags')
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else None
|
||||
lessons = await self.client.search_lessons(
|
||||
query=arguments.get('query'),
|
||||
tags=tag_list,
|
||||
limit=arguments.get('limit', 20)
|
||||
)
|
||||
result = {
|
||||
'success': True,
|
||||
'count': len(lessons),
|
||||
'lessons': lessons,
|
||||
'message': f"Found {len(lessons)} relevant lessons"
|
||||
}
|
||||
|
||||
elif name == "tag_lesson":
|
||||
tag_list = [t.strip() for t in arguments['tags'].split(',')]
|
||||
lesson = await self.client.tag_lesson(
|
||||
page_id=arguments['page_id'],
|
||||
new_tags=tag_list
|
||||
)
|
||||
result = {'success': True, 'lesson': lesson, 'message': 'Tags updated'}
|
||||
|
||||
else:
|
||||
raise ValueError(f"Unknown tool: {name}")
|
||||
|
||||
return [TextContent(
|
||||
type="text",
|
||||
text=json.dumps(result, indent=2)
|
||||
)]
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Tool {name} failed: {e}")
|
||||
return [TextContent(
|
||||
type="text",
|
||||
text=json.dumps({'success': False, 'error': str(e)}, indent=2)
|
||||
)]
|
||||
|
||||
async def run(self):
|
||||
"""Run the MCP server"""
|
||||
await self.initialize()
|
||||
self.setup_tools()
|
||||
|
||||
async with stdio_server() as (read_stream, write_stream):
|
||||
await self.server.run(
|
||||
read_stream,
|
||||
write_stream,
|
||||
self.server.create_initialization_options()
|
||||
)
|
||||
|
||||
|
||||
async def main():
|
||||
"""Main entry point"""
|
||||
server = WikiJSMCPServer()
|
||||
await server.run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
@@ -1 +0,0 @@
|
||||
"""Wiki.js MCP tools."""
|
||||
@@ -1,183 +0,0 @@
|
||||
"""
|
||||
MCP tools for Wiki.js lessons learned management.
|
||||
"""
|
||||
from typing import Dict, Any, List, Optional
|
||||
from mcp.server import Tool
|
||||
from ..wikijs_client import WikiJSClient
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_lesson_tools(client: WikiJSClient) -> List[Tool]:
|
||||
"""
|
||||
Create MCP tools for lessons learned management.
|
||||
|
||||
Args:
|
||||
client: WikiJSClient instance
|
||||
|
||||
Returns:
|
||||
List of MCP tools
|
||||
"""
|
||||
|
||||
async def create_lesson(
|
||||
title: str,
|
||||
content: str,
|
||||
tags: str,
|
||||
category: str = "sprints"
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Create a lessons learned entry.
|
||||
|
||||
After 15 sprints without systematic lesson capture, repeated mistakes occurred.
|
||||
This tool ensures lessons are captured and searchable for future sprints.
|
||||
|
||||
Args:
|
||||
title: Lesson title (e.g., "Sprint 16 - Claude Code Infinite Loop on Label Validation")
|
||||
content: Lesson content in markdown (problem, solution, prevention)
|
||||
tags: Comma-separated tags (e.g., "claude-code, testing, labels, validation")
|
||||
category: Category for organization (default: "sprints", also: "patterns", "architecture")
|
||||
|
||||
Returns:
|
||||
Created lesson page data
|
||||
|
||||
Example:
|
||||
create_lesson(
|
||||
title="Sprint 16 - Prevent Infinite Loops in Validation",
|
||||
content="## Problem\\n\\nClaude Code entered infinite loop...\\n\\n## Solution\\n\\n...",
|
||||
tags="claude-code, testing, infinite-loop, validation",
|
||||
category="sprints"
|
||||
)
|
||||
"""
|
||||
try:
|
||||
tag_list = [t.strip() for t in tags.split(',')]
|
||||
|
||||
lesson = await client.create_lesson(
|
||||
title=title,
|
||||
content=content,
|
||||
tags=tag_list,
|
||||
category=category
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'lesson': lesson,
|
||||
'message': f'Lesson learned captured: {title}'
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating lesson: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
async def search_lessons(
|
||||
query: Optional[str] = None,
|
||||
tags: Optional[str] = None,
|
||||
limit: int = 20
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Search lessons learned entries.
|
||||
|
||||
Use this at sprint start to find relevant lessons from previous sprints.
|
||||
Prevents repeating the same mistakes.
|
||||
|
||||
Args:
|
||||
query: Search query (e.g., "validation", "infinite loop", "docker")
|
||||
tags: Comma-separated tags to filter by (e.g., "claude-code, testing")
|
||||
limit: Maximum number of results (default: 20)
|
||||
|
||||
Returns:
|
||||
List of matching lessons learned
|
||||
|
||||
Example:
|
||||
# Before implementing validation logic
|
||||
search_lessons(query="validation", tags="testing, claude-code")
|
||||
|
||||
# Before working with Docker
|
||||
search_lessons(query="docker", tags="deployment")
|
||||
"""
|
||||
try:
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else None
|
||||
|
||||
lessons = await client.search_lessons(
|
||||
query=query,
|
||||
tags=tag_list,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'count': len(lessons),
|
||||
'lessons': lessons,
|
||||
'message': f'Found {len(lessons)} relevant lessons'
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error searching lessons: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
async def tag_lesson(
|
||||
page_id: int,
|
||||
tags: str
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Add or update tags on a lesson.
|
||||
|
||||
Args:
|
||||
page_id: Lesson page ID (from create_lesson or search_lessons)
|
||||
tags: Comma-separated tags (will replace existing tags)
|
||||
|
||||
Returns:
|
||||
Updated lesson data
|
||||
"""
|
||||
try:
|
||||
tag_list = [t.strip() for t in tags.split(',')]
|
||||
|
||||
lesson = await client.tag_lesson(
|
||||
page_id=page_id,
|
||||
new_tags=tag_list
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'lesson': lesson,
|
||||
'message': 'Tags updated successfully'
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error tagging lesson: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
# Define MCP tools
|
||||
tools = [
|
||||
Tool(
|
||||
name="create_lesson",
|
||||
description=(
|
||||
"Create a lessons learned entry to prevent repeating mistakes. "
|
||||
"Critical for capturing sprint insights, architectural decisions, "
|
||||
"and technical gotchas for future reference."
|
||||
),
|
||||
function=create_lesson
|
||||
),
|
||||
Tool(
|
||||
name="search_lessons",
|
||||
description=(
|
||||
"Search lessons learned from previous sprints and projects. "
|
||||
"Use this before starting new work to avoid known pitfalls and "
|
||||
"leverage past solutions."
|
||||
),
|
||||
function=search_lessons
|
||||
),
|
||||
Tool(
|
||||
name="tag_lesson",
|
||||
description="Add or update tags on a lessons learned entry for better categorization",
|
||||
function=tag_lesson
|
||||
)
|
||||
]
|
||||
|
||||
return tools
|
||||
@@ -1,229 +0,0 @@
|
||||
"""
|
||||
MCP tools for Wiki.js page management.
|
||||
"""
|
||||
from typing import Dict, Any, List, Optional
|
||||
from mcp.server import Tool
|
||||
from ..wikijs_client import WikiJSClient
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_page_tools(client: WikiJSClient) -> List[Tool]:
|
||||
"""
|
||||
Create MCP tools for page management.
|
||||
|
||||
Args:
|
||||
client: WikiJSClient instance
|
||||
|
||||
Returns:
|
||||
List of MCP tools
|
||||
"""
|
||||
|
||||
async def search_pages(
|
||||
query: str,
|
||||
tags: Optional[str] = None,
|
||||
limit: int = 20
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Search Wiki.js pages by keywords and tags.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
tags: Comma-separated list of tags to filter by
|
||||
limit: Maximum number of results (default: 20)
|
||||
|
||||
Returns:
|
||||
List of matching pages with path, title, description, and tags
|
||||
"""
|
||||
try:
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else None
|
||||
results = await client.search_pages(query, tag_list, limit)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'count': len(results),
|
||||
'pages': results
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error searching pages: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
async def get_page(path: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Get a specific page by path.
|
||||
|
||||
Args:
|
||||
path: Page path (can be relative to project or absolute)
|
||||
|
||||
Returns:
|
||||
Page data including content, metadata, and tags
|
||||
"""
|
||||
try:
|
||||
page = await client.get_page(path)
|
||||
|
||||
if page:
|
||||
return {
|
||||
'success': True,
|
||||
'page': page
|
||||
}
|
||||
else:
|
||||
return {
|
||||
'success': False,
|
||||
'error': f'Page not found: {path}'
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting page: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
async def create_page(
|
||||
path: str,
|
||||
title: str,
|
||||
content: str,
|
||||
description: str = "",
|
||||
tags: Optional[str] = None,
|
||||
publish: bool = True
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Create a new Wiki.js page.
|
||||
|
||||
Args:
|
||||
path: Page path relative to project/base (e.g., 'documentation/api')
|
||||
title: Page title
|
||||
content: Page content in markdown format
|
||||
description: Page description (optional)
|
||||
tags: Comma-separated list of tags (optional)
|
||||
publish: Whether to publish immediately (default: True)
|
||||
|
||||
Returns:
|
||||
Created page data
|
||||
"""
|
||||
try:
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else []
|
||||
|
||||
page = await client.create_page(
|
||||
path=path,
|
||||
title=title,
|
||||
content=content,
|
||||
description=description,
|
||||
tags=tag_list,
|
||||
is_published=publish
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'page': page
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating page: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
async def update_page(
|
||||
page_id: int,
|
||||
content: Optional[str] = None,
|
||||
title: Optional[str] = None,
|
||||
description: Optional[str] = None,
|
||||
tags: Optional[str] = None,
|
||||
publish: Optional[bool] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Update an existing Wiki.js page.
|
||||
|
||||
Args:
|
||||
page_id: Page ID (from get_page or search_pages)
|
||||
content: New content (optional)
|
||||
title: New title (optional)
|
||||
description: New description (optional)
|
||||
tags: New comma-separated tags (optional)
|
||||
publish: New publish status (optional)
|
||||
|
||||
Returns:
|
||||
Updated page data
|
||||
"""
|
||||
try:
|
||||
tag_list = [t.strip() for t in tags.split(',')] if tags else None
|
||||
|
||||
page = await client.update_page(
|
||||
page_id=page_id,
|
||||
content=content,
|
||||
title=title,
|
||||
description=description,
|
||||
tags=tag_list,
|
||||
is_published=publish
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'page': page
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating page: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
async def list_pages(path_prefix: str = "") -> Dict[str, Any]:
|
||||
"""
|
||||
List pages under a specific path.
|
||||
|
||||
Args:
|
||||
path_prefix: Path prefix to filter by (relative to project/base)
|
||||
|
||||
Returns:
|
||||
List of pages under the specified path
|
||||
"""
|
||||
try:
|
||||
pages = await client.list_pages(path_prefix)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'count': len(pages),
|
||||
'pages': pages
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Error listing pages: {e}")
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
# Define MCP tools
|
||||
tools = [
|
||||
Tool(
|
||||
name="search_pages",
|
||||
description="Search Wiki.js pages by keywords and tags",
|
||||
function=search_pages
|
||||
),
|
||||
Tool(
|
||||
name="get_page",
|
||||
description="Get a specific Wiki.js page by path",
|
||||
function=get_page
|
||||
),
|
||||
Tool(
|
||||
name="create_page",
|
||||
description="Create a new Wiki.js page with content and metadata",
|
||||
function=create_page
|
||||
),
|
||||
Tool(
|
||||
name="update_page",
|
||||
description="Update an existing Wiki.js page",
|
||||
function=update_page
|
||||
),
|
||||
Tool(
|
||||
name="list_pages",
|
||||
description="List pages under a specific path",
|
||||
function=list_pages
|
||||
)
|
||||
]
|
||||
|
||||
return tools
|
||||
@@ -1,451 +0,0 @@
|
||||
"""
|
||||
Wiki.js GraphQL API Client.
|
||||
|
||||
Provides methods for interacting with Wiki.js GraphQL API for page management,
|
||||
lessons learned, and documentation.
|
||||
"""
|
||||
import httpx
|
||||
from typing import List, Dict, Optional, Any
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WikiJSClient:
|
||||
"""Client for Wiki.js GraphQL API"""
|
||||
|
||||
def __init__(self, api_url: str, api_token: str, base_path: str, project: Optional[str] = None):
|
||||
"""
|
||||
Initialize Wiki.js client.
|
||||
|
||||
Args:
|
||||
api_url: Wiki.js GraphQL API URL (e.g., http://wiki.example.com/graphql)
|
||||
api_token: Wiki.js API token
|
||||
base_path: Base path in Wiki.js (e.g., /your-org)
|
||||
project: Project path (e.g., projects/my-project) for project mode
|
||||
"""
|
||||
self.api_url = api_url
|
||||
self.api_token = api_token
|
||||
self.base_path = base_path.rstrip('/')
|
||||
self.project = project
|
||||
self.mode = 'project' if project else 'company'
|
||||
|
||||
self.headers = {
|
||||
'Authorization': f'Bearer {api_token}',
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
def _get_full_path(self, relative_path: str) -> str:
|
||||
"""
|
||||
Construct full path based on mode.
|
||||
|
||||
Args:
|
||||
relative_path: Path relative to project or base
|
||||
|
||||
Returns:
|
||||
Full path in Wiki.js
|
||||
"""
|
||||
relative_path = relative_path.lstrip('/')
|
||||
|
||||
if self.mode == 'project' and self.project:
|
||||
# Project mode: base_path/project/relative_path
|
||||
return f"{self.base_path}/{self.project}/{relative_path}"
|
||||
else:
|
||||
# Company mode: base_path/relative_path
|
||||
return f"{self.base_path}/{relative_path}"
|
||||
|
||||
async def _execute_query(self, query: str, variables: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
|
||||
"""
|
||||
Execute GraphQL query.
|
||||
|
||||
Args:
|
||||
query: GraphQL query string
|
||||
variables: Query variables
|
||||
|
||||
Returns:
|
||||
Response data
|
||||
|
||||
Raises:
|
||||
httpx.HTTPError: On HTTP errors
|
||||
ValueError: On GraphQL errors
|
||||
"""
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(
|
||||
self.api_url,
|
||||
headers=self.headers,
|
||||
json={'query': query, 'variables': variables or {}}
|
||||
)
|
||||
|
||||
# Log response for debugging
|
||||
if response.status_code != 200:
|
||||
logger.error(f"HTTP {response.status_code}: {response.text}")
|
||||
|
||||
response.raise_for_status()
|
||||
|
||||
data = response.json()
|
||||
|
||||
if 'errors' in data:
|
||||
errors = data['errors']
|
||||
error_messages = [err.get('message', str(err)) for err in errors]
|
||||
raise ValueError(f"GraphQL errors: {', '.join(error_messages)}")
|
||||
|
||||
return data.get('data', {})
|
||||
|
||||
async def search_pages(
|
||||
self,
|
||||
query: str,
|
||||
tags: Optional[List[str]] = None,
|
||||
limit: int = 20
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Search pages by keywords and tags.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
tags: Filter by tags
|
||||
limit: Maximum results to return
|
||||
|
||||
Returns:
|
||||
List of matching pages
|
||||
"""
|
||||
graphql_query = """
|
||||
query SearchPages($query: String!) {
|
||||
pages {
|
||||
search(query: $query) {
|
||||
results {
|
||||
id
|
||||
path
|
||||
title
|
||||
description
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
data = await self._execute_query(graphql_query, {'query': query})
|
||||
results = data.get('pages', {}).get('search', {}).get('results', [])
|
||||
|
||||
# Filter by tags if specified
|
||||
if tags:
|
||||
tags_lower = [t.lower() for t in tags]
|
||||
results = [
|
||||
r for r in results
|
||||
if any(tag.lower() in tags_lower for tag in r.get('tags', []))
|
||||
]
|
||||
|
||||
return results[:limit]
|
||||
|
||||
async def get_page(self, path: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Get specific page by path.
|
||||
|
||||
Args:
|
||||
path: Page path (can be relative or absolute)
|
||||
|
||||
Returns:
|
||||
Page data or None if not found
|
||||
"""
|
||||
# Convert to absolute path
|
||||
if not path.startswith(self.base_path):
|
||||
path = self._get_full_path(path)
|
||||
|
||||
graphql_query = """
|
||||
query GetPage($path: String!) {
|
||||
pages {
|
||||
single(path: $path) {
|
||||
id
|
||||
path
|
||||
title
|
||||
description
|
||||
content
|
||||
tags
|
||||
createdAt
|
||||
updatedAt
|
||||
author
|
||||
isPublished
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
try:
|
||||
data = await self._execute_query(graphql_query, {'path': path})
|
||||
return data.get('pages', {}).get('single')
|
||||
except (httpx.HTTPError, ValueError) as e:
|
||||
logger.warning(f"Page not found at {path}: {e}")
|
||||
return None
|
||||
|
||||
async def create_page(
|
||||
self,
|
||||
path: str,
|
||||
title: str,
|
||||
content: str,
|
||||
description: str = "",
|
||||
tags: Optional[List[str]] = None,
|
||||
is_published: bool = True
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Create new page.
|
||||
|
||||
Args:
|
||||
path: Page path (relative to project/base)
|
||||
title: Page title
|
||||
content: Page content (markdown)
|
||||
description: Page description
|
||||
tags: Page tags
|
||||
is_published: Whether to publish immediately
|
||||
|
||||
Returns:
|
||||
Created page data
|
||||
"""
|
||||
full_path = self._get_full_path(path)
|
||||
|
||||
graphql_query = """
|
||||
mutation CreatePage($path: String!, $title: String!, $content: String!, $description: String!, $tags: [String]!, $isPublished: Boolean!, $isPrivate: Boolean!) {
|
||||
pages {
|
||||
create(
|
||||
path: $path
|
||||
title: $title
|
||||
content: $content
|
||||
description: $description
|
||||
tags: $tags
|
||||
isPublished: $isPublished
|
||||
isPrivate: $isPrivate
|
||||
editor: "markdown"
|
||||
locale: "en"
|
||||
) {
|
||||
responseResult {
|
||||
succeeded
|
||||
errorCode
|
||||
slug
|
||||
message
|
||||
}
|
||||
page {
|
||||
id
|
||||
path
|
||||
title
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
variables = {
|
||||
'path': full_path,
|
||||
'title': title,
|
||||
'content': content,
|
||||
'description': description,
|
||||
'tags': tags or [],
|
||||
'isPublished': is_published,
|
||||
'isPrivate': False # Default to not private
|
||||
}
|
||||
|
||||
data = await self._execute_query(graphql_query, variables)
|
||||
result = data.get('pages', {}).get('create', {})
|
||||
|
||||
if not result.get('responseResult', {}).get('succeeded'):
|
||||
error_msg = result.get('responseResult', {}).get('message', 'Unknown error')
|
||||
raise ValueError(f"Failed to create page: {error_msg}")
|
||||
|
||||
return result.get('page', {})
|
||||
|
||||
async def update_page(
|
||||
self,
|
||||
page_id: int,
|
||||
content: Optional[str] = None,
|
||||
title: Optional[str] = None,
|
||||
description: Optional[str] = None,
|
||||
tags: Optional[List[str]] = None,
|
||||
is_published: Optional[bool] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Update existing page.
|
||||
|
||||
Args:
|
||||
page_id: Page ID
|
||||
content: New content (if changing)
|
||||
title: New title (if changing)
|
||||
description: New description (if changing)
|
||||
tags: New tags (if changing)
|
||||
is_published: New publish status (if changing)
|
||||
|
||||
Returns:
|
||||
Updated page data
|
||||
"""
|
||||
# Build update fields dynamically
|
||||
fields = []
|
||||
variables = {'id': page_id}
|
||||
|
||||
if content is not None:
|
||||
fields.append('content: $content')
|
||||
variables['content'] = content
|
||||
|
||||
if title is not None:
|
||||
fields.append('title: $title')
|
||||
variables['title'] = title
|
||||
|
||||
if description is not None:
|
||||
fields.append('description: $description')
|
||||
variables['description'] = description
|
||||
|
||||
if tags is not None:
|
||||
fields.append('tags: $tags')
|
||||
variables['tags'] = tags
|
||||
|
||||
if is_published is not None:
|
||||
fields.append('isPublished: $isPublished')
|
||||
variables['isPublished'] = is_published
|
||||
|
||||
fields_str = ', '.join(fields)
|
||||
|
||||
graphql_query = f"""
|
||||
mutation UpdatePage($id: Int!{''.join([f', ${k}: {type(v).__name__.title()}' for k, v in variables.items() if k != 'id'])}) {{
|
||||
pages {{
|
||||
update(
|
||||
id: $id
|
||||
{fields_str}
|
||||
) {{
|
||||
responseResult {{
|
||||
succeeded
|
||||
errorCode
|
||||
message
|
||||
}}
|
||||
page {{
|
||||
id
|
||||
path
|
||||
title
|
||||
updatedAt
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
}}
|
||||
"""
|
||||
|
||||
data = await self._execute_query(graphql_query, variables)
|
||||
result = data.get('pages', {}).get('update', {})
|
||||
|
||||
if not result.get('responseResult', {}).get('succeeded'):
|
||||
error_msg = result.get('responseResult', {}).get('message', 'Unknown error')
|
||||
raise ValueError(f"Failed to update page: {error_msg}")
|
||||
|
||||
return result.get('page', {})
|
||||
|
||||
async def list_pages(self, path_prefix: str = "") -> List[Dict[str, Any]]:
|
||||
"""
|
||||
List pages under a specific path.
|
||||
|
||||
Args:
|
||||
path_prefix: Path prefix to filter (relative to project/base)
|
||||
|
||||
Returns:
|
||||
List of pages
|
||||
"""
|
||||
# Construct full path based on mode
|
||||
if path_prefix:
|
||||
full_path = self._get_full_path(path_prefix)
|
||||
else:
|
||||
# Empty path_prefix: return all pages in project (project mode) or base (company mode)
|
||||
if self.mode == 'project' and self.project:
|
||||
full_path = f"{self.base_path}/{self.project}"
|
||||
else:
|
||||
full_path = self.base_path
|
||||
|
||||
graphql_query = """
|
||||
query ListPages {
|
||||
pages {
|
||||
list {
|
||||
id
|
||||
path
|
||||
title
|
||||
description
|
||||
tags
|
||||
createdAt
|
||||
updatedAt
|
||||
isPublished
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
data = await self._execute_query(graphql_query)
|
||||
all_pages = data.get('pages', {}).get('list', [])
|
||||
|
||||
# Filter by path prefix
|
||||
if full_path:
|
||||
return [p for p in all_pages if p.get('path', '').startswith(full_path)]
|
||||
|
||||
return all_pages
|
||||
|
||||
async def create_lesson(
|
||||
self,
|
||||
title: str,
|
||||
content: str,
|
||||
tags: List[str],
|
||||
category: str = "sprints"
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Create a lessons learned entry.
|
||||
|
||||
Args:
|
||||
title: Lesson title
|
||||
content: Lesson content (markdown)
|
||||
tags: Tags for categorization
|
||||
category: Category (sprints, patterns, etc.)
|
||||
|
||||
Returns:
|
||||
Created lesson page data
|
||||
"""
|
||||
# Construct path: lessons-learned/category/title-slug
|
||||
slug = title.lower().replace(' ', '-').replace('_', '-')
|
||||
path = f"lessons-learned/{category}/{slug}"
|
||||
|
||||
return await self.create_page(
|
||||
path=path,
|
||||
title=title,
|
||||
content=content,
|
||||
description=f"Lessons learned: {title}",
|
||||
tags=tags + ['lesson-learned', category],
|
||||
is_published=True
|
||||
)
|
||||
|
||||
async def search_lessons(
|
||||
self,
|
||||
query: Optional[str] = None,
|
||||
tags: Optional[List[str]] = None,
|
||||
limit: int = 20
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Search lessons learned entries.
|
||||
|
||||
Args:
|
||||
query: Search query (optional)
|
||||
tags: Filter by tags
|
||||
limit: Maximum results
|
||||
|
||||
Returns:
|
||||
List of matching lessons
|
||||
"""
|
||||
# Search in lessons-learned path
|
||||
search_query = query or "lesson"
|
||||
|
||||
results = await self.search_pages(search_query, tags, limit)
|
||||
|
||||
# Filter to only lessons-learned path
|
||||
lessons_path = self._get_full_path("lessons-learned")
|
||||
return [r for r in results if r.get('path', '').startswith(lessons_path)]
|
||||
|
||||
async def tag_lesson(self, page_id: int, new_tags: List[str]) -> Dict[str, Any]:
|
||||
"""
|
||||
Add tags to a lesson.
|
||||
|
||||
Args:
|
||||
page_id: Lesson page ID
|
||||
new_tags: Tags to add
|
||||
|
||||
Returns:
|
||||
Updated page data
|
||||
"""
|
||||
# Get current page to merge tags
|
||||
# For now, just replace tags (can enhance to merge later)
|
||||
return await self.update_page(page_id=page_id, tags=new_tags)
|
||||
@@ -1,19 +0,0 @@
|
||||
# Wiki.js MCP Server Dependencies
|
||||
|
||||
# MCP SDK
|
||||
mcp>=0.1.0
|
||||
|
||||
# HTTP client for GraphQL
|
||||
httpx>=0.27.0
|
||||
httpx-sse>=0.4.0
|
||||
|
||||
# Configuration
|
||||
python-dotenv>=1.0.0
|
||||
|
||||
# Testing
|
||||
pytest>=8.0.0
|
||||
pytest-asyncio>=0.23.0
|
||||
pytest-mock>=3.12.0
|
||||
|
||||
# Type hints
|
||||
typing-extensions>=4.9.0
|
||||
@@ -1,185 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Integration test script for Wiki.js MCP Server.
|
||||
Tests against real Wiki.js instance.
|
||||
|
||||
Usage:
|
||||
python test_integration.py
|
||||
"""
|
||||
import asyncio
|
||||
import sys
|
||||
from mcp_server.config import WikiJSConfig
|
||||
from mcp_server.wikijs_client import WikiJSClient
|
||||
|
||||
|
||||
async def test_connection():
|
||||
"""Test basic connection to Wiki.js"""
|
||||
print("🔌 Testing Wiki.js connection...")
|
||||
|
||||
try:
|
||||
config_loader = WikiJSConfig()
|
||||
config = config_loader.load()
|
||||
|
||||
print(f"✓ Configuration loaded")
|
||||
print(f" - API URL: {config['api_url']}")
|
||||
print(f" - Base Path: {config['base_path']}")
|
||||
print(f" - Mode: {config['mode']}")
|
||||
if config.get('project'):
|
||||
print(f" - Project: {config['project']}")
|
||||
|
||||
client = WikiJSClient(
|
||||
api_url=config['api_url'],
|
||||
api_token=config['api_token'],
|
||||
base_path=config['base_path'],
|
||||
project=config.get('project')
|
||||
)
|
||||
|
||||
print("✓ Client initialized")
|
||||
return client
|
||||
|
||||
except Exception as e:
|
||||
print(f"✗ Configuration failed: {e}")
|
||||
return None
|
||||
|
||||
|
||||
async def test_list_pages(client):
|
||||
"""Test listing pages"""
|
||||
print("\n📄 Testing list_pages...")
|
||||
|
||||
try:
|
||||
pages = await client.list_pages("")
|
||||
print(f"✓ Found {len(pages)} pages")
|
||||
|
||||
if pages:
|
||||
print(f" Sample pages:")
|
||||
for page in pages[:5]:
|
||||
print(f" - {page.get('title')} ({page.get('path')})")
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"✗ List pages failed: {e}")
|
||||
return False
|
||||
|
||||
|
||||
async def test_search_pages(client):
|
||||
"""Test searching pages"""
|
||||
print("\n🔍 Testing search_pages...")
|
||||
|
||||
try:
|
||||
results = await client.search_pages("test", limit=5)
|
||||
print(f"✓ Search returned {len(results)} results")
|
||||
|
||||
if results:
|
||||
print(f" Sample results:")
|
||||
for result in results[:3]:
|
||||
print(f" - {result.get('title')}")
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"✗ Search failed: {e}")
|
||||
return False
|
||||
|
||||
|
||||
async def test_create_page(client):
|
||||
"""Test creating a page"""
|
||||
print("\n➕ Testing create_page...")
|
||||
|
||||
# Use timestamp to create unique page path
|
||||
import time
|
||||
timestamp = int(time.time())
|
||||
page_path = f"testing/integration-test-{timestamp}"
|
||||
|
||||
try:
|
||||
page = await client.create_page(
|
||||
path=page_path,
|
||||
title=f"Integration Test Page - {timestamp}",
|
||||
content="# Integration Test\n\nThis page was created by the Wiki.js MCP Server integration test.",
|
||||
description="Automated test page",
|
||||
tags=["test", "integration", "mcp"],
|
||||
is_published=False # Don't publish test page
|
||||
)
|
||||
|
||||
print(f"✓ Page created successfully")
|
||||
print(f" - ID: {page.get('id')}")
|
||||
print(f" - Path: {page.get('path')}")
|
||||
print(f" - Title: {page.get('title')}")
|
||||
|
||||
return page_path # Return path for testing get_page
|
||||
|
||||
except Exception as e:
|
||||
import traceback
|
||||
print(f"✗ Create page failed: {e}")
|
||||
print(f" Error details: {traceback.format_exc()}")
|
||||
return None
|
||||
|
||||
|
||||
async def test_get_page(client, page_path):
|
||||
"""Test getting a specific page"""
|
||||
print("\n📖 Testing get_page...")
|
||||
|
||||
try:
|
||||
page = await client.get_page(page_path)
|
||||
|
||||
if page:
|
||||
print(f"✓ Page retrieved successfully")
|
||||
print(f" - Title: {page.get('title')}")
|
||||
print(f" - Tags: {', '.join(page.get('tags', []))}")
|
||||
print(f" - Published: {page.get('isPublished')}")
|
||||
return True
|
||||
else:
|
||||
print(f"✗ Page not found: {page_path}")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
print(f"✗ Get page failed: {e}")
|
||||
return False
|
||||
|
||||
|
||||
async def main():
|
||||
"""Run all integration tests"""
|
||||
print("=" * 60)
|
||||
print("Wiki.js MCP Server - Integration Tests")
|
||||
print("=" * 60)
|
||||
|
||||
# Test connection
|
||||
client = await test_connection()
|
||||
if not client:
|
||||
print("\n❌ Integration tests failed: Cannot connect to Wiki.js")
|
||||
sys.exit(1)
|
||||
|
||||
# Run tests
|
||||
results = []
|
||||
|
||||
results.append(await test_list_pages(client))
|
||||
results.append(await test_search_pages(client))
|
||||
|
||||
page_path = await test_create_page(client)
|
||||
if page_path:
|
||||
results.append(True)
|
||||
# Test getting the created page
|
||||
results.append(await test_get_page(client, page_path))
|
||||
else:
|
||||
results.append(False)
|
||||
results.append(False)
|
||||
|
||||
# Summary
|
||||
print("\n" + "=" * 60)
|
||||
print("Test Summary")
|
||||
print("=" * 60)
|
||||
|
||||
passed = sum(results)
|
||||
total = len(results)
|
||||
|
||||
print(f"✓ Passed: {passed}/{total}")
|
||||
print(f"✗ Failed: {total - passed}/{total}")
|
||||
|
||||
if passed == total:
|
||||
print("\n✅ All integration tests passed!")
|
||||
sys.exit(0)
|
||||
else:
|
||||
print("\n❌ Some integration tests failed")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(main())
|
||||
@@ -1 +0,0 @@
|
||||
"""Tests for Wiki.js MCP Server."""
|
||||
@@ -1,109 +0,0 @@
|
||||
"""
|
||||
Tests for WikiJS configuration loader.
|
||||
"""
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch, MagicMock
|
||||
from mcp_server.config import WikiJSConfig
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_env(monkeypatch, tmp_path):
|
||||
"""Mock environment with temporary config files"""
|
||||
# Create mock system config
|
||||
system_config = tmp_path / ".config" / "claude" / "wikijs.env"
|
||||
system_config.parent.mkdir(parents=True)
|
||||
system_config.write_text(
|
||||
"WIKIJS_API_URL=http://wiki.test.com/graphql\n"
|
||||
"WIKIJS_API_TOKEN=test_token_123\n"
|
||||
"WIKIJS_BASE_PATH=/test-company\n"
|
||||
)
|
||||
|
||||
# Mock Path.home()
|
||||
with patch('pathlib.Path.home', return_value=tmp_path):
|
||||
yield tmp_path
|
||||
|
||||
|
||||
def test_load_system_config(mock_env):
|
||||
"""Test loading system-level configuration"""
|
||||
config = WikiJSConfig()
|
||||
result = config.load()
|
||||
|
||||
assert result['api_url'] == "http://wiki.test.com/graphql"
|
||||
assert result['api_token'] == "test_token_123"
|
||||
assert result['base_path'] == "/test-company"
|
||||
assert result['project'] is None
|
||||
assert result['mode'] == 'company' # No project = company mode
|
||||
|
||||
|
||||
def test_project_config_override(mock_env, tmp_path, monkeypatch):
|
||||
"""Test project-level config overrides system-level"""
|
||||
# Create project-level config
|
||||
project_config = tmp_path / ".env"
|
||||
project_config.write_text(
|
||||
"WIKIJS_PROJECT=projects/test-project\n"
|
||||
)
|
||||
|
||||
# Mock Path.cwd()
|
||||
monkeypatch.setattr('pathlib.Path.cwd', lambda: tmp_path)
|
||||
|
||||
config = WikiJSConfig()
|
||||
result = config.load()
|
||||
|
||||
assert result['api_url'] == "http://wiki.test.com/graphql" # From system
|
||||
assert result['project'] == "projects/test-project" # From project
|
||||
assert result['mode'] == 'project' # Has project = project mode
|
||||
|
||||
|
||||
def test_missing_system_config():
|
||||
"""Test error when system config is missing"""
|
||||
with patch('pathlib.Path.home', return_value=Path('/nonexistent')):
|
||||
config = WikiJSConfig()
|
||||
with pytest.raises(FileNotFoundError, match="System config not found"):
|
||||
config.load()
|
||||
|
||||
|
||||
def test_missing_required_config(mock_env, monkeypatch):
|
||||
"""Test validation of required configuration"""
|
||||
# Clear environment variables from previous tests
|
||||
monkeypatch.delenv('WIKIJS_API_URL', raising=False)
|
||||
monkeypatch.delenv('WIKIJS_API_TOKEN', raising=False)
|
||||
monkeypatch.delenv('WIKIJS_BASE_PATH', raising=False)
|
||||
monkeypatch.delenv('WIKIJS_PROJECT', raising=False)
|
||||
|
||||
# Create incomplete system config
|
||||
system_config = mock_env / ".config" / "claude" / "wikijs.env"
|
||||
system_config.write_text(
|
||||
"WIKIJS_API_URL=http://wiki.test.com/graphql\n"
|
||||
# Missing API_TOKEN and BASE_PATH
|
||||
)
|
||||
|
||||
config = WikiJSConfig()
|
||||
with pytest.raises(ValueError, match="Missing required configuration"):
|
||||
config.load()
|
||||
|
||||
|
||||
def test_mode_detection_project(mock_env, tmp_path, monkeypatch):
|
||||
"""Test mode detection when WIKIJS_PROJECT is set"""
|
||||
project_config = tmp_path / ".env"
|
||||
project_config.write_text("WIKIJS_PROJECT=projects/my-project\n")
|
||||
|
||||
monkeypatch.setattr('pathlib.Path.cwd', lambda: tmp_path)
|
||||
|
||||
config = WikiJSConfig()
|
||||
result = config.load()
|
||||
|
||||
assert result['mode'] == 'project'
|
||||
assert result['project'] == 'projects/my-project'
|
||||
|
||||
|
||||
def test_mode_detection_company(mock_env, monkeypatch):
|
||||
"""Test mode detection when WIKIJS_PROJECT is not set (company mode)"""
|
||||
# Clear WIKIJS_PROJECT from environment
|
||||
monkeypatch.delenv('WIKIJS_PROJECT', raising=False)
|
||||
|
||||
config = WikiJSConfig()
|
||||
result = config.load()
|
||||
|
||||
assert result['mode'] == 'company'
|
||||
assert result['project'] is None
|
||||
@@ -1,355 +0,0 @@
|
||||
"""
|
||||
Tests for Wiki.js GraphQL client.
|
||||
"""
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, patch, MagicMock
|
||||
from mcp_server.wikijs_client import WikiJSClient
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client():
|
||||
"""Create WikiJSClient instance for testing"""
|
||||
return WikiJSClient(
|
||||
api_url="http://wiki.test.com/graphql",
|
||||
api_token="test_token_123",
|
||||
base_path="/test-company",
|
||||
project="projects/test-project"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def company_client():
|
||||
"""Create WikiJSClient in company mode"""
|
||||
return WikiJSClient(
|
||||
api_url="http://wiki.test.com/graphql",
|
||||
api_token="test_token_123",
|
||||
base_path="/test-company",
|
||||
project=None # Company mode
|
||||
)
|
||||
|
||||
|
||||
def test_client_initialization(client):
|
||||
"""Test client initializes with correct settings"""
|
||||
assert client.api_url == "http://wiki.test.com/graphql"
|
||||
assert client.api_token == "test_token_123"
|
||||
assert client.base_path == "/test-company"
|
||||
assert client.project == "projects/test-project"
|
||||
assert client.mode == 'project'
|
||||
|
||||
|
||||
def test_company_mode_initialization(company_client):
|
||||
"""Test client initializes in company mode"""
|
||||
assert company_client.mode == 'company'
|
||||
assert company_client.project is None
|
||||
|
||||
|
||||
def test_get_full_path_project_mode(client):
|
||||
"""Test path construction in project mode"""
|
||||
path = client._get_full_path("documentation/api")
|
||||
assert path == "/test-company/projects/test-project/documentation/api"
|
||||
|
||||
|
||||
def test_get_full_path_company_mode(company_client):
|
||||
"""Test path construction in company mode"""
|
||||
path = company_client._get_full_path("shared/architecture")
|
||||
assert path == "/test-company/shared/architecture"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_pages(client):
|
||||
"""Test searching pages"""
|
||||
mock_response = {
|
||||
'data': {
|
||||
'pages': {
|
||||
'search': {
|
||||
'results': [
|
||||
{
|
||||
'id': 1,
|
||||
'path': '/test-company/projects/test-project/doc1',
|
||||
'title': 'Document 1',
|
||||
'tags': ['api', 'documentation']
|
||||
},
|
||||
{
|
||||
'id': 2,
|
||||
'path': '/test-company/projects/test-project/doc2',
|
||||
'title': 'Document 2',
|
||||
'tags': ['guide', 'tutorial']
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
results = await client.search_pages("documentation")
|
||||
|
||||
assert len(results) == 2
|
||||
assert results[0]['title'] == 'Document 1'
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_page(client):
|
||||
"""Test getting a specific page"""
|
||||
mock_response = {
|
||||
'data': {
|
||||
'pages': {
|
||||
'single': {
|
||||
'id': 1,
|
||||
'path': '/test-company/projects/test-project/doc1',
|
||||
'title': 'Document 1',
|
||||
'content': '# Test Content',
|
||||
'tags': ['api'],
|
||||
'isPublished': True
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
page = await client.get_page("doc1")
|
||||
|
||||
assert page is not None
|
||||
assert page['title'] == 'Document 1'
|
||||
assert page['content'] == '# Test Content'
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_page(client):
|
||||
"""Test creating a new page"""
|
||||
mock_response = {
|
||||
'data': {
|
||||
'pages': {
|
||||
'create': {
|
||||
'responseResult': {
|
||||
'succeeded': True,
|
||||
'errorCode': None,
|
||||
'message': 'Page created successfully'
|
||||
},
|
||||
'page': {
|
||||
'id': 1,
|
||||
'path': '/test-company/projects/test-project/new-doc',
|
||||
'title': 'New Document'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
page = await client.create_page(
|
||||
path="new-doc",
|
||||
title="New Document",
|
||||
content="# Content",
|
||||
tags=["test"]
|
||||
)
|
||||
|
||||
assert page['id'] == 1
|
||||
assert page['title'] == 'New Document'
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_page(client):
|
||||
"""Test updating a page"""
|
||||
mock_response = {
|
||||
'data': {
|
||||
'pages': {
|
||||
'update': {
|
||||
'responseResult': {
|
||||
'succeeded': True,
|
||||
'message': 'Page updated'
|
||||
},
|
||||
'page': {
|
||||
'id': 1,
|
||||
'path': '/test-company/projects/test-project/doc1',
|
||||
'title': 'Updated Title'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
page = await client.update_page(
|
||||
page_id=1,
|
||||
title="Updated Title"
|
||||
)
|
||||
|
||||
assert page['title'] == 'Updated Title'
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_pages(client):
|
||||
"""Test listing pages"""
|
||||
mock_response = {
|
||||
'data': {
|
||||
'pages': {
|
||||
'list': [
|
||||
{'id': 1, 'path': '/test-company/projects/test-project/doc1', 'title': 'Doc 1'},
|
||||
{'id': 2, 'path': '/test-company/projects/test-project/doc2', 'title': 'Doc 2'},
|
||||
{'id': 3, 'path': '/test-company/other-project/doc3', 'title': 'Doc 3'}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
# List all pages in current project
|
||||
pages = await client.list_pages("")
|
||||
|
||||
# Should only return pages from test-project
|
||||
assert len(pages) == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_lesson(client):
|
||||
"""Test creating a lesson learned"""
|
||||
mock_response = {
|
||||
'data': {
|
||||
'pages': {
|
||||
'create': {
|
||||
'responseResult': {
|
||||
'succeeded': True,
|
||||
'message': 'Lesson created'
|
||||
},
|
||||
'page': {
|
||||
'id': 1,
|
||||
'path': '/test-company/projects/test-project/lessons-learned/sprints/test-lesson',
|
||||
'title': 'Test Lesson'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
lesson = await client.create_lesson(
|
||||
title="Test Lesson",
|
||||
content="# Lesson Content",
|
||||
tags=["testing", "sprint-16"],
|
||||
category="sprints"
|
||||
)
|
||||
|
||||
assert lesson['id'] == 1
|
||||
assert 'lessons-learned' in lesson['path']
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_lessons(client):
|
||||
"""Test searching lessons learned"""
|
||||
mock_response = {
|
||||
'data': {
|
||||
'pages': {
|
||||
'search': {
|
||||
'results': [
|
||||
{
|
||||
'id': 1,
|
||||
'path': '/test-company/projects/test-project/lessons-learned/sprints/lesson1',
|
||||
'title': 'Lesson 1',
|
||||
'tags': ['testing']
|
||||
},
|
||||
{
|
||||
'id': 2,
|
||||
'path': '/test-company/projects/test-project/documentation/doc1',
|
||||
'title': 'Doc 1',
|
||||
'tags': ['guide']
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
lessons = await client.search_lessons(query="testing")
|
||||
|
||||
# Should only return lessons-learned pages
|
||||
assert len(lessons) == 1
|
||||
assert 'lessons-learned' in lessons[0]['path']
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_graphql_error_handling(client):
|
||||
"""Test handling of GraphQL errors"""
|
||||
mock_response = {
|
||||
'errors': [
|
||||
{'message': 'Page not found'},
|
||||
{'message': 'Invalid query'}
|
||||
]
|
||||
}
|
||||
|
||||
with patch('httpx.AsyncClient') as mock_client:
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.__aenter__.return_value = mock_instance
|
||||
mock_instance.__aexit__.return_value = None
|
||||
mock_instance.post = AsyncMock(return_value=MagicMock(
|
||||
json=lambda: mock_response,
|
||||
raise_for_status=lambda: None
|
||||
))
|
||||
mock_client.return_value = mock_instance
|
||||
|
||||
with pytest.raises(ValueError, match="GraphQL errors"):
|
||||
await client.search_pages("test")
|
||||
Reference in New Issue
Block a user