ready for try

2025-07-29 20:16:11 -04:00
parent 29001b02a5
commit 18a82711cb
33 changed files with 7446 additions and 47 deletions

wikijs/client.py

@@ -8,6 +8,7 @@ from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from .auth import AuthHandler, APIKeyAuth
from .endpoints import PagesEndpoint
from .exceptions import (
APIError,
AuthenticationError,
@@ -37,8 +38,8 @@ class WikiJSClient:
Basic usage with API key:
>>> client = WikiJSClient('https://wiki.example.com', auth='your-api-key')
>>> # Will be available after endpoints are implemented:
>>> # pages = client.pages.list()
>>> pages = client.pages.list()
>>> page = client.pages.get(123)
Attributes:
base_url: The normalized base URL
@@ -77,8 +78,9 @@ class WikiJSClient:
# Initialize HTTP session
self._session = self._create_session()
# Endpoint handlers (will be initialized as we implement them)
# self.pages = PagesEndpoint(self)
# Endpoint handlers
self.pages = PagesEndpoint(self)
# Future endpoints:
# self.users = UsersEndpoint(self)
# self.groups = GroupsEndpoint(self)

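With PagesEndpoint wired into the constructor, page operations hang directly off the client. A minimal usage sketch in the docstring's doctest style (the URL and API key are placeholders):

>>> client = WikiJSClient('https://wiki.example.com', auth='your-api-key')
>>> pages = client.pages.list(limit=10)   # first ten pages, ordered by title
>>> page = client.pages.get(pages[0].id)  # fetch a single page by numeric ID
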
wikijs/endpoints/__init__.py

@@ -1,22 +1,22 @@
"""API endpoints module for wikijs-python-sdk.
This module will contain endpoint handlers for different
This module contains endpoint handlers for different
Wiki.js API endpoints.
Implemented:
- Pages API (CRUD operations) ✅
Future implementations:
- Pages API (CRUD operations)
- Users API (user management)
- Groups API (group management)
- Assets API (file management)
- System API (system information)
"""
# Placeholder for future endpoint implementations
# from .base import BaseEndpoint
# from .pages import PagesEndpoint
# from .users import UsersEndpoint
# from .groups import GroupsEndpoint
from .base import BaseEndpoint
from .pages import PagesEndpoint
__all__ = [
# Will be implemented in Task 1.4
"BaseEndpoint",
"PagesEndpoint",
]
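After this change the package exports the two implemented classes directly, so downstream code no longer has to reach into submodules:

>>> from wikijs.endpoints import BaseEndpoint, PagesEndpoint
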

wikijs/endpoints/base.py Normal file

@@ -0,0 +1,142 @@
"""Base endpoint class for wikijs-python-sdk."""
from typing import Any, Dict, List, Optional, TYPE_CHECKING
if TYPE_CHECKING:
from ..client import WikiJSClient
class BaseEndpoint:
"""Base class for all API endpoints.
This class provides common functionality for making API requests
and handling responses across all endpoint implementations.
Args:
client: The WikiJS client instance
"""
def __init__(self, client: "WikiJSClient"):
"""Initialize endpoint with client reference.
Args:
client: WikiJS client instance
"""
self._client = client
def _request(
self,
method: str,
endpoint: str,
params: Optional[Dict[str, Any]] = None,
json_data: Optional[Dict[str, Any]] = None,
**kwargs
) -> Dict[str, Any]:
"""Make HTTP request through the client.
Args:
method: HTTP method (GET, POST, PUT, DELETE)
endpoint: API endpoint path
params: Query parameters
json_data: JSON data for request body
**kwargs: Additional request parameters
Returns:
Parsed response data
"""
return self._client._request(
method=method,
endpoint=endpoint,
params=params,
json_data=json_data,
**kwargs
)
def _get(
self,
endpoint: str,
params: Optional[Dict[str, Any]] = None,
**kwargs
) -> Dict[str, Any]:
"""Make GET request.
Args:
endpoint: API endpoint path
params: Query parameters
**kwargs: Additional request parameters
Returns:
Parsed response data
"""
return self._request("GET", endpoint, params=params, **kwargs)
def _post(
self,
endpoint: str,
json_data: Optional[Dict[str, Any]] = None,
params: Optional[Dict[str, Any]] = None,
**kwargs
) -> Dict[str, Any]:
"""Make POST request.
Args:
endpoint: API endpoint path
json_data: JSON data for request body
params: Query parameters
**kwargs: Additional request parameters
Returns:
Parsed response data
"""
return self._request("POST", endpoint, params=params, json_data=json_data, **kwargs)
def _put(
self,
endpoint: str,
json_data: Optional[Dict[str, Any]] = None,
params: Optional[Dict[str, Any]] = None,
**kwargs
) -> Dict[str, Any]:
"""Make PUT request.
Args:
endpoint: API endpoint path
json_data: JSON data for request body
params: Query parameters
**kwargs: Additional request parameters
Returns:
Parsed response data
"""
return self._request("PUT", endpoint, params=params, json_data=json_data, **kwargs)
def _delete(
self,
endpoint: str,
params: Optional[Dict[str, Any]] = None,
**kwargs
) -> Dict[str, Any]:
"""Make DELETE request.
Args:
endpoint: API endpoint path
params: Query parameters
**kwargs: Additional request parameters
Returns:
Parsed response data
"""
return self._request("DELETE", endpoint, params=params, **kwargs)
def _build_endpoint(self, *parts: str) -> str:
"""Build endpoint path from parts.
Args:
*parts: Path components
Returns:
Formatted endpoint path
"""
# Remove empty parts and join with /
clean_parts = [str(part).strip("/") for part in parts if part]
return "/" + "/".join(clean_parts)

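BaseEndpoint keeps subclasses thin: an endpoint composes a path with _build_endpoint and delegates the HTTP round trip to the client's shared session via _get/_post/_put/_delete. A hypothetical subclass sketch (SystemEndpoint and the /system/info path are illustrative only, not part of this commit):

class SystemEndpoint(BaseEndpoint):
    """Illustrative endpoint built on the shared request helpers."""

    def info(self):
        # _build_endpoint("system", "info") -> "/system/info"
        return self._get(self._build_endpoint("system", "info"))
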
wikijs/endpoints/pages.py Normal file

@@ -0,0 +1,634 @@
"""Pages API endpoint for wikijs-python-sdk."""
from typing import Any, Dict, List, Optional, Union
from ..exceptions import APIError, ValidationError
from ..models.page import Page, PageCreate, PageUpdate
from .base import BaseEndpoint
class PagesEndpoint(BaseEndpoint):
"""Endpoint for Wiki.js Pages API operations.
This endpoint provides methods for creating, reading, updating, and deleting
wiki pages through the Wiki.js GraphQL API.
Example:
>>> client = WikiJSClient('https://wiki.example.com', auth='api-key')
>>> pages = client.pages
>>>
>>> # List all pages
>>> all_pages = pages.list()
>>>
>>> # Get a specific page
>>> page = pages.get(123)
>>>
>>> # Create a new page
>>> new_page_data = PageCreate(
... title="Getting Started",
... path="getting-started",
... content="# Welcome\\n\\nThis is your first page!"
... )
>>> created_page = pages.create(new_page_data)
>>>
>>> # Update an existing page
>>> update_data = PageUpdate(title="Updated Title")
>>> updated_page = pages.update(123, update_data)
>>>
>>> # Delete a page
>>> pages.delete(123)
"""
def list(
self,
limit: Optional[int] = None,
offset: Optional[int] = None,
search: Optional[str] = None,
tags: Optional[List[str]] = None,
locale: Optional[str] = None,
author_id: Optional[int] = None,
order_by: str = "title",
order_direction: str = "ASC"
) -> List[Page]:
"""List pages with optional filtering.
Args:
limit: Maximum number of pages to return
offset: Number of pages to skip
search: Search term to filter pages
tags: List of tags to filter by (pages must have ALL tags)
locale: Locale to filter by
author_id: Author ID to filter by
order_by: Field to order by (title, created_at, updated_at, path)
order_direction: Order direction (ASC or DESC)
Returns:
List of Page objects
Raises:
APIError: If the API request fails
ValidationError: If parameters are invalid
"""
# Validate parameters
if limit is not None and limit < 1:
raise ValidationError("limit must be greater than 0")
if offset is not None and offset < 0:
raise ValidationError("offset must be non-negative")
if order_by not in ["title", "created_at", "updated_at", "path"]:
raise ValidationError("order_by must be one of: title, created_at, updated_at, path")
if order_direction not in ["ASC", "DESC"]:
raise ValidationError("order_direction must be ASC or DESC")
# Build GraphQL query
query = """
query($limit: Int, $offset: Int, $search: String, $tags: [String], $locale: String, $authorId: Int, $orderBy: String, $orderDirection: String) {
pages(
limit: $limit,
offset: $offset,
search: $search,
tags: $tags,
locale: $locale,
authorId: $authorId,
orderBy: $orderBy,
orderDirection: $orderDirection
) {
id
title
path
content
description
isPublished
isPrivate
tags
locale
authorId
authorName
authorEmail
editor
createdAt
updatedAt
}
}
"""
# Build variables
variables = {
"orderBy": order_by,
"orderDirection": order_direction
}
if limit is not None:
variables["limit"] = limit
if offset is not None:
variables["offset"] = offset
if search:
variables["search"] = search
if tags:
variables["tags"] = tags
if locale:
variables["locale"] = locale
if author_id is not None:
variables["authorId"] = author_id
# Make request
response = self._post("/graphql", json_data={
"query": query,
"variables": variables
})
# Parse response
if "errors" in response:
raise APIError(f"GraphQL errors: {response['errors']}")
pages_data = response.get("data", {}).get("pages", [])
# Convert to Page objects
pages = []
for page_data in pages_data:
try:
# Convert API field names to model field names
normalized_data = self._normalize_page_data(page_data)
page = Page(**normalized_data)
pages.append(page)
except Exception as e:
raise APIError(f"Failed to parse page data: {str(e)}") from e
return pages
def get(self, page_id: int) -> Page:
"""Get a specific page by ID.
Args:
page_id: The page ID
Returns:
Page object
Raises:
APIError: If the page is not found or request fails
ValidationError: If page_id is invalid
"""
if not isinstance(page_id, int) or page_id < 1:
raise ValidationError("page_id must be a positive integer")
# Build GraphQL query
query = """
query($id: Int!) {
page(id: $id) {
id
title
path
content
description
isPublished
isPrivate
tags
locale
authorId
authorName
authorEmail
editor
createdAt
updatedAt
}
}
"""
# Make request
response = self._post("/graphql", json_data={
"query": query,
"variables": {"id": page_id}
})
# Parse response
if "errors" in response:
raise APIError(f"GraphQL errors: {response['errors']}")
page_data = response.get("data", {}).get("page")
if not page_data:
raise APIError(f"Page with ID {page_id} not found")
# Convert to Page object
try:
normalized_data = self._normalize_page_data(page_data)
return Page(**normalized_data)
except Exception as e:
raise APIError(f"Failed to parse page data: {str(e)}") from e
def get_by_path(self, path: str, locale: str = "en") -> Page:
"""Get a page by its path.
Args:
path: The page path (e.g., "getting-started")
locale: The page locale (default: "en")
Returns:
Page object
Raises:
APIError: If the page is not found or request fails
ValidationError: If path is invalid
"""
if not path or not isinstance(path, str):
raise ValidationError("path must be a non-empty string")
# Normalize path
path = path.strip("/")
# Build GraphQL query
query = """
query($path: String!, $locale: String!) {
pageByPath(path: $path, locale: $locale) {
id
title
path
content
description
isPublished
isPrivate
tags
locale
authorId
authorName
authorEmail
editor
createdAt
updatedAt
}
}
"""
# Make request
response = self._post("/graphql", json_data={
"query": query,
"variables": {"path": path, "locale": locale}
})
# Parse response
if "errors" in response:
raise APIError(f"GraphQL errors: {response['errors']}")
page_data = response.get("data", {}).get("pageByPath")
if not page_data:
raise APIError(f"Page with path '{path}' not found")
# Convert to Page object
try:
normalized_data = self._normalize_page_data(page_data)
return Page(**normalized_data)
except Exception as e:
raise APIError(f"Failed to parse page data: {str(e)}") from e
def create(self, page_data: Union[PageCreate, Dict[str, Any]]) -> Page:
"""Create a new page.
Args:
page_data: Page creation data (PageCreate object or dict)
Returns:
Created Page object
Raises:
APIError: If page creation fails
ValidationError: If page data is invalid
"""
# Convert to PageCreate if needed
if isinstance(page_data, dict):
try:
page_data = PageCreate(**page_data)
except Exception as e:
raise ValidationError(f"Invalid page data: {str(e)}") from e
elif not isinstance(page_data, PageCreate):
raise ValidationError("page_data must be PageCreate object or dict")
# Build GraphQL mutation
mutation = """
mutation($title: String!, $path: String!, $content: String!, $description: String, $isPublished: Boolean, $isPrivate: Boolean, $tags: [String], $locale: String, $editor: String) {
createPage(
title: $title,
path: $path,
content: $content,
description: $description,
isPublished: $isPublished,
isPrivate: $isPrivate,
tags: $tags,
locale: $locale,
editor: $editor
) {
id
title
path
content
description
isPublished
isPrivate
tags
locale
authorId
authorName
authorEmail
editor
createdAt
updatedAt
}
}
"""
# Build variables from page data
variables = {
"title": page_data.title,
"path": page_data.path,
"content": page_data.content,
"isPublished": page_data.is_published,
"isPrivate": page_data.is_private,
"tags": page_data.tags,
"locale": page_data.locale,
"editor": page_data.editor
}
if page_data.description is not None:
variables["description"] = page_data.description
# Make request
response = self._post("/graphql", json_data={
"query": mutation,
"variables": variables
})
# Parse response
if "errors" in response:
raise APIError(f"Failed to create page: {response['errors']}")
created_page_data = response.get("data", {}).get("createPage")
if not created_page_data:
raise APIError("Page creation failed - no data returned")
# Convert to Page object
try:
normalized_data = self._normalize_page_data(created_page_data)
return Page(**normalized_data)
except Exception as e:
raise APIError(f"Failed to parse created page data: {str(e)}") from e
def update(
self,
page_id: int,
page_data: Union[PageUpdate, Dict[str, Any]]
) -> Page:
"""Update an existing page.
Args:
page_id: The page ID
page_data: Page update data (PageUpdate object or dict)
Returns:
Updated Page object
Raises:
APIError: If page update fails
ValidationError: If parameters are invalid
"""
if not isinstance(page_id, int) or page_id < 1:
raise ValidationError("page_id must be a positive integer")
# Convert to PageUpdate if needed
if isinstance(page_data, dict):
try:
page_data = PageUpdate(**page_data)
except Exception as e:
raise ValidationError(f"Invalid page data: {str(e)}") from e
elif not isinstance(page_data, PageUpdate):
raise ValidationError("page_data must be PageUpdate object or dict")
# Build GraphQL mutation
mutation = """
mutation($id: Int!, $title: String, $content: String, $description: String, $isPublished: Boolean, $isPrivate: Boolean, $tags: [String]) {
updatePage(
id: $id,
title: $title,
content: $content,
description: $description,
isPublished: $isPublished,
isPrivate: $isPrivate,
tags: $tags
) {
id
title
path
content
description
isPublished
isPrivate
tags
locale
authorId
authorName
authorEmail
editor
createdAt
updatedAt
}
}
"""
# Build variables (only include non-None values)
variables = {"id": page_id}
if page_data.title is not None:
variables["title"] = page_data.title
if page_data.content is not None:
variables["content"] = page_data.content
if page_data.description is not None:
variables["description"] = page_data.description
if page_data.is_published is not None:
variables["isPublished"] = page_data.is_published
if page_data.is_private is not None:
variables["isPrivate"] = page_data.is_private
if page_data.tags is not None:
variables["tags"] = page_data.tags
# Make request
response = self._post("/graphql", json_data={
"query": mutation,
"variables": variables
})
# Parse response
if "errors" in response:
raise APIError(f"Failed to update page: {response['errors']}")
updated_page_data = response.get("data", {}).get("updatePage")
if not updated_page_data:
raise APIError("Page update failed - no data returned")
# Convert to Page object
try:
normalized_data = self._normalize_page_data(updated_page_data)
return Page(**normalized_data)
except Exception as e:
raise APIError(f"Failed to parse updated page data: {str(e)}") from e
def delete(self, page_id: int) -> bool:
"""Delete a page.
Args:
page_id: The page ID
Returns:
True if deletion was successful
Raises:
APIError: If page deletion fails
ValidationError: If page_id is invalid
"""
if not isinstance(page_id, int) or page_id < 1:
raise ValidationError("page_id must be a positive integer")
# Build GraphQL mutation
mutation = """
mutation($id: Int!) {
deletePage(id: $id) {
success
message
}
}
"""
# Make request
response = self._post("/graphql", json_data={
"query": mutation,
"variables": {"id": page_id}
})
# Parse response
if "errors" in response:
raise APIError(f"Failed to delete page: {response['errors']}")
delete_result = response.get("data", {}).get("deletePage", {})
success = delete_result.get("success", False)
if not success:
message = delete_result.get("message", "Unknown error")
raise APIError(f"Page deletion failed: {message}")
return True
def search(
self,
query: str,
limit: Optional[int] = None,
locale: Optional[str] = None
) -> List[Page]:
"""Search for pages by content and title.
Args:
query: Search query string
limit: Maximum number of results to return
locale: Locale to search in
Returns:
List of matching Page objects
Raises:
APIError: If search fails
ValidationError: If parameters are invalid
"""
if not query or not isinstance(query, str):
raise ValidationError("query must be a non-empty string")
if limit is not None and limit < 1:
raise ValidationError("limit must be greater than 0")
# Use the list method with search parameter
return self.list(
search=query,
limit=limit,
locale=locale
)
def get_by_tags(
self,
tags: List[str],
match_all: bool = True,
limit: Optional[int] = None
) -> List[Page]:
"""Get pages by tags.
Args:
tags: List of tags to search for
match_all: If True, pages must have ALL tags. If False, ANY tag matches
limit: Maximum number of results to return
Returns:
List of matching Page objects
Raises:
APIError: If request fails
ValidationError: If parameters are invalid
"""
if not tags or not isinstance(tags, list):
raise ValidationError("tags must be a non-empty list")
if limit is not None and limit < 1:
raise ValidationError("limit must be greater than 0")
# For match_all=True, use the tags parameter directly
if match_all:
return self.list(tags=tags, limit=limit)
# For match_all=False, we need a more complex query
# This would require a custom GraphQL query or multiple requests
# For now, implement a simple approach
all_pages = self.list(limit=limit * 2 if limit else None) # Get more pages to filter
matching_pages = []
for page in all_pages:
if any(tag.lower() in [t.lower() for t in page.tags] for tag in tags):
matching_pages.append(page)
if limit and len(matching_pages) >= limit:
break
return matching_pages
def _normalize_page_data(self, page_data: Dict[str, Any]) -> Dict[str, Any]:
"""Normalize page data from API response to model format.
Args:
page_data: Raw page data from API
Returns:
Normalized data for Page model
"""
normalized = {}
# Map API field names to model field names
field_mapping = {
"id": "id",
"title": "title",
"path": "path",
"content": "content",
"description": "description",
"isPublished": "is_published",
"isPrivate": "is_private",
"tags": "tags",
"locale": "locale",
"authorId": "author_id",
"authorName": "author_name",
"authorEmail": "author_email",
"editor": "editor",
"createdAt": "created_at",
"updatedAt": "updated_at"
}
for api_field, model_field in field_mapping.items():
if api_field in page_data:
normalized[model_field] = page_data[api_field]
# Ensure required fields have defaults
if "tags" not in normalized:
normalized["tags"] = []
return normalized

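Taken together, the endpoint covers the full CRUD cycle plus the search and tag helpers layered on list(). A condensed sketch tying it together (URL, API key, and tag names are placeholders; the top-level WikiJSClient import assumes the client is re-exported from the package root):

>>> from wikijs import WikiJSClient
>>> from wikijs.models.page import PageCreate, PageUpdate
>>> client = WikiJSClient('https://wiki.example.com', auth='api-key')
>>> created = client.pages.create(PageCreate(
...     title="Getting Started",
...     path="getting-started",
...     content="# Welcome",
... ))
>>> client.pages.update(created.id, PageUpdate(title="Updated Title"))
>>> hits = client.pages.search("welcome", limit=5)
>>> tagged = client.pages.get_by_tags(["docs", "howto"], match_all=False)
>>> client.pages.delete(created.id)
True
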
wikijs/exceptions.py

@@ -70,6 +70,8 @@ class RateLimitError(ClientError):
"""Raised when rate limit is exceeded (429)."""
def __init__(self, message: str, retry_after: Optional[int] = None, **kwargs):
# Remove status_code from kwargs if present to avoid duplicate argument
kwargs.pop('status_code', None)
super().__init__(message, status_code=429, **kwargs)
self.retry_after = retry_after