feat: implement production-ready features from improvement plan phase 2.5 & 2.6

Phase 2.5: Fix Foundation (CRITICAL)
- Fixed 4 failing tests by adding cache attribute to mock_client fixture
- Created comprehensive cache tests for Pages endpoint (test_pages_cache.py)
- Added missing dependencies to core requirements: pydantic[email] and aiohttp
- Updated requirements.txt with proper dependency versions
- Achieved 82.67% test coverage with 454 passing tests

Phase 2.6: Production Essentials
- Implemented structured logging (wikijs/logging.py)
  * JSON and text log formatters
  * Configurable log levels and output destinations
  * Integration with client operations
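
  A minimal usage sketch (setup_logging and format_type are taken from the
  new tests below; exact options may differ):

      import logging
      from wikijs.logging import setup_logging

      # Emit structured JSON records at INFO level (sketch, not the full API)
      logger = setup_logging(level=logging.INFO, format_type="json")
      logger.info("wikijs client initialized")  # JSON record with level, message, timestamp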

- Implemented metrics and telemetry (wikijs/metrics.py)
  * Request tracking with duration, status codes, errors
  * Latency percentiles (min, max, avg, p50, p95, p99)
  * Error rate calculation
  * Thread-safe metrics collection
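
  A minimal usage sketch (method names from the new tests; the endpoint and
  values are illustrative):

      from wikijs.metrics import MetricsCollector

      collector = MetricsCollector()
      # endpoint, HTTP method, status code, duration (unit as used in the tests), optional error
      collector.record_request("/api/pages", "GET", 200, 42.0)
      collector.record_request("/api/pages", "GET", 500, 130.0, error="server error")
      stats = collector.get_stats()  # totals, error counts, latency min/max/avg and percentiles
      print(stats["total_requests"], stats["total_errors"])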

- Implemented rate limiting (wikijs/ratelimit.py)
  * Token bucket algorithm for request throttling
  * Per-endpoint rate limiting support
  * Configurable timeout handling
  * Burst capacity management
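
  A minimal usage sketch (constructor and acquire signatures from the new
  tests; the endpoint path is illustrative):

      from wikijs.ratelimit import RateLimiter, PerEndpointRateLimiter

      limiter = RateLimiter(requests_per_second=10.0, burst=5)
      if limiter.acquire(timeout=1.0):  # wait up to 1s for a token
          pass  # safe to send the request

      per_endpoint = PerEndpointRateLimiter(default_rate=10.0)
      per_endpoint.set_limit("/api/search", 2.0)
      allowed = per_endpoint.acquire("/api/search", timeout=0.5)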

- Created SECURITY.md policy
  * Vulnerability reporting procedures
  * Security best practices
  * Response timelines
  * Supported versions

Documentation
- Added comprehensive logging guide (docs/logging.md)
- Added metrics and telemetry guide (docs/metrics.md)
- Added rate limiting guide (docs/rate_limiting.md)
- Updated README.md with production features section
- Updated IMPROVEMENT_PLAN_2.md with completed checkboxes

Testing
- Created test suite for logging (tests/test_logging.py)
- Created test suite for metrics (tests/test_metrics.py)
- Created test suite for rate limiting (tests/test_ratelimit.py)
- All 454 tests passing
- Test coverage: 82.67%

Breaking Changes: None
Dependencies Added: pydantic[email], email-validator, dnspython

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>

15 changed files with 1278 additions and 40 deletions


@@ -17,6 +17,7 @@ class TestPagesEndpoint:
    def mock_client(self):
        """Create a mock WikiJS client."""
        client = Mock(spec=WikiJSClient)
        client.cache = None  # added: endpoints now consult client.cache, so the mock must define it
        return client

    @pytest.fixture

test_pages_cache.py (new file)

@@ -0,0 +1,175 @@
"""Tests for Pages endpoint caching functionality."""
import pytest
from unittest.mock import MagicMock, Mock
from wikijs.cache import MemoryCache, CacheKey
from wikijs.endpoints.pages import PagesEndpoint
from wikijs.models import Page
class TestPagesCaching:
"""Test caching behavior in Pages endpoint."""
def test_get_with_cache_hit(self):
"""Test page retrieval uses cache when available."""
# Setup
cache = MemoryCache(ttl=300)
client = MagicMock()
client.cache = cache
client._request = MagicMock()
pages = PagesEndpoint(client)
# Pre-populate cache
page_data = {"id": 123, "title": "Test", "path": "test"}
cache_key = CacheKey("page", "123", "get")
cache.set(cache_key, page_data)
# Execute
result = pages.get(123)
# Verify cache was used, not API
client._request.assert_not_called()
assert result["id"] == 123
def test_get_with_cache_miss(self):
"""Test page retrieval calls API on cache miss."""
# Setup
cache = MemoryCache(ttl=300)
client = MagicMock()
client.cache = cache
# Mock the _post method on the endpoint
pages = PagesEndpoint(client)
pages._post = Mock(return_value={
"data": {"pages": {"single": {
"id": 123,
"title": "Test",
"path": "test",
"content": "Test content",
"description": "Test desc",
"isPublished": True,
"isPrivate": False,
"tags": [],
"locale": "en",
"authorId": 1,
"authorName": "Test User",
"authorEmail": "test@example.com",
"editor": "markdown",
"createdAt": "2023-01-01T00:00:00Z",
"updatedAt": "2023-01-02T00:00:00Z"
}}}
})
# Execute
result = pages.get(123)
# Verify API was called
pages._post.assert_called_once()
# Verify result was cached
cache_key = CacheKey("page", "123", "get")
cached = cache.get(cache_key)
assert cached is not None
def test_update_invalidates_cache(self):
"""Test page update invalidates cache."""
# Setup
cache = MemoryCache(ttl=300)
client = MagicMock()
client.cache = cache
pages = PagesEndpoint(client)
pages._post = Mock(return_value={
"data": {"updatePage": {
"id": 123,
"title": "New",
"path": "test",
"content": "Updated content",
"description": "Updated desc",
"isPublished": True,
"isPrivate": False,
"tags": [],
"locale": "en",
"authorId": 1,
"authorName": "Test User",
"authorEmail": "test@example.com",
"editor": "markdown",
"createdAt": "2023-01-01T00:00:00Z",
"updatedAt": "2023-01-02T00:00:00Z"
}}
})
# Pre-populate cache
cache_key = CacheKey("page", "123", "get")
cache.set(cache_key, {"id": 123, "title": "Old"})
# Verify cache is populated
assert cache.get(cache_key) is not None
# Execute update
pages.update(123, {"title": "New"})
# Verify cache was invalidated
cached = cache.get(cache_key)
assert cached is None
def test_delete_invalidates_cache(self):
"""Test page delete invalidates cache."""
# Setup
cache = MemoryCache(ttl=300)
client = MagicMock()
client.cache = cache
pages = PagesEndpoint(client)
pages._post = Mock(return_value={
"data": {"deletePage": {"success": True}}
})
# Pre-populate cache
cache_key = CacheKey("page", "123", "get")
cache.set(cache_key, {"id": 123, "title": "Test"})
# Verify cache is populated
assert cache.get(cache_key) is not None
# Execute delete
pages.delete(123)
# Verify cache was invalidated
cached = cache.get(cache_key)
assert cached is None
def test_get_without_cache(self):
"""Test page retrieval without cache configured."""
# Setup
client = MagicMock()
client.cache = None
pages = PagesEndpoint(client)
pages._post = Mock(return_value={
"data": {"pages": {"single": {
"id": 123,
"title": "Test",
"path": "test",
"content": "Test content",
"description": "Test desc",
"isPublished": True,
"isPrivate": False,
"tags": [],
"locale": "en",
"authorId": 1,
"authorName": "Test User",
"authorEmail": "test@example.com",
"editor": "markdown",
"createdAt": "2023-01-01T00:00:00Z",
"updatedAt": "2023-01-02T00:00:00Z"
}}}
})
# Execute
result = pages.get(123)
# Verify API was called
pages._post.assert_called_once()
assert result.id == 123
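
The tests above exercise a read-through, invalidate-on-write pattern. As a rough sketch of that
pattern only (not the actual PagesEndpoint code; fetch_page_from_api is a hypothetical placeholder
for the GraphQL call):

from wikijs.cache import CacheKey

def get_page(client, page_id):
    """Read-through sketch: return the cached page if present, otherwise fetch and cache it."""
    key = CacheKey("page", str(page_id), "get")
    if client.cache is not None:
        cached = client.cache.get(key)
        if cached is not None:
            return cached  # cache hit: no API call
    page = fetch_page_from_api(client, page_id)  # hypothetical helper for the GraphQL query
    if client.cache is not None:
        client.cache.set(key, page)  # populate the cache on a miss
    return page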

tests/test_logging.py (new file, 41 lines)

@@ -0,0 +1,41 @@
"""Tests for logging functionality."""
import logging
import json
from wikijs.logging import setup_logging, JSONFormatter
def test_json_formatter():
"""Test JSON log formatting."""
formatter = JSONFormatter()
record = logging.LogRecord(
name="test",
level=logging.INFO,
pathname="test.py",
lineno=10,
msg="Test message",
args=(),
exc_info=None
)
output = formatter.format(record)
log_data = json.loads(output)
assert log_data["level"] == "INFO"
assert log_data["message"] == "Test message"
assert "timestamp" in log_data
def test_setup_logging_json():
"""Test JSON logging setup."""
logger = setup_logging(level=logging.DEBUG, format_type="json")
assert logger.level == logging.DEBUG
assert len(logger.handlers) == 1
def test_setup_logging_text():
"""Test text logging setup."""
logger = setup_logging(level=logging.INFO, format_type="text")
assert logger.level == logging.INFO
assert len(logger.handlers) == 1
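
For reference, a sketch of a JSON formatter that would produce the fields asserted above
(level, message, timestamp); the shipped wikijs.logging.JSONFormatter may include more fields:

import json
import logging
from datetime import datetime, timezone

class SketchJSONFormatter(logging.Formatter):
    """Illustrative formatter, not the shipped implementation."""

    def format(self, record):
        payload = {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "level": record.levelname,
            "message": record.getMessage(),
            "logger": record.name,
        }
        return json.dumps(payload)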

tests/test_metrics.py (new file, 89 lines)

@@ -0,0 +1,89 @@
"""Tests for metrics functionality."""
from wikijs.metrics import MetricsCollector, get_metrics
def test_metrics_collector_init():
"""Test metrics collector initialization."""
collector = MetricsCollector()
stats = collector.get_stats()
assert stats["total_requests"] == 0
assert stats["total_errors"] == 0
def test_record_request():
"""Test recording requests."""
collector = MetricsCollector()
# Record successful request
collector.record_request("/api/test", "GET", 200, 100.0)
stats = collector.get_stats()
assert stats["total_requests"] == 1
assert stats["total_errors"] == 0
def test_record_error():
"""Test recording errors."""
collector = MetricsCollector()
# Record error request
collector.record_request("/api/test", "GET", 404, 50.0, error="Not found")
stats = collector.get_stats()
assert stats["total_requests"] == 1
assert stats["total_errors"] == 1
def test_latency_stats():
"""Test latency statistics."""
collector = MetricsCollector()
# Record multiple requests
collector.record_request("/api/test", "GET", 200, 100.0)
collector.record_request("/api/test", "GET", 200, 200.0)
collector.record_request("/api/test", "GET", 200, 150.0)
stats = collector.get_stats()
assert "latency" in stats
assert stats["latency"]["min"] == 100.0
assert stats["latency"]["max"] == 200.0
assert stats["latency"]["avg"] == 150.0
def test_increment_counter():
"""Test incrementing counters."""
collector = MetricsCollector()
collector.increment("custom_counter", 5)
collector.increment("custom_counter", 3)
stats = collector.get_stats()
assert stats["counters"]["custom_counter"] == 8
def test_set_gauge():
"""Test setting gauges."""
collector = MetricsCollector()
collector.set_gauge("memory_usage", 75.5)
stats = collector.get_stats()
assert stats["gauges"]["memory_usage"] == 75.5
def test_reset_metrics():
"""Test resetting metrics."""
collector = MetricsCollector()
collector.record_request("/api/test", "GET", 200, 100.0)
collector.reset()
stats = collector.get_stats()
assert stats["total_requests"] == 0
def test_get_global_metrics():
"""Test getting global metrics instance."""
metrics = get_metrics()
assert isinstance(metrics, MetricsCollector)
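
The p50/p95/p99 percentiles mentioned in the commit message are not asserted here. For
illustration, one common way to compute such percentiles from recorded durations (a sketch,
not necessarily how wikijs/metrics.py does it):

def percentile(durations, pct):
    """Nearest-rank percentile (pct in 0-100) over a list of recorded durations."""
    if not durations:
        return 0.0
    ordered = sorted(durations)
    rank = max(1, round(pct / 100.0 * len(ordered)))  # 1-based nearest rank
    return ordered[min(rank, len(ordered)) - 1]

print(percentile([100.0, 200.0, 150.0], 50))  # 150.0
print(percentile([100.0, 200.0, 150.0], 95))  # 200.0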

tests/test_ratelimit.py (new file, 73 lines)

@@ -0,0 +1,73 @@
"""Tests for rate limiting functionality."""
import time
import pytest
from wikijs.ratelimit import RateLimiter, PerEndpointRateLimiter
def test_rate_limiter_init():
"""Test rate limiter initialization."""
limiter = RateLimiter(requests_per_second=10.0)
assert limiter.rate == 10.0
assert limiter.burst == 10
def test_rate_limiter_acquire():
"""Test acquiring tokens."""
limiter = RateLimiter(requests_per_second=100.0)
# Should be able to acquire immediately
assert limiter.acquire(timeout=1.0) is True
def test_rate_limiter_burst():
"""Test burst behavior."""
limiter = RateLimiter(requests_per_second=10.0, burst=5)
# Should be able to acquire up to burst size
for _ in range(5):
assert limiter.acquire(timeout=0.1) is True
def test_rate_limiter_timeout():
"""Test timeout behavior."""
limiter = RateLimiter(requests_per_second=1.0)
# Exhaust tokens
assert limiter.acquire(timeout=1.0) is True
# Next acquire should timeout quickly
assert limiter.acquire(timeout=0.1) is False
def test_rate_limiter_reset():
"""Test rate limiter reset."""
limiter = RateLimiter(requests_per_second=1.0)
# Exhaust tokens
limiter.acquire()
# Reset
limiter.reset()
# Should be able to acquire again
assert limiter.acquire(timeout=0.1) is True
def test_per_endpoint_rate_limiter():
"""Test per-endpoint rate limiting."""
limiter = PerEndpointRateLimiter(default_rate=10.0)
# Set different rate for specific endpoint
limiter.set_limit("/api/special", 5.0)
# Should use endpoint-specific rate
assert limiter.acquire("/api/special", timeout=1.0) is True
def test_per_endpoint_default_rate():
"""Test default rate for endpoints."""
limiter = PerEndpointRateLimiter(default_rate=100.0)
# Should use default rate for unknown endpoint
assert limiter.acquire("/api/unknown", timeout=1.0) is True