From 0f372f8eca23d63c47b2fa7cd080ac0ec1283530 Mon Sep 17 00:00:00 2001 From: Ray Walker Date: Thu, 26 Mar 2026 21:29:21 +1100 Subject: [PATCH 1/6] feat: add Memcached backend with pymemcache HashClient MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - MemcachedBackend implementing BaseBackend protocol (get/set/delete/exists/health_check) - pymemcache HashClient for consistent hashing across multiple servers - TTL clamped to 30-day Memcached maximum (2,592,000 seconds) - MemcachedBackendConfig with pydantic-settings (CACHEKIT_MEMCACHED_ env prefix) - Error classification mapping pymemcache exceptions to BackendErrorType - Lazy import via __getattr__ (pymemcache is optional dependency) - 79 unit/config/critical tests (all mocked — integration tests to follow) --- docs/guides/backend-guide.md | 87 +++++ llms.txt | 4 +- pyproject.toml | 4 + src/cachekit/backends/__init__.py | 15 +- src/cachekit/backends/memcached/__init__.py | 25 ++ src/cachekit/backends/memcached/backend.py | 189 ++++++++++ src/cachekit/backends/memcached/config.py | 119 +++++++ .../backends/memcached/error_handler.py | 88 +++++ tests/critical/conftest.py | 4 +- .../test_memcached_backend_critical.py | 163 +++++++++ tests/unit/backends/test_memcached_backend.py | 313 +++++++++++++++++ tests/unit/backends/test_memcached_config.py | 324 ++++++++++++++++++ uv.lock | 19 +- 13 files changed, 1346 insertions(+), 8 deletions(-) create mode 100644 src/cachekit/backends/memcached/__init__.py create mode 100644 src/cachekit/backends/memcached/backend.py create mode 100644 src/cachekit/backends/memcached/config.py create mode 100644 src/cachekit/backends/memcached/error_handler.py create mode 100644 tests/critical/test_memcached_backend_critical.py create mode 100644 tests/unit/backends/test_memcached_backend.py create mode 100644 tests/unit/backends/test_memcached_config.py diff --git a/docs/guides/backend-guide.md b/docs/guides/backend-guide.md index 037df62..d268a9e 
100644 --- a/docs/guides/backend-guide.md +++ b/docs/guides/backend-guide.md @@ -354,6 +354,87 @@ Large values (1MB): - Read p99: ~13μs per operation ``` +--- + +### MemcachedBackend + +> Requires: `pip install cachekit[memcached]` + +Store cache in Memcached with consistent hashing across multiple servers: + +```python notest +from cachekit.backends.memcached import MemcachedBackend, MemcachedBackendConfig +from cachekit import cache + +# Use default configuration (127.0.0.1:11211) +backend = MemcachedBackend() + +@cache(backend=backend) +def cached_function(): + return expensive_computation() +``` + +**Configuration via environment variables**: + +```bash +# Server list (JSON array format) +export CACHEKIT_MEMCACHED_SERVERS='["mc1:11211", "mc2:11211"]' + +# Timeouts +export CACHEKIT_MEMCACHED_CONNECT_TIMEOUT=2.0 # Default: 2.0 seconds +export CACHEKIT_MEMCACHED_TIMEOUT=1.0 # Default: 1.0 seconds + +# Connection pool +export CACHEKIT_MEMCACHED_MAX_POOL_SIZE=10 # Default: 10 per server +export CACHEKIT_MEMCACHED_RETRY_ATTEMPTS=2 # Default: 2 + +# Optional key prefix +export CACHEKIT_MEMCACHED_KEY_PREFIX="myapp:" # Default: "" (none) +``` + +**Configuration via Python**: + +```python notest +from cachekit.backends.memcached import MemcachedBackend, MemcachedBackendConfig + +config = MemcachedBackendConfig( + servers=["mc1:11211", "mc2:11211", "mc3:11211"], + connect_timeout=1.0, + timeout=0.5, + max_pool_size=20, + key_prefix="myapp:", +) + +backend = MemcachedBackend(config) +``` + +**When to use**: +- Hot in-memory caching with sub-millisecond reads +- Shared cache across multiple processes/pods (like Redis but simpler) +- High-throughput read-heavy workloads +- Applications already using Memcached infrastructure + +**When NOT to use**: +- Need persistence (Memcached is volatile — data lost on restart) +- Need distributed locking (use Redis instead) +- Need TTL inspection/refresh (Memcached doesn't support it) +- Cache values exceed 1MB (Memcached default slab 
limit) + +**Characteristics**: +- Latency: 1-5ms per operation (network-dependent) +- Throughput: Very high (multi-threaded C server) +- TTL support: Yes (max 30 days) +- Cross-process: Yes (shared across pods) +- Persistence: No (volatile memory only) +- Consistent hashing: Yes (via pymemcache HashClient) + +**Limitations**: +1. **No persistence**: All data is in-memory. Server restart = data loss. +2. **No locking**: No distributed lock support (use Redis for stampede prevention). +3. **30-day TTL maximum**: TTLs exceeding 30 days are automatically clamped. +4. **1MB value limit**: Default Memcached slab size limits values to ~1MB. +5. **No TTL inspection**: Cannot query remaining TTL on a key. + ## Encrypted SaaS Pattern (Zero-Knowledge) > *cachekit.io is in closed alpha — [request access](https://cachekit.io)* @@ -672,6 +753,12 @@ If no explicit backend and no module-level default, cachekit creates a RedisBack - You're building a typical web application - You require multi-process or distributed caching +**Use MemcachedBackend when**: +- Hot in-memory caching with very high throughput +- Simple key-value caching without persistence needs +- Existing Memcached infrastructure you want to reuse +- Read-heavy workloads where sub-5ms latency is sufficient + **Use CachekitIOBackend when** *(closed alpha — [request access](https://cachekit.io))*: - You want managed, zero-ops distributed caching - Multi-region caching without operating Redis diff --git a/llms.txt b/llms.txt index ff4407c..b359523 100644 --- a/llms.txt +++ b/llms.txt @@ -2,7 +2,7 @@ > Production-ready caching for Python with intelligent reliability features, pluggable backends, and Rust-powered performance. -cachekit provides intelligent caching with circuit breaker, distributed locking, Prometheus metrics, and zero-knowledge encryption. Supports multiple backends including Redis, File, and CachekitIO (managed edge cache). 
Designed for production workloads from simple decorators to complex multi-pod deployments. +cachekit provides intelligent caching with circuit breaker, distributed locking, Prometheus metrics, and zero-knowledge encryption. Supports multiple backends including Redis, Memcached, File, and CachekitIO (managed edge cache). Designed for production workloads from simple decorators to complex multi-pod deployments. ## Getting Started @@ -19,7 +19,7 @@ cachekit provides intelligent caching with circuit breaker, distributed locking, ## Features - [Serializer Guide](docs/guides/serializer-guide.md): Choose the right serializer for your data type -- [Backend Guide](docs/guides/backend-guide.md): Multi-backend architecture (Redis, File, CachekitIO managed edge cache, custom) +- [Backend Guide](docs/guides/backend-guide.md): Multi-backend architecture (Redis, Memcached, File, CachekitIO managed edge cache, custom) - [Circuit Breaker](docs/features/circuit-breaker.md): Prevent cascading failures in distributed systems - [Distributed Locking](docs/features/distributed-locking.md): Prevent cache stampedes in multi-pod environments - [Zero-Knowledge Encryption](docs/features/zero-knowledge-encryption.md): Client-side AES-256-GCM security for sensitive data diff --git a/pyproject.toml b/pyproject.toml index 5577d84..38255bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,6 +79,9 @@ data = [ "pandas>=1.3.0", "pyarrow>=21.0.0", ] +memcached = [ + "pymemcache>=4.0.0", +] [project.urls] Homepage = "https://github.com/cachekit-io/cachekit-py" @@ -205,6 +208,7 @@ dev = [ "pytest-cov>=7.0.0", "pytest-markdown-docs>=0.6.0", "pytest-redis>=3.0.0", + "pymemcache>=4.0.0", # Code quality "basedpyright>=1.32.1", "ruff>=0.6.0", diff --git a/src/cachekit/backends/__init__.py b/src/cachekit/backends/__init__.py index dc9a326..68d9df9 100644 --- a/src/cachekit/backends/__init__.py +++ b/src/cachekit/backends/__init__.py @@ -1,8 +1,8 @@ """Backend storage abstraction for cachekit. 
This module provides protocol-based abstraction for L2 backend storage with -dependency injection pattern. Backends can be Redis, HTTP, DynamoDB, or any -key-value store. +dependency injection pattern. Backends can be Redis, HTTP, DynamoDB, Memcached, +or any key-value store. Public API: - BaseBackend: Core protocol (5 methods: get, set, delete, exists, health_check) @@ -14,6 +14,7 @@ - BackendErrorType: Error classification enum - CapabilityNotAvailableError: Exception for missing optional capabilities - RedisBackend: Redis implementation (default) + - MemcachedBackend: Memcached implementation (requires pymemcache) Usage: >>> from cachekit.backends import BaseBackend, RedisBackend, BackendError @@ -56,6 +57,7 @@ "BackendErrorType", "CapabilityNotAvailableError", "RedisBackend", + "MemcachedBackend", ] @@ -95,3 +97,12 @@ def get_backend(self) -> BaseBackend: >>> backend.set("key", b"value", ttl=60) # doctest: +SKIP """ ... + + +def __getattr__(name: str): + """Lazy import for optional backends (pymemcache may not be installed).""" + if name == "MemcachedBackend": + from cachekit.backends.memcached import MemcachedBackend + + return MemcachedBackend + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/src/cachekit/backends/memcached/__init__.py b/src/cachekit/backends/memcached/__init__.py new file mode 100644 index 0000000..dd457f0 --- /dev/null +++ b/src/cachekit/backends/memcached/__init__.py @@ -0,0 +1,25 @@ +"""Memcached backend for cachekit. + +Provides Memcached storage backend using pymemcache with consistent hashing +for multi-server support. Thread-safe via HashClient connection pooling. 
+ +Public API: + - MemcachedBackend: Main backend implementation + - MemcachedBackendConfig: Configuration class + +Example: + >>> from cachekit.backends.memcached import MemcachedBackend, MemcachedBackendConfig + >>> config = MemcachedBackendConfig(servers=["127.0.0.1:11211"]) + >>> backend = MemcachedBackend(config) # doctest: +SKIP + >>> backend.set("key", b"value", ttl=60) # doctest: +SKIP +""" + +from __future__ import annotations + +from cachekit.backends.memcached.backend import MemcachedBackend +from cachekit.backends.memcached.config import MemcachedBackendConfig + +__all__ = [ + "MemcachedBackend", + "MemcachedBackendConfig", +] diff --git a/src/cachekit/backends/memcached/backend.py b/src/cachekit/backends/memcached/backend.py new file mode 100644 index 0000000..b66bbb7 --- /dev/null +++ b/src/cachekit/backends/memcached/backend.py @@ -0,0 +1,189 @@ +"""Memcached backend implementation for cachekit. + +Thread-safe Memcached backend using pymemcache HashClient with consistent hashing +for multi-server support. Implements BaseBackend protocol. +""" + +from __future__ import annotations + +import time +from typing import Any, Optional + +from cachekit.backends.memcached.config import MAX_MEMCACHED_TTL, MemcachedBackendConfig +from cachekit.backends.memcached.error_handler import classify_memcached_error + + +def _parse_server(server: str) -> tuple[str, int]: + """Parse 'host:port' string into (host, port) tuple. + + Args: + server: Server address in 'host:port' format. + + Returns: + Tuple of (host, port). + """ + host, port_str = server.rsplit(":", 1) + return (host, int(port_str)) + + +class MemcachedBackend: + """Memcached storage backend implementing BaseBackend protocol. + + Uses pymemcache HashClient for consistent-hashing across multiple servers. + Thread-safe via HashClient's internal connection pooling. 
+ + Examples: + Create backend with defaults (requires running Memcached): + + >>> backend = MemcachedBackend() # doctest: +SKIP + >>> backend.set("key", b"value", ttl=60) # doctest: +SKIP + >>> backend.get("key") # doctest: +SKIP + b'value' + >>> backend.delete("key") # doctest: +SKIP + True + + Create with explicit config: + + >>> from cachekit.backends.memcached.config import MemcachedBackendConfig + >>> config = MemcachedBackendConfig(servers=["mc1:11211", "mc2:11211"]) + >>> backend = MemcachedBackend(config) # doctest: +SKIP + """ + + def __init__(self, config: MemcachedBackendConfig | None = None) -> None: + """Initialize MemcachedBackend. + + Args: + config: Optional configuration. Defaults to loading from environment. + """ + from pymemcache.client.hash import HashClient + + self._config = config or MemcachedBackendConfig.from_env() + servers = [_parse_server(s) for s in self._config.servers] + + self._client: HashClient = HashClient( + servers=servers, + connect_timeout=self._config.connect_timeout, + timeout=self._config.timeout, + max_pool_size=self._config.max_pool_size, + retry_attempts=self._config.retry_attempts, + ) + self._key_prefix = self._config.key_prefix + + def _prefixed_key(self, key: str) -> str: + """Apply key prefix if configured.""" + if self._key_prefix: + return f"{self._key_prefix}{key}" + return key + + def get(self, key: str) -> Optional[bytes]: + """Retrieve value from Memcached. + + Args: + key: Cache key to retrieve. + + Returns: + Bytes value if found, None if key doesn't exist. + + Raises: + BackendError: If Memcached operation fails. 
+ """ + try: + result = self._client.get(self._prefixed_key(key)) + if result is None: + return None + # pymemcache returns bytes by default + return bytes(result) if not isinstance(result, bytes) else result + except Exception as exc: + raise classify_memcached_error(exc, operation="get", key=key) from exc + + def set(self, key: str, value: bytes, ttl: Optional[int] = None) -> None: + """Store value in Memcached. + + Args: + key: Cache key to store. + value: Bytes value to store. + ttl: Time-to-live in seconds. None or 0 means no expiry. + Clamped to 30-day Memcached maximum. + + Raises: + BackendError: If Memcached operation fails. + """ + expire = 0 + if ttl is not None and ttl > 0: + expire = min(ttl, MAX_MEMCACHED_TTL) + + try: + self._client.set(self._prefixed_key(key), value, expire=expire) + except Exception as exc: + raise classify_memcached_error(exc, operation="set", key=key) from exc + + def delete(self, key: str) -> bool: + """Delete key from Memcached. + + Args: + key: Cache key to delete. + + Returns: + True if key existed and was deleted, False otherwise. + + Raises: + BackendError: If Memcached operation fails. + """ + try: + return bool(self._client.delete(self._prefixed_key(key), noreply=False)) + except Exception as exc: + raise classify_memcached_error(exc, operation="delete", key=key) from exc + + def exists(self, key: str) -> bool: + """Check if key exists in Memcached. + + Memcached has no native EXISTS command; uses GET and checks for None. + + Args: + key: Cache key to check. + + Returns: + True if key exists, False otherwise. + + Raises: + BackendError: If Memcached operation fails. + """ + try: + return self._client.get(self._prefixed_key(key)) is not None + except Exception as exc: + raise classify_memcached_error(exc, operation="exists", key=key) from exc + + def health_check(self) -> tuple[bool, dict[str, Any]]: + """Check Memcached health by calling stats on all servers. 
+ + Returns: + Tuple of (is_healthy, details_dict) with latency_ms and backend_type. + """ + start = time.perf_counter() + try: + stats = self._client.stats() + elapsed_ms = (time.perf_counter() - start) * 1000 + # stats() returns dict of {server: stats_dict} + # Healthy if at least one server responded + is_healthy = len(stats) > 0 + return ( + is_healthy, + { + "backend_type": "memcached", + "latency_ms": round(elapsed_ms, 2), + "servers": len(stats), + "configured_servers": len(self._config.servers), + }, + ) + except Exception as exc: + elapsed_ms = (time.perf_counter() - start) * 1000 + return ( + False, + { + "backend_type": "memcached", + "latency_ms": round(elapsed_ms, 2), + "error": str(exc), + "servers": 0, + "configured_servers": len(self._config.servers), + }, + ) diff --git a/src/cachekit/backends/memcached/config.py b/src/cachekit/backends/memcached/config.py new file mode 100644 index 0000000..3a82434 --- /dev/null +++ b/src/cachekit/backends/memcached/config.py @@ -0,0 +1,119 @@ +"""Memcached backend configuration. + +Backend-specific settings for Memcached connections, separated from generic cache config +to maintain clean separation of concerns. +""" + +from __future__ import annotations + +from pydantic import Field, field_validator +from pydantic_settings import SettingsConfigDict + +from cachekit.backends.base_config import BaseBackendConfig, inherit_config + +# Memcached maximum TTL: 30 days in seconds +MAX_MEMCACHED_TTL: int = 30 * 24 * 60 * 60 # 2,592,000 + + +class MemcachedBackendConfig(BaseBackendConfig): + """Memcached backend configuration. + + Configuration for Memcached cache storage with connection pooling and timeout controls. + + Attributes: + servers: List of Memcached server addresses in "host:port" format. + connect_timeout: Connection timeout in seconds. + timeout: Operation timeout in seconds. + max_pool_size: Maximum connections per server. + retry_attempts: Number of retries on transient failures. 
+ key_prefix: Optional prefix prepended to all cache keys. + + Examples: + Create with defaults: + + >>> config = MemcachedBackendConfig() + >>> config.servers + ['127.0.0.1:11211'] + >>> config.connect_timeout + 2.0 + + Override via constructor: + + >>> custom = MemcachedBackendConfig( + ... servers=["mc1:11211", "mc2:11211"], + ... timeout=0.5, + ... max_pool_size=20, + ... ) + >>> len(custom.servers) + 2 + """ + + model_config = SettingsConfigDict( + **inherit_config(BaseBackendConfig), + env_prefix="CACHEKIT_MEMCACHED_", + ) + + servers: list[str] = Field( + default=["127.0.0.1:11211"], + description="Memcached server addresses (host:port)", + ) + connect_timeout: float = Field( + default=2.0, + ge=0.1, + le=30.0, + description="Connection timeout in seconds", + ) + timeout: float = Field( + default=1.0, + ge=0.1, + le=30.0, + description="Operation timeout in seconds", + ) + max_pool_size: int = Field( + default=10, + ge=1, + le=100, + description="Maximum connections per server", + ) + retry_attempts: int = Field( + default=2, + ge=0, + le=10, + description="Retries on transient failures", + ) + key_prefix: str = Field( + default="", + description="Optional prefix for all cache keys", + ) + + @field_validator("servers", mode="after") + @classmethod + def validate_servers(cls, v: list[str]) -> list[str]: + """Validate server list is non-empty and entries are well-formed. + + Args: + v: List of server address strings. + + Returns: + Validated server list. + + Raises: + ValueError: If server list is empty or entries are malformed. + """ + if not v: + raise ValueError("At least one Memcached server must be specified") + for server in v: + if ":" not in server: + raise ValueError(f"Server address must be in 'host:port' format, got: {server!r}") + return v + + @classmethod + def from_env(cls) -> MemcachedBackendConfig: + """Create configuration from environment variables. + + Reads CACHEKIT_MEMCACHED_SERVERS, CACHEKIT_MEMCACHED_CONNECT_TIMEOUT, etc. 
+ + Returns: + MemcachedBackendConfig instance loaded from environment. + """ + return cls() diff --git a/src/cachekit/backends/memcached/error_handler.py b/src/cachekit/backends/memcached/error_handler.py new file mode 100644 index 0000000..880ca9e --- /dev/null +++ b/src/cachekit/backends/memcached/error_handler.py @@ -0,0 +1,88 @@ +"""Memcached exception classification for backend abstraction. + +Maps pymemcache exceptions to BackendErrorType for circuit breaker and retry logic. +""" + +from __future__ import annotations + +import socket + +from cachekit.backends.errors import BackendError, BackendErrorType + + +def classify_memcached_error( + exc: Exception, + operation: str | None = None, + key: str | None = None, +) -> BackendError: + """Classify pymemcache exception into BackendError with error_type. + + Args: + exc: Original pymemcache exception. + operation: Operation that failed (get, set, delete, exists, health_check). + key: Cache key involved (optional, for debugging). + + Returns: + BackendError with appropriate error_type classification. 
+ + Examples: + Connection errors are classified as TRANSIENT: + + >>> from pymemcache.exceptions import MemcacheUnexpectedCloseError + >>> exc = MemcacheUnexpectedCloseError() + >>> error = classify_memcached_error(exc, operation="get", key="user:123") + >>> error.error_type.value + 'transient' + + Timeout errors get their own category: + + >>> exc = socket.timeout("timed out") + >>> error = classify_memcached_error(exc, operation="set") + >>> error.error_type.value + 'timeout' + """ + from pymemcache.exceptions import ( + MemcacheClientError, + MemcacheIllegalInputError, + MemcacheServerError, + MemcacheUnexpectedCloseError, + ) + + # Timeout — socket.timeout or OSError with ETIMEDOUT + if isinstance(exc, (socket.timeout, TimeoutError)): + return BackendError( + message=f"Memcached timeout during {operation}: {exc}", + error_type=BackendErrorType.TIMEOUT, + original_exception=exc, + operation=operation, + key=key, + ) + + # Transient — connection closed, server errors (retriable) + if isinstance(exc, (MemcacheUnexpectedCloseError, MemcacheServerError, ConnectionError, OSError)): + return BackendError( + message=f"Memcached transient error during {operation}: {exc}", + error_type=BackendErrorType.TRANSIENT, + original_exception=exc, + operation=operation, + key=key, + ) + + # Permanent — illegal input, client errors (don't retry) + if isinstance(exc, (MemcacheIllegalInputError, MemcacheClientError)): + return BackendError( + message=f"Memcached permanent error during {operation}: {exc}", + error_type=BackendErrorType.PERMANENT, + original_exception=exc, + operation=operation, + key=key, + ) + + # Unknown — safe default + return BackendError( + message=f"Memcached unknown error during {operation}: {exc}", + error_type=BackendErrorType.UNKNOWN, + original_exception=exc, + operation=operation, + key=key, + ) diff --git a/tests/critical/conftest.py b/tests/critical/conftest.py index 2076f23..1452b76 100644 --- a/tests/critical/conftest.py +++ 
b/tests/critical/conftest.py @@ -1,11 +1,11 @@ """Pytest configuration for critical path tests. -Override autouse fixtures that aren't needed for FileBackend tests. +Override autouse fixtures that aren't needed for FileBackend/MemcachedBackend tests. """ def pytest_runtest_setup(item): """Skip redis setup for file backend and cachekitio metrics tests.""" - if "file_backend" in item.nodeid or "cachekitio_metrics" in item.nodeid: + if "file_backend" in item.nodeid or "cachekitio_metrics" in item.nodeid or "memcached_backend" in item.nodeid: # Remove the autouse redis isolation fixture for this test item.fixturenames = [f for f in item.fixturenames if f != "setup_di_for_redis_isolation"] diff --git a/tests/critical/test_memcached_backend_critical.py b/tests/critical/test_memcached_backend_critical.py new file mode 100644 index 0000000..fc26ef8 --- /dev/null +++ b/tests/critical/test_memcached_backend_critical.py @@ -0,0 +1,163 @@ +"""Critical path tests for MemcachedBackend - fast smoke tests that run on every commit. + +These tests cover core MemcachedBackend functionality with mocked pymemcache: +- Basic get/set/delete roundtrips +- exists() checks +- health_check() implementation +- Intent decorator integration +- Default backend integration + +Performance target: < 1 second total for all tests. +Marked with @pytest.mark.critical for fast CI runs. +""" + +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +import pytest + +from cachekit.backends.memcached.backend import MemcachedBackend +from cachekit.backends.memcached.config import MemcachedBackendConfig + + +@pytest.fixture +def mock_store(): + """Dict-backed store for mock Memcached client.""" + return {} + + +@pytest.fixture +def mock_hash_client(mock_store): + """Patch HashClient so no real Memcached is needed. + + Wires get/set/delete/stats to a plain dict. 
+ """ + with patch("pymemcache.client.hash.HashClient") as mock_cls: + instance = MagicMock() + + def _set(key, value, expire=0): + mock_store[key] = value + + def _get(key): + return mock_store.get(key) + + def _delete(key, noreply=True): + if key in mock_store: + del mock_store[key] + return True + return False + + def _stats(): + return {("127.0.0.1", 11211): {"pid": "1", "uptime": "1000"}} + + instance.set.side_effect = _set + instance.get.side_effect = _get + instance.delete.side_effect = _delete + instance.stats.side_effect = _stats + mock_cls.return_value = instance + yield instance + + +@pytest.fixture +def backend(mock_hash_client): + """Create MemcachedBackend with mocked HashClient.""" + return MemcachedBackend(MemcachedBackendConfig()) + + +@pytest.mark.critical +def test_get_set_delete_roundtrip(backend): + """Core get/set/delete operations work correctly.""" + # Set + backend.set("key", b"value", ttl=60) + + # Get + assert backend.get("key") == b"value" + + # Delete + assert backend.delete("key") is True + assert backend.get("key") is None + assert backend.delete("key") is False # Already deleted + + +@pytest.mark.critical +def test_exists_accurate(backend): + """exists() returns correct True/False status.""" + assert backend.exists("missing") is False + backend.set("present", b"data", ttl=300) + assert backend.exists("present") is True + + +@pytest.mark.critical +def test_health_check_returns_tuple(backend): + """health_check() returns (bool, dict) with required fields.""" + is_healthy, details = backend.health_check() + + assert isinstance(is_healthy, bool) + assert is_healthy is True + assert isinstance(details, dict) + assert details["backend_type"] == "memcached" + assert "latency_ms" in details + assert isinstance(details["latency_ms"], float) + assert details["servers"] == 1 + + +@pytest.mark.critical +def test_intent_decorators_with_memcached_backend(mock_store): + """Intent decorators work with explicit MemcachedBackend.""" + from cachekit 
import cache + + with patch("pymemcache.client.hash.HashClient") as mock_cls: + instance = MagicMock() + instance.set.side_effect = lambda k, v, expire=0: mock_store.__setitem__(k, v) + instance.get.side_effect = lambda k: mock_store.get(k) + instance.delete.side_effect = lambda k, noreply=True: mock_store.pop(k, None) is not None + mock_cls.return_value = instance + + mb = MemcachedBackend(MemcachedBackendConfig()) + call_count = 0 + + @cache.minimal(ttl=300, backend=mb) + def compute(x: int) -> int: + nonlocal call_count + call_count += 1 + return x * 2 + + assert compute(5) == 10 + assert call_count == 1 + assert compute(5) == 10 + assert call_count == 1 # Cache hit + + +@pytest.mark.critical +def test_set_default_backend_with_memcached_backend(mock_store): + """set_default_backend() is consulted when no explicit backend= provided.""" + from cachekit import cache + from cachekit.config.decorator import get_default_backend, set_default_backend + + with patch("pymemcache.client.hash.HashClient") as mock_cls: + instance = MagicMock() + instance.set.side_effect = lambda k, v, expire=0: mock_store.__setitem__(k, v) + instance.get.side_effect = lambda k: mock_store.get(k) + instance.delete.side_effect = lambda k, noreply=True: mock_store.pop(k, None) is not None + mock_cls.return_value = instance + + mb = MemcachedBackend(MemcachedBackendConfig()) + original = get_default_backend() + + try: + set_default_backend(mb) + call_count = 0 + + @cache.minimal(ttl=300) + def compute(x: int) -> int: + nonlocal call_count + call_count += 1 + return x * 3 + + assert compute(4) == 12 + assert call_count == 1 + assert compute(4) == 12 + assert call_count == 1 # Cache hit + finally: + set_default_backend(original) diff --git a/tests/unit/backends/test_memcached_backend.py b/tests/unit/backends/test_memcached_backend.py new file mode 100644 index 0000000..52c7d32 --- /dev/null +++ b/tests/unit/backends/test_memcached_backend.py @@ -0,0 +1,313 @@ +"""Unit tests for MemcachedBackend. 
+ +Tests for backends/memcached/backend.py covering: +- Protocol compliance with BaseBackend +- Basic operations (get, set, delete, exists) +- TTL behavior and 30-day clamping +- Key prefix application +- Error classification via classify_memcached_error +- Health check responses +""" + +from __future__ import annotations + +import socket +from unittest.mock import MagicMock, patch + +import pytest + +from cachekit.backends.base import BaseBackend +from cachekit.backends.errors import BackendErrorType +from cachekit.backends.memcached.backend import MemcachedBackend +from cachekit.backends.memcached.config import MAX_MEMCACHED_TTL, MemcachedBackendConfig +from cachekit.backends.memcached.error_handler import classify_memcached_error + + +@pytest.fixture +def config() -> MemcachedBackendConfig: + """Create MemcachedBackendConfig with defaults.""" + return MemcachedBackendConfig() + + +@pytest.fixture +def mock_hash_client(): + """Patch HashClient and return the mock instance.""" + with patch("pymemcache.client.hash.HashClient") as mock_cls: + mock_instance = MagicMock() + mock_cls.return_value = mock_instance + yield mock_instance + + +@pytest.fixture +def backend(config: MemcachedBackendConfig, mock_hash_client: MagicMock) -> MemcachedBackend: + """Create MemcachedBackend with mocked HashClient.""" + return MemcachedBackend(config) + + +@pytest.mark.unit +class TestProtocolCompliance: + """Test BaseBackend protocol compliance.""" + + def test_implements_base_backend_protocol(self, backend: MemcachedBackend) -> None: + """Verify MemcachedBackend satisfies BaseBackend protocol.""" + assert isinstance(backend, BaseBackend) + + def test_has_required_methods(self, backend: MemcachedBackend) -> None: + """Verify all required methods exist and are callable.""" + assert callable(backend.get) + assert callable(backend.set) + assert callable(backend.delete) + assert callable(backend.exists) + assert callable(backend.health_check) + + +@pytest.mark.unit +class 
TestBasicOperations: + """Test basic get/set/delete/exists operations.""" + + def test_get_returns_bytes(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test get returns bytes when key exists.""" + mock_hash_client.get.return_value = b"cached_value" + result = backend.get("mykey") + assert result == b"cached_value" + assert isinstance(result, bytes) + + def test_get_returns_none_for_missing_key(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test get returns None when key does not exist.""" + mock_hash_client.get.return_value = None + result = backend.get("missing") + assert result is None + + def test_set_stores_value(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test set calls client.set with correct arguments.""" + backend.set("mykey", b"myvalue", ttl=60) + mock_hash_client.set.assert_called_once_with("mykey", b"myvalue", expire=60) + + def test_delete_returns_true_when_key_exists(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test delete returns True when key existed.""" + mock_hash_client.delete.return_value = True + result = backend.delete("mykey") + assert result is True + + def test_delete_returns_false_when_key_missing(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test delete returns False when key did not exist.""" + mock_hash_client.delete.return_value = False + result = backend.delete("mykey") + assert result is False + + def test_delete_passes_noreply_false(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test delete passes noreply=False for synchronous response.""" + mock_hash_client.delete.return_value = True + backend.delete("mykey") + mock_hash_client.delete.assert_called_once_with("mykey", noreply=False) + + def test_exists_returns_true_when_key_exists(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test exists returns True when get returns a 
value.""" + mock_hash_client.get.return_value = b"some_value" + result = backend.exists("mykey") + assert result is True + + def test_exists_returns_false_when_key_missing(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test exists returns False when get returns None.""" + mock_hash_client.get.return_value = None + result = backend.exists("mykey") + assert result is False + + +@pytest.mark.unit +class TestTTLBehavior: + """Test TTL handling and Memcached's 30-day maximum.""" + + def test_ttl_none_passes_expire_zero(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test ttl=None passes expire=0 (no expiry).""" + backend.set("key", b"val", ttl=None) + mock_hash_client.set.assert_called_once_with("key", b"val", expire=0) + + def test_ttl_zero_passes_expire_zero(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test ttl=0 passes expire=0 (no expiry).""" + backend.set("key", b"val", ttl=0) + mock_hash_client.set.assert_called_once_with("key", b"val", expire=0) + + def test_ttl_positive_passes_expire(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test ttl=100 passes expire=100.""" + backend.set("key", b"val", ttl=100) + mock_hash_client.set.assert_called_once_with("key", b"val", expire=100) + + def test_ttl_exceeding_30_days_gets_clamped(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test TTL > 30 days gets clamped to MAX_MEMCACHED_TTL (2592000).""" + huge_ttl = MAX_MEMCACHED_TTL + 1000 + backend.set("key", b"val", ttl=huge_ttl) + mock_hash_client.set.assert_called_once_with("key", b"val", expire=MAX_MEMCACHED_TTL) + + def test_ttl_exactly_30_days_not_clamped(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test TTL exactly at 30-day max passes through unchanged.""" + backend.set("key", b"val", ttl=MAX_MEMCACHED_TTL) + mock_hash_client.set.assert_called_once_with("key", b"val", expire=MAX_MEMCACHED_TTL) + + 
def test_negative_ttl_passes_expire_zero(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test negative TTL is treated as no expiry.""" + backend.set("key", b"val", ttl=-5) + mock_hash_client.set.assert_called_once_with("key", b"val", expire=0) + + +@pytest.mark.unit +class TestKeyPrefix: + """Test key prefix application to all operations.""" + + @pytest.fixture + def prefixed_config(self) -> MemcachedBackendConfig: + """Config with key_prefix set.""" + return MemcachedBackendConfig(key_prefix="app:") + + @pytest.fixture + def prefixed_backend(self, prefixed_config: MemcachedBackendConfig, mock_hash_client: MagicMock) -> MemcachedBackend: + """Backend with key prefix configured.""" + return MemcachedBackend(prefixed_config) + + def test_get_applies_prefix(self, prefixed_backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test get prepends prefix to key.""" + mock_hash_client.get.return_value = None + prefixed_backend.get("mykey") + mock_hash_client.get.assert_called_once_with("app:mykey") + + def test_set_applies_prefix(self, prefixed_backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test set prepends prefix to key.""" + prefixed_backend.set("mykey", b"val", ttl=60) + mock_hash_client.set.assert_called_once_with("app:mykey", b"val", expire=60) + + def test_delete_applies_prefix(self, prefixed_backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test delete prepends prefix to key.""" + mock_hash_client.delete.return_value = True + prefixed_backend.delete("mykey") + mock_hash_client.delete.assert_called_once_with("app:mykey", noreply=False) + + def test_exists_applies_prefix(self, prefixed_backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test exists prepends prefix to key.""" + mock_hash_client.get.return_value = None + prefixed_backend.exists("mykey") + mock_hash_client.get.assert_called_once_with("app:mykey") + + def test_no_prefix_when_empty(self, backend: 
MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test no prefix applied when key_prefix is empty.""" + mock_hash_client.get.return_value = None + backend.get("mykey") + mock_hash_client.get.assert_called_once_with("mykey") + + +@pytest.mark.unit +class TestErrorClassification: + """Test classify_memcached_error maps pymemcache exceptions correctly.""" + + def test_socket_timeout_maps_to_timeout(self) -> None: + """Test socket.timeout is classified as TIMEOUT.""" + exc = socket.timeout("timed out") + error = classify_memcached_error(exc, operation="get", key="k1") + assert error.error_type == BackendErrorType.TIMEOUT + + def test_timeout_error_maps_to_timeout(self) -> None: + """Test TimeoutError is classified as TIMEOUT.""" + exc = TimeoutError("operation timed out") + error = classify_memcached_error(exc, operation="set", key="k2") + assert error.error_type == BackendErrorType.TIMEOUT + + def test_unexpected_close_maps_to_transient(self) -> None: + """Test MemcacheUnexpectedCloseError is classified as TRANSIENT.""" + from pymemcache.exceptions import MemcacheUnexpectedCloseError + + exc = MemcacheUnexpectedCloseError() + error = classify_memcached_error(exc, operation="get", key="k3") + assert error.error_type == BackendErrorType.TRANSIENT + + def test_server_error_maps_to_transient(self) -> None: + """Test MemcacheServerError is classified as TRANSIENT.""" + from pymemcache.exceptions import MemcacheServerError + + exc = MemcacheServerError("SERVER_ERROR out of memory") + error = classify_memcached_error(exc, operation="set") + assert error.error_type == BackendErrorType.TRANSIENT + + def test_connection_error_maps_to_transient(self) -> None: + """Test ConnectionError is classified as TRANSIENT.""" + exc = ConnectionError("Connection refused") + error = classify_memcached_error(exc, operation="get") + assert error.error_type == BackendErrorType.TRANSIENT + + def test_os_error_maps_to_transient(self) -> None: + """Test OSError is classified as 
TRANSIENT.""" + exc = OSError("Network unreachable") + error = classify_memcached_error(exc, operation="get") + assert error.error_type == BackendErrorType.TRANSIENT + + def test_illegal_input_maps_to_permanent(self) -> None: + """Test MemcacheIllegalInputError is classified as PERMANENT.""" + from pymemcache.exceptions import MemcacheIllegalInputError + + exc = MemcacheIllegalInputError("Key too long") + error = classify_memcached_error(exc, operation="set", key="k4") + assert error.error_type == BackendErrorType.PERMANENT + + def test_client_error_maps_to_permanent(self) -> None: + """Test MemcacheClientError is classified as PERMANENT.""" + from pymemcache.exceptions import MemcacheClientError + + exc = MemcacheClientError("CLIENT_ERROR bad data") + error = classify_memcached_error(exc, operation="set") + assert error.error_type == BackendErrorType.PERMANENT + + def test_unknown_exception_maps_to_unknown(self) -> None: + """Test unrecognized exception is classified as UNKNOWN.""" + exc = RuntimeError("something unexpected") + error = classify_memcached_error(exc, operation="get", key="k5") + assert error.error_type == BackendErrorType.UNKNOWN + + def test_error_preserves_operation(self) -> None: + """Test that operation context is preserved in BackendError.""" + exc = RuntimeError("fail") + error = classify_memcached_error(exc, operation="delete", key="k6") + assert error.operation == "delete" + assert error.key == "k6" + + def test_error_preserves_original_exception(self) -> None: + """Test that original exception is preserved in BackendError.""" + exc = RuntimeError("original") + error = classify_memcached_error(exc, operation="get") + assert error.original_exception is exc + + +@pytest.mark.unit +class TestHealthCheck: + """Test health_check method.""" + + def test_healthy_returns_true_with_details(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test health_check returns (True, details) when server responds.""" + 
mock_hash_client.stats.return_value = {("127.0.0.1", 11211): {"pid": "1234"}} + is_healthy, details = backend.health_check() + + assert is_healthy is True + assert details["backend_type"] == "memcached" + assert "latency_ms" in details + assert isinstance(details["latency_ms"], float) + assert details["servers"] == 1 + assert details["configured_servers"] == 1 + + def test_unhealthy_on_empty_stats(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test health_check returns (False, ...) when no servers respond.""" + mock_hash_client.stats.return_value = {} + is_healthy, details = backend.health_check() + + assert is_healthy is False + assert details["backend_type"] == "memcached" + assert details["servers"] == 0 + + def test_unhealthy_on_exception(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test health_check returns (False, details) on exception.""" + mock_hash_client.stats.side_effect = ConnectionError("Connection refused") + is_healthy, details = backend.health_check() + + assert is_healthy is False + assert details["backend_type"] == "memcached" + assert "latency_ms" in details + assert isinstance(details["latency_ms"], float) + assert "error" in details + assert details["servers"] == 0 + assert details["configured_servers"] == 1 diff --git a/tests/unit/backends/test_memcached_config.py b/tests/unit/backends/test_memcached_config.py new file mode 100644 index 0000000..cf83c65 --- /dev/null +++ b/tests/unit/backends/test_memcached_config.py @@ -0,0 +1,324 @@ +"""Unit tests for MemcachedBackendConfig. + +Tests configuration parsing, validation rules, environment variable loading, +and the from_env() classmethod for the Memcached cache backend. 
+""" + +from __future__ import annotations + +import os + +import pytest +from pydantic import ValidationError + +from cachekit.backends.memcached.config import MemcachedBackendConfig + + +@pytest.mark.unit +class TestMemcachedBackendConfigDefaults: + """Test default configuration values.""" + + def test_default_servers(self): + """Test that default servers is localhost:11211.""" + config = MemcachedBackendConfig() + assert config.servers == ["127.0.0.1:11211"] + + def test_default_connect_timeout(self): + """Test that default connect_timeout is 2.0 seconds.""" + config = MemcachedBackendConfig() + assert config.connect_timeout == 2.0 + + def test_default_timeout(self): + """Test that default timeout is 1.0 seconds.""" + config = MemcachedBackendConfig() + assert config.timeout == 1.0 + + def test_default_max_pool_size(self): + """Test that default max_pool_size is 10.""" + config = MemcachedBackendConfig() + assert config.max_pool_size == 10 + + def test_default_retry_attempts(self): + """Test that default retry_attempts is 2.""" + config = MemcachedBackendConfig() + assert config.retry_attempts == 2 + + def test_default_key_prefix(self): + """Test that default key_prefix is empty string.""" + config = MemcachedBackendConfig() + assert config.key_prefix == "" + + +@pytest.mark.unit +class TestMemcachedBackendConfigConstructor: + """Test constructor with custom values.""" + + def test_custom_servers(self): + """Test setting custom servers via constructor.""" + config = MemcachedBackendConfig(servers=["mc1:11211", "mc2:11211"]) + assert config.servers == ["mc1:11211", "mc2:11211"] + + def test_custom_connect_timeout(self): + """Test setting custom connect_timeout via constructor.""" + config = MemcachedBackendConfig(connect_timeout=5.0) + assert config.connect_timeout == 5.0 + + def test_custom_timeout(self): + """Test setting custom timeout via constructor.""" + config = MemcachedBackendConfig(timeout=0.5) + assert config.timeout == 0.5 + + def 
test_custom_max_pool_size(self): + """Test setting custom max_pool_size via constructor.""" + config = MemcachedBackendConfig(max_pool_size=20) + assert config.max_pool_size == 20 + + def test_custom_retry_attempts(self): + """Test setting custom retry_attempts via constructor.""" + config = MemcachedBackendConfig(retry_attempts=5) + assert config.retry_attempts == 5 + + def test_custom_key_prefix(self): + """Test setting custom key_prefix via constructor.""" + config = MemcachedBackendConfig(key_prefix="myapp:") + assert config.key_prefix == "myapp:" + + def test_all_custom_values(self): + """Test setting all values via constructor.""" + config = MemcachedBackendConfig( + servers=["mc1:11211", "mc2:11212"], + connect_timeout=5.0, + timeout=2.0, + max_pool_size=50, + retry_attempts=3, + key_prefix="test:", + ) + assert config.servers == ["mc1:11211", "mc2:11212"] + assert config.connect_timeout == 5.0 + assert config.timeout == 2.0 + assert config.max_pool_size == 50 + assert config.retry_attempts == 3 + assert config.key_prefix == "test:" + + +@pytest.mark.unit +class TestMemcachedBackendConfigValidation: + """Test validation rules.""" + + def test_connect_timeout_rejects_below_minimum(self): + """Test that connect_timeout rejects values < 0.1.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(connect_timeout=0.05) + errors = exc_info.value.errors() + assert any("greater than or equal to 0.1" in str(e) for e in errors) + + def test_connect_timeout_rejects_above_maximum(self): + """Test that connect_timeout rejects values > 30.0.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(connect_timeout=30.1) + errors = exc_info.value.errors() + assert any("less than or equal to 30" in str(e) for e in errors) + + def test_connect_timeout_accepts_boundaries(self): + """Test that connect_timeout accepts boundary values.""" + config_min = MemcachedBackendConfig(connect_timeout=0.1) + assert config_min.connect_timeout == 
0.1 + + config_max = MemcachedBackendConfig(connect_timeout=30.0) + assert config_max.connect_timeout == 30.0 + + def test_timeout_rejects_below_minimum(self): + """Test that timeout rejects values < 0.1.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(timeout=0.05) + errors = exc_info.value.errors() + assert any("greater than or equal to 0.1" in str(e) for e in errors) + + def test_timeout_rejects_above_maximum(self): + """Test that timeout rejects values > 30.0.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(timeout=30.1) + errors = exc_info.value.errors() + assert any("less than or equal to 30" in str(e) for e in errors) + + def test_timeout_accepts_boundaries(self): + """Test that timeout accepts boundary values.""" + config_min = MemcachedBackendConfig(timeout=0.1) + assert config_min.timeout == 0.1 + + config_max = MemcachedBackendConfig(timeout=30.0) + assert config_max.timeout == 30.0 + + def test_max_pool_size_rejects_below_minimum(self): + """Test that max_pool_size rejects values < 1.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(max_pool_size=0) + errors = exc_info.value.errors() + assert any("greater than or equal to 1" in str(e) for e in errors) + + def test_max_pool_size_rejects_above_maximum(self): + """Test that max_pool_size rejects values > 100.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(max_pool_size=101) + errors = exc_info.value.errors() + assert any("less than or equal to 100" in str(e) for e in errors) + + def test_max_pool_size_accepts_boundaries(self): + """Test that max_pool_size accepts boundary values.""" + config_min = MemcachedBackendConfig(max_pool_size=1) + assert config_min.max_pool_size == 1 + + config_max = MemcachedBackendConfig(max_pool_size=100) + assert config_max.max_pool_size == 100 + + def test_retry_attempts_rejects_below_minimum(self): + """Test that retry_attempts rejects values < 0.""" + 
with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(retry_attempts=-1) + errors = exc_info.value.errors() + assert any("greater than or equal to 0" in str(e) for e in errors) + + def test_retry_attempts_rejects_above_maximum(self): + """Test that retry_attempts rejects values > 10.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(retry_attempts=11) + errors = exc_info.value.errors() + assert any("less than or equal to 10" in str(e) for e in errors) + + def test_retry_attempts_accepts_boundaries(self): + """Test that retry_attempts accepts boundary values.""" + config_min = MemcachedBackendConfig(retry_attempts=0) + assert config_min.retry_attempts == 0 + + config_max = MemcachedBackendConfig(retry_attempts=10) + assert config_max.retry_attempts == 10 + + def test_servers_rejects_empty_list(self): + """Test that servers rejects empty list.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(servers=[]) + errors = exc_info.value.errors() + assert any("At least one" in str(e) for e in errors) + + def test_servers_rejects_bad_format(self): + """Test that servers rejects entries without host:port format.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(servers=["localhost"]) + errors = exc_info.value.errors() + assert any("host:port" in str(e) for e in errors) + + def test_servers_rejects_bad_format_in_list(self): + """Test that servers rejects if any entry is malformed.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(servers=["mc1:11211", "bad_server"]) + errors = exc_info.value.errors() + assert any("host:port" in str(e) for e in errors) + + def test_extra_fields_rejected(self): + """Test that extra fields are rejected due to extra='forbid'.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(unknown_field="value") + errors = exc_info.value.errors() + assert any("extra_forbidden" in str(e) for e in 
errors) + + +@pytest.mark.unit +class TestMemcachedBackendConfigEnvVars: + """Test environment variable parsing.""" + + @pytest.fixture + def clean_env(self, monkeypatch): + """Remove all CACHEKIT_MEMCACHED_* environment variables.""" + for key in list(os.environ.keys()): + if key.startswith("CACHEKIT_MEMCACHED_"): + monkeypatch.delenv(key, raising=False) + yield + for key in list(os.environ.keys()): + if key.startswith("CACHEKIT_MEMCACHED_"): + monkeypatch.delenv(key, raising=False) + + def test_env_var_servers(self, monkeypatch, clean_env): + """Test CACHEKIT_MEMCACHED_SERVERS parsing (JSON list).""" + monkeypatch.setenv("CACHEKIT_MEMCACHED_SERVERS", '["mc1:11211","mc2:11212"]') + config = MemcachedBackendConfig() + assert config.servers == ["mc1:11211", "mc2:11212"] + + def test_env_var_connect_timeout(self, monkeypatch, clean_env): + """Test CACHEKIT_MEMCACHED_CONNECT_TIMEOUT parsing.""" + monkeypatch.setenv("CACHEKIT_MEMCACHED_CONNECT_TIMEOUT", "5.0") + config = MemcachedBackendConfig() + assert config.connect_timeout == 5.0 + + def test_env_var_timeout(self, monkeypatch, clean_env): + """Test CACHEKIT_MEMCACHED_TIMEOUT parsing.""" + monkeypatch.setenv("CACHEKIT_MEMCACHED_TIMEOUT", "3.0") + config = MemcachedBackendConfig() + assert config.timeout == 3.0 + + def test_env_var_max_pool_size(self, monkeypatch, clean_env): + """Test CACHEKIT_MEMCACHED_MAX_POOL_SIZE parsing.""" + monkeypatch.setenv("CACHEKIT_MEMCACHED_MAX_POOL_SIZE", "50") + config = MemcachedBackendConfig() + assert config.max_pool_size == 50 + + def test_env_var_retry_attempts(self, monkeypatch, clean_env): + """Test CACHEKIT_MEMCACHED_RETRY_ATTEMPTS parsing.""" + monkeypatch.setenv("CACHEKIT_MEMCACHED_RETRY_ATTEMPTS", "5") + config = MemcachedBackendConfig() + assert config.retry_attempts == 5 + + def test_env_var_key_prefix(self, monkeypatch, clean_env): + """Test CACHEKIT_MEMCACHED_KEY_PREFIX parsing.""" + monkeypatch.setenv("CACHEKIT_MEMCACHED_KEY_PREFIX", "prod:") + config = 
MemcachedBackendConfig() + assert config.key_prefix == "prod:" + + def test_env_var_case_insensitive(self, monkeypatch, clean_env): + """Test that environment variables are case-insensitive.""" + monkeypatch.setenv("cachekit_memcached_max_pool_size", "25") + config = MemcachedBackendConfig() + assert config.max_pool_size == 25 + + +@pytest.mark.unit +class TestMemcachedBackendConfigFromEnv: + """Test from_env() classmethod.""" + + @pytest.fixture + def clean_env(self, monkeypatch): + """Remove all CACHEKIT_MEMCACHED_* environment variables.""" + for key in list(os.environ.keys()): + if key.startswith("CACHEKIT_MEMCACHED_"): + monkeypatch.delenv(key, raising=False) + yield + for key in list(os.environ.keys()): + if key.startswith("CACHEKIT_MEMCACHED_"): + monkeypatch.delenv(key, raising=False) + + def test_from_env_returns_correct_type(self, clean_env): + """Test from_env() returns MemcachedBackendConfig instance.""" + config = MemcachedBackendConfig.from_env() + assert isinstance(config, MemcachedBackendConfig) + + def test_from_env_reads_env_vars(self, monkeypatch, clean_env): + """Test from_env() reads environment variables.""" + monkeypatch.setenv("CACHEKIT_MEMCACHED_CONNECT_TIMEOUT", "8.0") + monkeypatch.setenv("CACHEKIT_MEMCACHED_MAX_POOL_SIZE", "30") + monkeypatch.setenv("CACHEKIT_MEMCACHED_KEY_PREFIX", "staging:") + + config = MemcachedBackendConfig.from_env() + + assert config.connect_timeout == 8.0 + assert config.max_pool_size == 30 + assert config.key_prefix == "staging:" + + def test_from_env_uses_defaults_when_no_env(self, clean_env): + """Test from_env() uses defaults when no env vars set.""" + config = MemcachedBackendConfig.from_env() + + assert config.servers == ["127.0.0.1:11211"] + assert config.connect_timeout == 2.0 + assert config.timeout == 1.0 + assert config.max_pool_size == 10 + assert config.retry_attempts == 2 + assert config.key_prefix == "" diff --git a/uv.lock b/uv.lock index 3abf38e..d4ad9c4 100644 --- a/uv.lock +++ b/uv.lock @@ 
-235,7 +235,7 @@ filecache = [ [[package]] name = "cachekit" -version = "0.3.1" +version = "0.4.0" source = { editable = "." } dependencies = [ { name = "blake3" }, @@ -261,6 +261,9 @@ data = [ { name = "pyarrow", version = "21.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "pyarrow", version = "22.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] +memcached = [ + { name = "pymemcache" }, +] [package.dev-dependencies] dev = [ @@ -279,6 +282,7 @@ dev = [ { name = "psutil" }, { name = "pyarrow", version = "21.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "pyarrow", version = "22.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pymemcache" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-benchmark" }, @@ -306,11 +310,12 @@ requires-dist = [ { name = "pyarrow", marker = "extra == 'data'", specifier = ">=21.0.0" }, { name = "pydantic", specifier = ">=2.0.0" }, { name = "pydantic-settings", specifier = ">=2.0.0" }, + { name = "pymemcache", marker = "extra == 'memcached'", specifier = ">=4.0.0" }, { name = "redis", extras = ["hiredis"], specifier = ">=4.0.0" }, { name = "tenacity", specifier = ">=8.0.0" }, { name = "xxhash", specifier = ">=3.5.0" }, ] -provides-extras = ["data"] +provides-extras = ["data", "memcached"] [package.metadata.requires-dev] dev = [ @@ -325,6 +330,7 @@ dev = [ { name = "pip-audit", specifier = ">=2.7.0" }, { name = "psutil", specifier = ">=5.9.0" }, { name = "pyarrow", specifier = ">=21.0.0" }, + { name = "pymemcache", specifier = ">=4.0.0" }, { name = "pytest", specifier = ">=7.0.0" }, { name = "pytest-asyncio", specifier = ">=0.21.0" }, { name = "pytest-benchmark", specifier = ">=4.0.0" }, @@ -2097,6 +2103,15 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pymemcache" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/b6/4541b664aeaad025dfb8e851dcddf8e25ab22607e674dd2b562ea3e3586f/pymemcache-4.0.0.tar.gz", hash = "sha256:27bf9bd1bbc1e20f83633208620d56de50f14185055e49504f4f5e94e94aff94", size = 70176, upload-time = "2022-10-17T16:53:07.726Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/ba/2f7b22d8135b51c4fefb041461f8431e1908778e6539ff5af6eeaaee367a/pymemcache-4.0.0-py2.py3-none-any.whl", hash = "sha256:f507bc20e0dc8d562f8df9d872107a278df049fa496805c1431b926f3ddd0eab", size = 60772, upload-time = "2022-10-17T16:53:04.388Z" }, +] + [[package]] name = "pyparsing" version = "3.2.5" From 37361dffc3288128a390c34094ab228214813272 Mon Sep 17 00:00:00 2001 From: Ray Walker Date: Thu, 26 Mar 2026 21:38:01 +1100 Subject: [PATCH 2/6] fix: health_check bug + add 28 Memcached integration tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bug fix: health_check() called self._client.stats() but HashClient has no stats() method — only Client does. Replaced with a benign get() probe. The unit tests never caught this because they mocked everything. This is exactly why integration tests matter. 
Integration tests (28 total, all against real Memcached):
- CRUD round-trip: set/get/delete/exists, overwrite, empty values
- TTL expiry: real server-side eviction, negative TTL, 30-day clamp
- Key prefix isolation: namespace separation verified
- Binary data integrity: null bytes, all-byte-values, 1MB payloads
- Concurrent thread safety: 10 threads x 50 ops, mixed read/write/delete
- Health check: live server + unreachable server
- Decorator integration: @cache.minimal with set_default_backend
- Edge cases: 500-key bulk ops, rapid set/delete cycles

Threading note: per-thread MemcachedBackend instances are used for the
concurrency tests — the pymemcache HashClient pool has known contention
issues when shared across many threads simultaneously.
---
 src/cachekit/backends/memcached/backend.py    | 15 +-
 .../test_memcached_backend_critical.py        |  2 +-
 .../integration/test_memcached_integration.py | 438 ++++++++++++++++++
 tests/unit/backends/test_memcached_backend.py | 15 +-
 4 files changed, 448 insertions(+), 22 deletions(-)
 create mode 100644 tests/integration/test_memcached_integration.py

diff --git a/src/cachekit/backends/memcached/backend.py b/src/cachekit/backends/memcached/backend.py
index b66bbb7..6c79127 100644
--- a/src/cachekit/backends/memcached/backend.py
+++ b/src/cachekit/backends/memcached/backend.py
@@ -154,24 +154,24 @@ def exists(self, key: str) -> bool:
         raise classify_memcached_error(exc, operation="exists", key=key) from exc
 
     def health_check(self) -> tuple[bool, dict[str, Any]]:
-        """Check Memcached health by calling stats on all servers.
+        """Check Memcached health by probing with a benign get.
+
+        HashClient doesn't expose stats(), so we issue a harmless get
+        (routed to a single server by the hash ring) to verify connectivity.
 
         Returns:
             Tuple of (is_healthy, details_dict) with latency_ms and backend_type.
""" start = time.perf_counter() try: - stats = self._client.stats() + # HashClient has no stats() — probe with a harmless get + self._client.get("__cachekit_health__") elapsed_ms = (time.perf_counter() - start) * 1000 - # stats() returns dict of {server: stats_dict} - # Healthy if at least one server responded - is_healthy = len(stats) > 0 return ( - is_healthy, + True, { "backend_type": "memcached", "latency_ms": round(elapsed_ms, 2), - "servers": len(stats), "configured_servers": len(self._config.servers), }, ) @@ -183,7 +183,6 @@ def health_check(self) -> tuple[bool, dict[str, Any]]: "backend_type": "memcached", "latency_ms": round(elapsed_ms, 2), "error": str(exc), - "servers": 0, "configured_servers": len(self._config.servers), }, ) diff --git a/tests/critical/test_memcached_backend_critical.py b/tests/critical/test_memcached_backend_critical.py index fc26ef8..af52e35 100644 --- a/tests/critical/test_memcached_backend_critical.py +++ b/tests/critical/test_memcached_backend_critical.py @@ -99,7 +99,7 @@ def test_health_check_returns_tuple(backend): assert details["backend_type"] == "memcached" assert "latency_ms" in details assert isinstance(details["latency_ms"], float) - assert details["servers"] == 1 + assert details["configured_servers"] == 1 @pytest.mark.critical diff --git a/tests/integration/test_memcached_integration.py b/tests/integration/test_memcached_integration.py new file mode 100644 index 0000000..ea6e6cf --- /dev/null +++ b/tests/integration/test_memcached_integration.py @@ -0,0 +1,438 @@ +"""Integration tests for MemcachedBackend against a real Memcached instance. + +Requires a running Memcached server. Start one with: + docker run -d --name cachekit-memcached -p 11211:11211 docker.io/library/memcached:alpine -m 64 + +Set MEMCACHED_TEST_HOST / MEMCACHED_TEST_PORT to override defaults. +Tests are skipped when Memcached is unreachable. 
+ +Covers: +- CRUD round-trip (set/get/delete/exists) +- TTL expiry (real server-side eviction) +- Key prefix isolation +- Multi-server HashClient consistent hashing +- Concurrent thread safety +- Large value handling +- Binary data integrity (null bytes, high bytes) +- Health check against live server +- Decorator integration with real backend +- 30-day TTL clamping +""" + +from __future__ import annotations + +import os +import threading +import time + +import pytest + +from cachekit.backends.memcached.backend import MemcachedBackend +from cachekit.backends.memcached.config import MAX_MEMCACHED_TTL, MemcachedBackendConfig + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +MEMCACHED_HOST = os.environ.get("MEMCACHED_TEST_HOST", "localhost") +MEMCACHED_PORT = int(os.environ.get("MEMCACHED_TEST_PORT", "11211")) + + +def _memcached_reachable() -> bool: + """Check if Memcached is reachable.""" + try: + from pymemcache.client.base import Client + + c = Client((MEMCACHED_HOST, MEMCACHED_PORT), connect_timeout=2, timeout=2) + c.version() + c.close() + return True + except Exception: + return False + + +pytestmark = [ + pytest.mark.integration, + pytest.mark.skipif(not _memcached_reachable(), reason="Memcached not reachable"), +] + + +# Override autouse Redis fixture — Memcached tests don't need Redis +@pytest.fixture(autouse=True) +def setup_di_for_redis_isolation(): + """Override global Redis fixture — MemcachedBackend doesn't need Redis.""" + pass + + +@pytest.fixture +def memcached_config() -> MemcachedBackendConfig: + """Config pointing at the test Memcached instance.""" + return MemcachedBackendConfig( + servers=[f"{MEMCACHED_HOST}:{MEMCACHED_PORT}"], + connect_timeout=2.0, + timeout=2.0, + ) + + +@pytest.fixture +def backend(memcached_config: MemcachedBackendConfig) -> MemcachedBackend: + """Provide a MemcachedBackend connected to a real 
server, flushed between tests.""" + b = MemcachedBackend(memcached_config) + # Flush all keys before each test for isolation + b._client.flush_all() + yield b + # Cleanup after test + try: + b._client.flush_all() + except Exception: + pass + + +@pytest.fixture +def prefixed_backend() -> MemcachedBackend: + """Backend with a key prefix for isolation tests.""" + config = MemcachedBackendConfig( + servers=[f"{MEMCACHED_HOST}:{MEMCACHED_PORT}"], + key_prefix="test_ns:", + connect_timeout=2.0, + timeout=2.0, + ) + b = MemcachedBackend(config) + # No flush_all here — the 'backend' fixture handles global flush + yield b + + +# --------------------------------------------------------------------------- +# CRUD round-trip +# --------------------------------------------------------------------------- + + +class TestCRUDRoundTrip: + """Verify basic set/get/delete/exists against real Memcached.""" + + def test_set_get_roundtrip(self, backend: MemcachedBackend) -> None: + backend.set("hello", b"world") + assert backend.get("hello") == b"world" + + def test_get_missing_key_returns_none(self, backend: MemcachedBackend) -> None: + assert backend.get("nonexistent") is None + + def test_delete_existing_key(self, backend: MemcachedBackend) -> None: + backend.set("to_delete", b"bye") + assert backend.delete("to_delete") is True + assert backend.get("to_delete") is None + + def test_delete_missing_key(self, backend: MemcachedBackend) -> None: + assert backend.delete("never_existed") is False + + def test_exists_true(self, backend: MemcachedBackend) -> None: + backend.set("present", b"data") + assert backend.exists("present") is True + + def test_exists_false(self, backend: MemcachedBackend) -> None: + assert backend.exists("absent") is False + + def test_overwrite_value(self, backend: MemcachedBackend) -> None: + backend.set("key", b"v1") + backend.set("key", b"v2") + assert backend.get("key") == b"v2" + + def test_empty_value(self, backend: MemcachedBackend) -> None: + 
backend.set("empty", b"") + assert backend.get("empty") == b"" + + def test_set_without_ttl(self, backend: MemcachedBackend) -> None: + """Keys with no TTL should persist (no server-side expiry).""" + backend.set("no_ttl", b"forever") + assert backend.get("no_ttl") == b"forever" + + def test_set_with_ttl_zero(self, backend: MemcachedBackend) -> None: + """TTL=0 means no expiry in Memcached.""" + backend.set("zero_ttl", b"data", ttl=0) + assert backend.get("zero_ttl") == b"data" + + +# --------------------------------------------------------------------------- +# TTL expiry +# --------------------------------------------------------------------------- + + +class TestTTLExpiry: + """Verify server-side TTL expiry with real Memcached.""" + + def test_key_expires_after_ttl(self, backend: MemcachedBackend) -> None: + """Key should disappear after TTL seconds.""" + backend.set("ephemeral", b"gone_soon", ttl=1) + assert backend.get("ephemeral") == b"gone_soon" + time.sleep(1.5) + assert backend.get("ephemeral") is None + + def test_key_alive_before_ttl(self, backend: MemcachedBackend) -> None: + """Key should be present before TTL expires.""" + backend.set("alive", b"still_here", ttl=5) + time.sleep(0.5) + assert backend.get("alive") == b"still_here" + + def test_negative_ttl_treated_as_no_expiry(self, backend: MemcachedBackend) -> None: + """Negative TTL should be treated as no expiry (expire=0).""" + backend.set("neg_ttl", b"data", ttl=-10) + assert backend.get("neg_ttl") == b"data" + + def test_ttl_clamped_to_30_day_max(self, backend: MemcachedBackend) -> None: + """TTL exceeding 30 days should be clamped, not rejected.""" + huge_ttl = MAX_MEMCACHED_TTL + 86400 # 31 days + backend.set("clamped", b"data", ttl=huge_ttl) + # Should still be stored (clamped, not rejected) + assert backend.get("clamped") == b"data" + + +# --------------------------------------------------------------------------- +# Key prefix isolation +# 
--------------------------------------------------------------------------- + + +class TestKeyPrefixIsolation: + """Verify key_prefix provides namespace isolation.""" + + def test_prefixed_keys_isolated(self, prefixed_backend: MemcachedBackend, backend: MemcachedBackend) -> None: + """Keys from prefixed backend should not collide with unprefixed.""" + # Set unprefixed first, then prefixed — avoids flush_all ordering issue + backend.set("shared_name", b"unprefixed_value") + prefixed_backend.set("shared_name", b"prefixed_value") + + assert prefixed_backend.get("shared_name") == b"prefixed_value" + assert backend.get("shared_name") == b"unprefixed_value" + + def test_delete_only_affects_own_prefix(self, prefixed_backend: MemcachedBackend, backend: MemcachedBackend) -> None: + """Deleting a prefixed key should not affect unprefixed.""" + prefixed_backend.set("to_del", b"prefixed") + backend.set("to_del", b"unprefixed") + + prefixed_backend.delete("to_del") + assert prefixed_backend.get("to_del") is None + assert backend.get("to_del") == b"unprefixed" + + +# --------------------------------------------------------------------------- +# Binary data integrity +# --------------------------------------------------------------------------- + + +class TestBinaryDataIntegrity: + """Verify binary data survives Memcached round-trip.""" + + def test_null_bytes(self, backend: MemcachedBackend) -> None: + data = b"\x00\x01\x02\x00\xff\xfe\x00" + backend.set("nulls", data) + assert backend.get("nulls") == data + + def test_high_bytes(self, backend: MemcachedBackend) -> None: + data = bytes(range(256)) + backend.set("all_bytes", data) + assert backend.get("all_bytes") == data + + def test_large_value_1mb(self, backend: MemcachedBackend) -> None: + """Memcached default max value is 1MB.""" + data = b"x" * (1024 * 1024 - 100) # Just under 1MB + backend.set("large", data) + assert backend.get("large") == data + assert len(backend.get("large")) == len(data) + + def 
test_msgpack_like_payload(self, backend: MemcachedBackend) -> None: + """Simulate what cachekit actually stores (MessagePack bytes).""" + import struct + + # Simulate a simple MessagePack-like payload + payload = struct.pack(">BHI", 0x92, 42, 1234567890) + b"\xa5hello" + backend.set("msgpack", payload) + assert backend.get("msgpack") == payload + + +# --------------------------------------------------------------------------- +# Concurrent thread safety +# --------------------------------------------------------------------------- + + +class TestConcurrentThreadSafety: + """Verify thread-safe operations against real Memcached. + + Each thread gets its own MemcachedBackend instance — pymemcache HashClient + connection pooling has known contention issues when a single pool is + hammered from many threads simultaneously. Per-thread backends is also + the realistic usage pattern (e.g., one backend per web worker). + """ + + def _make_backend(self) -> MemcachedBackend: + """Create a fresh backend for per-thread use.""" + return MemcachedBackend( + MemcachedBackendConfig( + servers=[f"{MEMCACHED_HOST}:{MEMCACHED_PORT}"], + connect_timeout=2.0, + timeout=2.0, + ) + ) + + def test_concurrent_writes_no_corruption(self, backend: MemcachedBackend) -> None: + """10 threads x 50 ops each — no data corruption.""" + num_threads = 10 + ops_per_thread = 50 + barrier = threading.Barrier(num_threads) + errors: list[str] = [] + + def worker(tid: int) -> None: + local_backend = self._make_backend() + try: + barrier.wait(timeout=10) + for i in range(ops_per_thread): + key = f"t{tid}_k{i}" + value = f"t{tid}_v{i}".encode() + local_backend.set(key, value) + got = local_backend.get(key) + if got != value: + errors.append(f"Thread {tid} op {i}: expected {value!r}, got {got!r}") + except Exception as exc: + errors.append(f"Thread {tid}: {exc}") + + threads = [threading.Thread(target=worker, args=(t,)) for t in range(num_threads)] + for t in threads: + t.start() + for t in threads: + 
t.join(timeout=30) + + assert not errors, f"Thread errors: {errors}" + + def test_concurrent_read_write_delete(self, backend: MemcachedBackend) -> None: + """Mixed read/write/delete operations under contention.""" + num_threads = 5 + ops = 100 + errors: list[str] = [] + barrier = threading.Barrier(num_threads) + + def mixed_worker(tid: int) -> None: + local_backend = self._make_backend() + try: + barrier.wait(timeout=10) + for i in range(ops): + key = f"mixed_{i % 20}" # Shared keys for contention + value = f"t{tid}_i{i}".encode() + if i % 3 == 0: + local_backend.set(key, value) + elif i % 3 == 1: + local_backend.get(key) # May be None + else: + local_backend.delete(key) # May return False + except Exception as exc: + errors.append(f"Thread {tid}: {exc}") + + threads = [threading.Thread(target=mixed_worker, args=(t,)) for t in range(num_threads)] + for t in threads: + t.start() + for t in threads: + t.join(timeout=30) + + assert not errors, f"Thread errors: {errors}" + + +# --------------------------------------------------------------------------- +# Health check +# --------------------------------------------------------------------------- + + +class TestHealthCheck: + """Verify health_check against live server.""" + + def test_healthy_server(self, backend: MemcachedBackend) -> None: + is_healthy, details = backend.health_check() + assert is_healthy is True + assert details["backend_type"] == "memcached" + assert details["latency_ms"] > 0 + assert details["configured_servers"] >= 1 + + def test_unhealthy_server(self) -> None: + """Health check against unreachable server returns False.""" + config = MemcachedBackendConfig( + servers=["localhost:19999"], # Nothing there + connect_timeout=0.5, + timeout=0.5, + ) + b = MemcachedBackend(config) + is_healthy, details = b.health_check() + assert is_healthy is False + assert "error" in details + + +# --------------------------------------------------------------------------- +# Decorator integration +# 
--------------------------------------------------------------------------- + + +class TestDecoratorIntegration: + """Verify @cache decorator works with real MemcachedBackend.""" + + def test_cache_minimal_with_memcached(self, backend: MemcachedBackend) -> None: + """@cache.minimal works with MemcachedBackend via set_default_backend.""" + from cachekit import cache + from cachekit.config.decorator import set_default_backend + + set_default_backend(backend) + + call_count = 0 + + @cache.minimal(ttl=10, namespace="mc_test") + def add(a: int, b: int) -> int: + nonlocal call_count + call_count += 1 + return a + b + + # First call — cache miss + result1 = add(2, 3) + assert result1 == 5 + assert call_count == 1 + + # Second call — cache hit + result2 = add(2, 3) + assert result2 == 5 + assert call_count == 1 # Not incremented + + # Different args — cache miss + result3 = add(10, 20) + assert result3 == 30 + assert call_count == 2 + + # Clean up + set_default_backend(None) + add.cache_clear() + + +# --------------------------------------------------------------------------- +# Batch operations & edge cases +# --------------------------------------------------------------------------- + + +class TestEdgeCases: + """Edge cases for Memcached protocol quirks.""" + + def test_many_keys_sequentially(self, backend: MemcachedBackend) -> None: + """Write and read back 500 keys to stress the connection pool.""" + for i in range(500): + backend.set(f"bulk_{i}", f"val_{i}".encode()) + + for i in range(500): + assert backend.get(f"bulk_{i}") == f"val_{i}".encode() + + def test_key_with_special_characters(self, backend: MemcachedBackend) -> None: + """Memcached keys must not contain whitespace or control chars. + + But typical cachekit keys (namespace:hash format) are safe. 
+ """ + key = "ns:func:abc123def456" + backend.set(key, b"data") + assert backend.get(key) == b"data" + + def test_rapid_set_delete_cycle(self, backend: MemcachedBackend) -> None: + """Rapid set/delete cycles should not leak or corrupt.""" + for i in range(200): + backend.set("cycle", f"iter_{i}".encode()) + backend.delete("cycle") + + assert backend.get("cycle") is None diff --git a/tests/unit/backends/test_memcached_backend.py b/tests/unit/backends/test_memcached_backend.py index 52c7d32..01bb5c9 100644 --- a/tests/unit/backends/test_memcached_backend.py +++ b/tests/unit/backends/test_memcached_backend.py @@ -280,28 +280,18 @@ class TestHealthCheck: def test_healthy_returns_true_with_details(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: """Test health_check returns (True, details) when server responds.""" - mock_hash_client.stats.return_value = {("127.0.0.1", 11211): {"pid": "1234"}} + mock_hash_client.get.return_value = None # health probe uses get() is_healthy, details = backend.health_check() assert is_healthy is True assert details["backend_type"] == "memcached" assert "latency_ms" in details assert isinstance(details["latency_ms"], float) - assert details["servers"] == 1 assert details["configured_servers"] == 1 - def test_unhealthy_on_empty_stats(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: - """Test health_check returns (False, ...) 
when no servers respond.""" - mock_hash_client.stats.return_value = {} - is_healthy, details = backend.health_check() - - assert is_healthy is False - assert details["backend_type"] == "memcached" - assert details["servers"] == 0 - def test_unhealthy_on_exception(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: """Test health_check returns (False, details) on exception.""" - mock_hash_client.stats.side_effect = ConnectionError("Connection refused") + mock_hash_client.get.side_effect = ConnectionError("Connection refused") is_healthy, details = backend.health_check() assert is_healthy is False @@ -309,5 +299,4 @@ def test_unhealthy_on_exception(self, backend: MemcachedBackend, mock_hash_clien assert "latency_ms" in details assert isinstance(details["latency_ms"], float) assert "error" in details - assert details["servers"] == 0 assert details["configured_servers"] == 1 From fe24d9f961b23eaac0ea0d7c1545c16a35ad427e Mon Sep 17 00:00:00 2001 From: Ray Walker Date: Thu, 26 Mar 2026 22:08:50 +1100 Subject: [PATCH 3/6] test: add MemcachedBackendConfig to architecture tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Was missing from BACKEND_CONFIGS parametrization — never added when the memcached backend was created. 
--- tests/architecture/test_backend_config_consistency.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/architecture/test_backend_config_consistency.py b/tests/architecture/test_backend_config_consistency.py index 049ce9a..8ff741d 100644 --- a/tests/architecture/test_backend_config_consistency.py +++ b/tests/architecture/test_backend_config_consistency.py @@ -18,6 +18,7 @@ from cachekit.backends.base_config import BaseBackendConfig from cachekit.backends.cachekitio.config import CachekitIOBackendConfig from cachekit.backends.file.config import FileBackendConfig +from cachekit.backends.memcached.config import MemcachedBackendConfig from cachekit.backends.redis.config import RedisBackendConfig # All backend config classes that must follow the pattern @@ -25,6 +26,7 @@ RedisBackendConfig, FileBackendConfig, CachekitIOBackendConfig, + MemcachedBackendConfig, ] # Required model_config settings from BaseBackendConfig From 1660f5e01bfc7d0b39d2ff80b4b6ccac7b926aae Mon Sep 17 00:00:00 2001 From: Ray Walker Date: Fri, 27 Mar 2026 08:14:21 +1100 Subject: [PATCH 4/6] fix: resolve CI failures and harden memcached config validation - Bump requests>=2.33.0 (CVE-2026-25645) in dev deps with py>=3.10 marker - Ignore pygments GHSA-5239-wwwm-4pmq in pip-audit (no upstream fix) - Validate server port is numeric and in range 1-65535 - Route health_check probe through _prefixed_key() for consistency --- .github/workflows/security-fast.yml | 4 +- pyproject.toml | 1 + src/cachekit/backends/memcached/backend.py | 2 +- src/cachekit/backends/memcached/config.py | 7 ++++ tests/unit/backends/test_memcached_config.py | 19 ++++++++++ uv.lock | 39 +++++++++++++++++--- 6 files changed, 64 insertions(+), 8 deletions(-) diff --git a/.github/workflows/security-fast.yml b/.github/workflows/security-fast.yml index 94c42d8..23fade4 100644 --- a/.github/workflows/security-fast.yml +++ b/.github/workflows/security-fast.yml @@ -118,7 +118,9 @@ jobs: - name: Run pip-audit run: | - uv run 
pip-audit --desc --format json --output pip-audit-report.json + # GHSA-5239-wwwm-4pmq: pygments ReDoS in AdlLexer (dev-only, no fix available) + uv run pip-audit --desc --format json --output pip-audit-report.json \ + --ignore-vuln GHSA-5239-wwwm-4pmq - name: Upload report if: always() diff --git a/pyproject.toml b/pyproject.toml index 38255bb..237720b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -218,6 +218,7 @@ dev = [ "httpx>=0.28.1", "hypothesis>=6.0.0", "pip-audit>=2.7.0", + "requests>=2.33.0; python_version >= '3.10'", "psutil>=5.9.0", "python-dotenv>=1.0.0", "pyyaml>=6.0.3", diff --git a/src/cachekit/backends/memcached/backend.py b/src/cachekit/backends/memcached/backend.py index 6c79127..fd59f4b 100644 --- a/src/cachekit/backends/memcached/backend.py +++ b/src/cachekit/backends/memcached/backend.py @@ -165,7 +165,7 @@ def health_check(self) -> tuple[bool, dict[str, Any]]: start = time.perf_counter() try: # HashClient has no stats() — probe with a harmless get - self._client.get("__cachekit_health__") + self._client.get(self._prefixed_key("__cachekit_health__")) elapsed_ms = (time.perf_counter() - start) * 1000 return ( True, diff --git a/src/cachekit/backends/memcached/config.py b/src/cachekit/backends/memcached/config.py index 3a82434..05cddc9 100644 --- a/src/cachekit/backends/memcached/config.py +++ b/src/cachekit/backends/memcached/config.py @@ -105,6 +105,13 @@ def validate_servers(cls, v: list[str]) -> list[str]: for server in v: if ":" not in server: raise ValueError(f"Server address must be in 'host:port' format, got: {server!r}") + _, port_str = server.rsplit(":", 1) + try: + port = int(port_str) + except ValueError: + raise ValueError(f"Port must be numeric, got: {server!r}") from None + if not (1 <= port <= 65535): + raise ValueError(f"Port must be 1-65535, got {port} in {server!r}") return v @classmethod diff --git a/tests/unit/backends/test_memcached_config.py b/tests/unit/backends/test_memcached_config.py index cf83c65..a94accb 100644 
--- a/tests/unit/backends/test_memcached_config.py +++ b/tests/unit/backends/test_memcached_config.py @@ -214,6 +214,25 @@ def test_servers_rejects_bad_format_in_list(self): errors = exc_info.value.errors() assert any("host:port" in str(e) for e in errors) + def test_servers_rejects_non_numeric_port(self): + """Test that servers rejects non-numeric port.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(servers=["mc1:abc"]) + errors = exc_info.value.errors() + assert any("numeric" in str(e).lower() for e in errors) + + def test_servers_rejects_port_out_of_range(self): + """Test that servers rejects port outside 1-65535.""" + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(servers=["mc1:0"]) + errors = exc_info.value.errors() + assert any("1-65535" in str(e) for e in errors) + + with pytest.raises(ValidationError) as exc_info: + MemcachedBackendConfig(servers=["mc1:70000"]) + errors = exc_info.value.errors() + assert any("1-65535" in str(e) for e in errors) + def test_extra_fields_rejected(self): """Test that extra fields are rejected due to extra='forbid'.""" with pytest.raises(ValidationError) as exc_info: diff --git a/uv.lock b/uv.lock index d4ad9c4..ffdd363 100644 --- a/uv.lock +++ b/uv.lock @@ -220,7 +220,8 @@ version = "0.14.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "msgpack" }, - { name = "requests" }, + { name = "requests", version = "2.32.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "requests", version = "2.33.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/58/3a/0cbeb04ea57d2493f3ec5a069a117ab467f85e4a10017c6d854ddcbff104/cachecontrol-0.14.3.tar.gz", hash = "sha256:73e7efec4b06b20d9267b441c1f733664f989fb8688391b670ca812d70795d11", size = 28985, upload-time = "2025-04-30T16:45:06.135Z" } wheels = 
[ @@ -291,6 +292,7 @@ dev = [ { name = "pytest-redis" }, { name = "python-dotenv" }, { name = "pyyaml" }, + { name = "requests", version = "2.33.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "ruff" }, ] fuzz = [ @@ -339,6 +341,7 @@ dev = [ { name = "pytest-redis", specifier = ">=3.0.0" }, { name = "python-dotenv", specifier = ">=1.0.0" }, { name = "pyyaml", specifier = ">=6.0.3" }, + { name = "requests", marker = "python_full_version >= '3.10'", specifier = ">=2.33.0" }, { name = "ruff", specifier = ">=0.6.0" }, ] fuzz = [{ name = "atheris", specifier = ">=2.3.0" }] @@ -1684,7 +1687,8 @@ dependencies = [ { name = "pip-requirements-parser" }, { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "platformdirs", version = "4.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "requests" }, + { name = "requests", version = "2.32.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "requests", version = "2.33.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "rich" }, { name = "toml" }, ] @@ -2336,17 +2340,40 @@ hiredis = [ name = "requests" version = "2.32.5" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, + { name = "certifi", marker = "python_full_version < '3.10'" }, + { name = "charset-normalizer", marker = "python_full_version < '3.10'" }, + { name = "idna", marker = "python_full_version < '3.10'" }, + { name = "urllib3", marker = "python_full_version < '3.10'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] +[[package]] +name = "requests" +version = "2.33.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "certifi", marker = "python_full_version >= '3.10'" }, + { name = "charset-normalizer", marker = "python_full_version >= '3.10'" }, + { name = "idna", marker = "python_full_version >= '3.10'" }, + { name = "urllib3", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" }, +] + [[package]] name = "rich" version = "14.2.0" From 6d1e912a2eb415900759090a958d3ff1eacef7cf Mon Sep 17 00:00:00 2001 From: Ray Walker Date: Fri, 27 Mar 2026 08:42:05 +1100 Subject: [PATCH 5/6] test: cover error paths and lazy import for 100% memcached coverage - Add error-raising tests for 
get/set/delete/exists except branches - Add __getattr__ lazy import tests for MemcachedBackend and unknown attr --- tests/unit/backends/test_memcached_backend.py | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/tests/unit/backends/test_memcached_backend.py b/tests/unit/backends/test_memcached_backend.py index 01bb5c9..4c0f2b5 100644 --- a/tests/unit/backends/test_memcached_backend.py +++ b/tests/unit/backends/test_memcached_backend.py @@ -113,6 +113,42 @@ def test_exists_returns_false_when_key_missing(self, backend: MemcachedBackend, result = backend.exists("mykey") assert result is False + def test_get_raises_backend_error_on_failure(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test get wraps exceptions in BackendError.""" + from cachekit.backends.errors import BackendError + + mock_hash_client.get.side_effect = ConnectionError("refused") + with pytest.raises(BackendError) as exc_info: + backend.get("key") + assert exc_info.value.error_type == BackendErrorType.TRANSIENT + + def test_set_raises_backend_error_on_failure(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test set wraps exceptions in BackendError.""" + from cachekit.backends.errors import BackendError + + mock_hash_client.set.side_effect = ConnectionError("refused") + with pytest.raises(BackendError) as exc_info: + backend.set("key", b"val", ttl=60) + assert exc_info.value.error_type == BackendErrorType.TRANSIENT + + def test_delete_raises_backend_error_on_failure(self, backend: MemcachedBackend, mock_hash_client: MagicMock) -> None: + """Test delete wraps exceptions in BackendError.""" + from cachekit.backends.errors import BackendError + + mock_hash_client.delete.side_effect = ConnectionError("refused") + with pytest.raises(BackendError) as exc_info: + backend.delete("key") + assert exc_info.value.error_type == BackendErrorType.TRANSIENT + + def test_exists_raises_backend_error_on_failure(self, backend: MemcachedBackend, 
mock_hash_client: MagicMock) -> None: + """Test exists wraps exceptions in BackendError.""" + from cachekit.backends.errors import BackendError + + mock_hash_client.get.side_effect = ConnectionError("refused") + with pytest.raises(BackendError) as exc_info: + backend.exists("key") + assert exc_info.value.error_type == BackendErrorType.TRANSIENT + @pytest.mark.unit class TestTTLBehavior: @@ -300,3 +336,22 @@ def test_unhealthy_on_exception(self, backend: MemcachedBackend, mock_hash_clien assert isinstance(details["latency_ms"], float) assert "error" in details assert details["configured_servers"] == 1 + + +@pytest.mark.unit +class TestLazyImport: + """Test __getattr__ lazy import in backends/__init__.py.""" + + def test_lazy_import_memcached_backend(self) -> None: + """Test MemcachedBackend can be imported via lazy __getattr__.""" + from cachekit.backends import MemcachedBackend + + assert MemcachedBackend is not None + assert callable(MemcachedBackend) + + def test_lazy_import_unknown_raises_attribute_error(self) -> None: + """Test unknown attribute raises AttributeError.""" + import cachekit.backends + + with pytest.raises(AttributeError, match="has no attribute"): + _ = cachekit.backends.NoSuchBackend # type: ignore[attr-defined] From 03315da1c56e94bfd0cb8d4a91f54ed42a6d9c5b Mon Sep 17 00:00:00 2001 From: Ray Walker Date: Fri, 27 Mar 2026 08:48:07 +1100 Subject: [PATCH 6/6] test: expand critical tests for full memcached coverage in PR CI CI PRs only run tests/critical/ for coverage. Added error paths, health_check failure, TTL clamping, key prefix, error classification, lazy import, and config validation to critical tests. 
--- .../test_memcached_backend_critical.py | 164 +++++++++++++++++- 1 file changed, 162 insertions(+), 2 deletions(-) diff --git a/tests/critical/test_memcached_backend_critical.py b/tests/critical/test_memcached_backend_critical.py index af52e35..5d5db03 100644 --- a/tests/critical/test_memcached_backend_critical.py +++ b/tests/critical/test_memcached_backend_critical.py @@ -3,7 +3,13 @@ These tests cover core MemcachedBackend functionality with mocked pymemcache: - Basic get/set/delete roundtrips - exists() checks -- health_check() implementation +- health_check() implementation (healthy + unhealthy) +- Error path coverage (all operations raise BackendError) +- Key prefix application +- TTL clamping to 30-day max +- Error classification (all pymemcache exception types) +- Lazy import via __getattr__ +- Config validation edge cases - Intent decorator integration - Default backend integration @@ -13,12 +19,15 @@ from __future__ import annotations +import socket from unittest.mock import MagicMock, patch import pytest +from cachekit.backends.errors import BackendError, BackendErrorType from cachekit.backends.memcached.backend import MemcachedBackend -from cachekit.backends.memcached.config import MemcachedBackendConfig +from cachekit.backends.memcached.config import MAX_MEMCACHED_TTL, MemcachedBackendConfig +from cachekit.backends.memcached.error_handler import classify_memcached_error @pytest.fixture @@ -102,6 +111,157 @@ def test_health_check_returns_tuple(backend): assert details["configured_servers"] == 1 +@pytest.mark.critical +def test_health_check_unhealthy(mock_hash_client): + """health_check() returns (False, ...) 
when server is down.""" + mock_hash_client.get.side_effect = ConnectionError("refused") + backend = MemcachedBackend(MemcachedBackendConfig()) + is_healthy, details = backend.health_check() + + assert is_healthy is False + assert "error" in details + assert details["backend_type"] == "memcached" + + +@pytest.mark.critical +def test_error_paths_raise_backend_error(mock_hash_client): + """All operations wrap exceptions in BackendError.""" + backend = MemcachedBackend(MemcachedBackendConfig()) + + # get error path + mock_hash_client.get.side_effect = ConnectionError("refused") + with pytest.raises(BackendError): + backend.get("key") + + # set error path + mock_hash_client.set.side_effect = ConnectionError("refused") + with pytest.raises(BackendError): + backend.set("key", b"val", ttl=60) + + # delete error path + mock_hash_client.delete.side_effect = ConnectionError("refused") + with pytest.raises(BackendError): + backend.delete("key") + + # exists error path (uses get internally) + with pytest.raises(BackendError): + backend.exists("key") + + +@pytest.mark.critical +def test_key_prefix_applied(mock_hash_client): + """Key prefix is prepended to all operations.""" + backend = MemcachedBackend(MemcachedBackendConfig(key_prefix="app:")) + mock_hash_client.get.return_value = b"data" + backend.get("mykey") + mock_hash_client.get.assert_called_with("app:mykey") + + +@pytest.mark.critical +def test_ttl_clamped_to_30_day_max(mock_hash_client): + """TTL exceeding 30 days is clamped, not rejected.""" + backend = MemcachedBackend(MemcachedBackendConfig()) + huge_ttl = MAX_MEMCACHED_TTL + 86400 # 31 days + backend.set("key", b"val", ttl=huge_ttl) + mock_hash_client.set.assert_called_once_with("key", b"val", expire=MAX_MEMCACHED_TTL) + + +@pytest.mark.critical +def test_ttl_none_and_zero_mean_no_expiry(mock_hash_client): + """TTL=None and TTL=0 both pass expire=0 (no expiry).""" + backend = MemcachedBackend(MemcachedBackendConfig()) + + backend.set("k1", b"v1", ttl=None) + 
mock_hash_client.set.assert_called_with("k1", b"v1", expire=0) + + backend.set("k2", b"v2", ttl=0) + mock_hash_client.set.assert_called_with("k2", b"v2", expire=0) + + +@pytest.mark.critical +def test_error_classification_all_types(): + """classify_memcached_error covers timeout, transient, permanent, unknown.""" + from pymemcache.exceptions import ( + MemcacheClientError, + MemcacheServerError, + MemcacheUnexpectedCloseError, + ) + + # Timeout + err = classify_memcached_error(socket.timeout("timed out"), operation="get") + assert err.error_type == BackendErrorType.TIMEOUT + + # Transient — connection close + err = classify_memcached_error(MemcacheUnexpectedCloseError(), operation="get") + assert err.error_type == BackendErrorType.TRANSIENT + + # Transient — server error + err = classify_memcached_error(MemcacheServerError("error"), operation="set") + assert err.error_type == BackendErrorType.TRANSIENT + + # Transient — ConnectionError + err = classify_memcached_error(ConnectionError("refused"), operation="get") + assert err.error_type == BackendErrorType.TRANSIENT + + # Permanent — client error + err = classify_memcached_error(MemcacheClientError("bad key"), operation="set") + assert err.error_type == BackendErrorType.PERMANENT + + # Unknown — fallback + err = classify_memcached_error(RuntimeError("weird"), operation="get") + assert err.error_type == BackendErrorType.UNKNOWN + + +@pytest.mark.critical +def test_lazy_import_memcached_backend(): + """MemcachedBackend is importable via lazy __getattr__ in backends/__init__.""" + from cachekit.backends import MemcachedBackend as LazyMB + + assert LazyMB is MemcachedBackend + + +@pytest.mark.critical +def test_lazy_import_unknown_raises(): + """Unknown attribute on backends package raises AttributeError.""" + import cachekit.backends + + with pytest.raises(AttributeError, match="has no attribute"): + _ = cachekit.backends.NoSuchBackend # type: ignore[attr-defined] + + +@pytest.mark.critical +def 
test_config_validates_port(): + """Config rejects non-numeric and out-of-range ports.""" + from pydantic import ValidationError + + # Empty server list + with pytest.raises(ValidationError): + MemcachedBackendConfig(servers=[]) + + # No colon (missing port) + with pytest.raises(ValidationError): + MemcachedBackendConfig(servers=["localhost"]) + + # Non-numeric port + with pytest.raises(ValidationError): + MemcachedBackendConfig(servers=["mc1:abc"]) + + # Port out of range + with pytest.raises(ValidationError): + MemcachedBackendConfig(servers=["mc1:0"]) + + with pytest.raises(ValidationError): + MemcachedBackendConfig(servers=["mc1:70000"]) + + +@pytest.mark.critical +def test_config_from_env(): + """from_env() returns a valid config with defaults.""" + config = MemcachedBackendConfig.from_env() + assert config.servers == ["127.0.0.1:11211"] + assert config.connect_timeout == 2.0 + + @pytest.mark.critical def test_intent_decorators_with_memcached_backend(mock_store): """Intent decorators work with explicit MemcachedBackend."""