diff --git a/base_cacheable_class/__init__.py b/base_cacheable_class/__init__.py
index 2b5cc96..3e9a899 100644
--- a/base_cacheable_class/__init__.py
+++ b/base_cacheable_class/__init__.py
@@ -1,22 +1,13 @@
 from .base import BaseCacheableClass
-from .cache.in_memory import InMemoryCache, InMemoryCacheDecorator
-from .interfaces import CacheDecoratorInterface, CacheInterface
+from .cache.async_ import AsyncCacheDecoratorFactory
+from .cache.sync import SyncCacheDecoratorFactory
+from .interfaces import CacheDecoratorInterface
 from .models import CacheItem

 __all__ = [
     "BaseCacheableClass",
-    "CacheInterface",
     "CacheDecoratorInterface",
     "CacheItem",
-    "InMemoryCache",
-    "InMemoryCacheDecorator",
+    "AsyncCacheDecoratorFactory",
+    "SyncCacheDecoratorFactory",
 ]
-
-# Conditional export for Redis classes
-try:
-    from .cache.redis import RedisCache, RedisCacheDecorator
-
-    __all__.extend(["RedisCache", "RedisCacheDecorator"])
-except ImportError:
-    # Redis is optional
-    pass
diff --git a/base_cacheable_class/base.py b/base_cacheable_class/base.py
index d8da62e..e1ba03a 100644
--- a/base_cacheable_class/base.py
+++ b/base_cacheable_class/base.py
@@ -1,3 +1,4 @@
+import asyncio
 from collections.abc import Callable
 from functools import wraps
 from typing import Any, TypeVar
@@ -7,24 +8,42 @@
 F = TypeVar("F", bound=Callable[..., Any])


+def _wrapper_sync_or_async(func: F, execute_func: Any):
+    if asyncio.iscoroutinefunction(func):
+
+        @wraps(func)
+        async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+            return await execute_func(self, *args, **kwargs)
+
+        return wrapper
+
+    @wraps(func)
+    def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+        return execute_func(self, *args, **kwargs)
+
+    return wrapper
+
+
 class BaseCacheableClass:
     def __init__(self, cache_decorator: CacheDecoratorInterface) -> None:
         self._cache_decorator = cache_decorator

+    def get_cache_client(self):
+        return self._cache_decorator.cache
+
     def wrapped(self, func: F) -> F:
         return self._cache_decorator()(func)  # type: ignore

     @classmethod
     def cache(cls, ttl: int | None = None) -> Callable[[F], F]:
         # Note: if `ttl` is None, then the cache is stored forever in-memory.
-        def decorator(func: F) -> F:
-            @wraps(func)
-            async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+        def decorator(func: F) -> Any:
+            def _execute(self, *args, **kwargs) -> Any:
                 if not hasattr(self, "_cache_decorator"):
                     raise AttributeError("_cache_decorator not found. Did you call super().__init__?")
-                return await self._cache_decorator(ttl=ttl)(func)(self, *args, **kwargs)
+                return self._cache_decorator(ttl=ttl)(func)(self, *args, **kwargs)

-            return wrapper  # type: ignore
+            return _wrapper_sync_or_async(func, _execute)

         return decorator

@@ -37,28 +56,24 @@ def invalidate(cls, target_func_name: str, param_mapping: dict[str, str] | None
         e.g. {'user_id': 'customer_id'} -> maps the current function's customer_id onto target_func's user_id
         """

-        def decorator(func: F) -> F:
-            @wraps(func)
-            async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+        def decorator(func: F) -> Any:
+            def _execute(self, *args, **kwargs) -> Any:
                 if not hasattr(self, "_cache_decorator"):
                     raise AttributeError("_cache_decorator not found. Did you call super().__init__?")
-                return await self._cache_decorator.invalidate(target_func_name, param_mapping)(func)(
-                    self, *args, **kwargs
-                )
+                return self._cache_decorator.invalidate(target_func_name, param_mapping)(func)(self, *args, **kwargs)

-            return wrapper  # type: ignore
+            return _wrapper_sync_or_async(func, _execute)

         return decorator

     @classmethod
     def invalidate_all(cls) -> Callable[[F], F]:
-        def decorator(func: F) -> F:
-            @wraps(func)
-            async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any:
+        def decorator(func: F) -> Any:
+            def _execute(self, *args, **kwargs) -> Any:
                 if not hasattr(self, "_cache_decorator"):
                     raise AttributeError("_cache_decorator not found. Did you call super().__init__?")
-                return await self._cache_decorator.invalidate_all()(func)(self, *args, **kwargs)
+                return self._cache_decorator.invalidate_all()(func)(self, *args, **kwargs)

-            return wrapper  # type: ignore
+            return _wrapper_sync_or_async(func, _execute)

         return decorator
diff --git a/base_cacheable_class/cache/async_/__init__.py b/base_cacheable_class/cache/async_/__init__.py
new file mode 100644
index 0000000..dd53e5b
--- /dev/null
+++ b/base_cacheable_class/cache/async_/__init__.py
@@ -0,0 +1,47 @@
+from .decorator import CacheDecorator
+from .in_memory import InMemoryCache
+from .interface import CacheInterface
+
+__all__ = ["InMemoryCache", "CacheInterface", "CacheDecorator", "AsyncCacheDecoratorFactory"]
+
+# Conditional export for Redis classes
+try:
+    from .redis.cache import RedisCache
+
+    __all__.extend(["RedisCache"])
+except ImportError:
+    # Redis is optional
+    pass
+
+
+from ..utils.key_builders import default_key, default_pattern
+
+
+class AsyncCacheDecoratorFactory:
+    @classmethod
+    def in_memory(cls, default_ttl: int = 60) -> CacheDecorator:
+        cache = InMemoryCache()
+        return CacheDecorator(cache, key_builder=default_key, pattern_builder=default_pattern, default_ttl=default_ttl)
+
+    @classmethod
+    def redis(
+        cls,
+        host: str,
+        port: int,
+        password: str,
+        username: str,
+        db: int = 0,
+        socket_timeout: float = 0.5,
+        socket_connect_timeout: float = 0.5,
+        default_ttl: int = 60,
+    ) -> CacheDecorator:
+        cache = RedisCache(
+            host=host,
+            port=port,
+            password=password,
+            username=username,
+            db=db,
+            socket_timeout=socket_timeout,
+            socket_connect_timeout=socket_connect_timeout,
+        )
+        return CacheDecorator(cache, key_builder=default_key, pattern_builder=default_pattern, default_ttl=default_ttl)
diff --git a/base_cacheable_class/cache/in_memory/decorator.py b/base_cacheable_class/cache/async_/decorator.py
similarity index 66%
rename from base_cacheable_class/cache/in_memory/decorator.py
rename to base_cacheable_class/cache/async_/decorator.py
index a81281a..03099b1 100644
--- a/base_cacheable_class/cache/in_memory/decorator.py
+++ b/base_cacheable_class/cache/async_/decorator.py
@@ -3,27 +3,27 @@
 from functools import wraps
 from typing import Any

-from ...interfaces import CacheDecoratorInterface, CacheInterface
+from ...interfaces import CacheDecoratorInterface
+from ..utils.key_builders import default_key, default_pattern
+from .interface import CacheInterface

 logger = logging.getLogger(__name__)


-class InMemoryCacheDecorator(CacheDecoratorInterface):
-    def __init__(self, cache: CacheInterface, default_ttl: int = 60):
+class CacheDecorator(CacheDecoratorInterface):
+    def __init__(
+        self, cache: CacheInterface, key_builder=default_key, pattern_builder=default_pattern, default_ttl: int = 60
+    ):
         self.cache = cache
         self.default_ttl = default_ttl
-
-    def key_builder(self, f:
Callable[..., Any], *args: Any, **kwargs: Any) -> str: - arg_str = str(args) - kwarg_str = str(kwargs) if kwargs else "{}" - func_name = getattr(f, "__name__", "unknown") - return f"{func_name}:{arg_str}:{kwarg_str}" + self._key_builder = key_builder + self._pattern_builder = pattern_builder def __call__(self, ttl: int | None = None) -> Callable[[Callable[..., Any]], Callable[..., Any]]: def decorator(func: Callable[..., Any]) -> Callable[..., Any]: @wraps(func) async def wrapper(*args: Any, **kwargs: Any) -> Any: - _key = self.key_builder(func, *args, **kwargs) + _key = self._key_builder(func, *args, **kwargs) current_ttl = ttl if ttl is not None else self.default_ttl try: @@ -52,17 +52,7 @@ def decorator(func: Callable[..., Any]) -> Callable[..., Any]: @wraps(func) async def wrapper(*args: Any, **kwargs: Any) -> Any: try: - pattern = rf"{target_func_name}:\(.*\):{{.*}}" - if param_mapping: - # 매핑된 파라미터 값 추출 - mapped_kwargs = { - target_param: kwargs[source_param] - for target_param, source_param in param_mapping.items() - if source_param in kwargs - } - kwargs_patterns = [rf".*'{k}':\s*'{v!s}'" for k, v in mapped_kwargs.items()] - pattern = rf"{target_func_name}:\(.*\):{{" + ".*".join(kwargs_patterns) + ".*}" - + pattern = self._pattern_builder(target_func_name, param_mapping, **kwargs) cached_keys = await self.cache.get_keys(pattern) for cache_key in cached_keys: @@ -81,7 +71,7 @@ def invalidate_all(self) -> Callable[[Callable[..., Any]], Callable[..., Any]]: def decorator(func: Callable[..., Any]) -> Callable[..., Any]: @wraps(func) async def wrapper(*args: Any, **kwargs: Any) -> Any: - _key = self.key_builder(func, *args, **kwargs) + _key = self._key_builder(func, *args, **kwargs) try: await self.cache.clear() except Exception as e: diff --git a/base_cacheable_class/cache/async_/in_memory/__init__.py b/base_cacheable_class/cache/async_/in_memory/__init__.py new file mode 100644 index 0000000..7a3d9c7 --- /dev/null +++ b/base_cacheable_class/cache/async_/in_memory/__init__.py @@ -0,0 +1,3 @@ +from .cache import InMemoryCache + +__all__ = ["InMemoryCache"] diff --git a/base_cacheable_class/cache/in_memory/cache.py b/base_cacheable_class/cache/async_/in_memory/cache.py similarity index 96% rename from base_cacheable_class/cache/in_memory/cache.py rename to base_cacheable_class/cache/async_/in_memory/cache.py index 9fe7e0f..4070810 100644 --- a/base_cacheable_class/cache/in_memory/cache.py +++ b/base_cacheable_class/cache/async_/in_memory/cache.py @@ -2,8 +2,8 @@ import time from typing import Any, cast -from ...interfaces import CacheInterface -from ...models import CacheItem +from ....models import CacheItem +from ..interface import CacheInterface class InMemoryCache(CacheInterface): diff --git a/base_cacheable_class/interfaces/cache.py b/base_cacheable_class/cache/async_/interface.py similarity index 100% rename from base_cacheable_class/interfaces/cache.py rename to base_cacheable_class/cache/async_/interface.py diff --git a/base_cacheable_class/cache/async_/redis/__init__.py b/base_cacheable_class/cache/async_/redis/__init__.py new file mode 100644 index 0000000..f13b257 --- /dev/null +++ b/base_cacheable_class/cache/async_/redis/__init__.py @@ -0,0 +1,3 @@ +from .cache import RedisCache + +__all__ = ["RedisCache"] diff --git a/base_cacheable_class/cache/redis/cache.py b/base_cacheable_class/cache/async_/redis/cache.py similarity index 98% rename from base_cacheable_class/cache/redis/cache.py rename to base_cacheable_class/cache/async_/redis/cache.py index ef13491..14d3c39 100644 --- 
a/base_cacheable_class/cache/redis/cache.py +++ b/base_cacheable_class/cache/async_/redis/cache.py @@ -4,7 +4,7 @@ from redis.asyncio import Redis -from base_cacheable_class import CacheInterface +from ..interface import CacheInterface class RedisCache(CacheInterface): diff --git a/base_cacheable_class/cache/in_memory/__init__.py b/base_cacheable_class/cache/in_memory/__init__.py deleted file mode 100644 index 129e1f3..0000000 --- a/base_cacheable_class/cache/in_memory/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .cache import InMemoryCache -from .decorator import InMemoryCacheDecorator - -__all__ = ["InMemoryCache", "InMemoryCacheDecorator"] diff --git a/base_cacheable_class/cache/redis/__init__.py b/base_cacheable_class/cache/redis/__init__.py deleted file mode 100644 index 38a0c77..0000000 --- a/base_cacheable_class/cache/redis/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .cache import RedisCache -from .decorator import RedisCacheDecorator - -__all__ = ["RedisCache", "RedisCacheDecorator"] diff --git a/base_cacheable_class/cache/redis/decorator.py b/base_cacheable_class/cache/redis/decorator.py deleted file mode 100644 index 221f061..0000000 --- a/base_cacheable_class/cache/redis/decorator.py +++ /dev/null @@ -1,98 +0,0 @@ -import logging -from collections.abc import Callable -from functools import wraps -from typing import Any - -from ...interfaces import CacheDecoratorInterface, CacheInterface - -logger = logging.getLogger(__name__) - - -class RedisCacheDecorator(CacheDecoratorInterface): - def __init__(self, cache: CacheInterface, default_ttl: int = 60): - self.cache = cache - self.default_ttl = default_ttl - - def key_builder(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> str: - arg_str = str(args) - kwarg_str = str(kwargs) if kwargs else "{}" - func_name = getattr(func, "__name__", "unknown") - return f"{func_name}:{arg_str}:{kwarg_str}" - - def __call__(self, ttl: int | None = None) -> Callable[..., Callable[..., Any]]: - def decorator(func: Callable[..., Any]) -> Callable[..., Any]: - @wraps(func) - async def wrapper(*args: Any, **kwargs: Any) -> Any: - _key: str = self.key_builder(func, *args, **kwargs) - current_ttl: int = ttl if ttl is not None else self.default_ttl - - try: - cached_value = await self.cache.get(_key) - if cached_value is not None: - return cached_value - - result = await func(*args, **kwargs) - if result is not None: - await self.cache.set(_key, result, ttl=current_ttl) - return result - - except (ConnectionError, TimeoutError) as e: - logger.warning(f"Redis connection or timeout issue: {e}, falling back.") - return await func(*args, **kwargs) - - except Exception as e: - logger.error(f"Error in cache decorator: {e}") - return await func(*args, **kwargs) - - return wrapper - - return decorator - - def invalidate( - self, target_func_name: str, param_mapping: dict[str, str] | None = None - ) -> Callable[..., Callable[..., Any]]: - def decorator(func: Callable[..., Any]) -> Callable[..., Any]: - @wraps(func) - async def wrapper(*args: Any, **kwargs: Any) -> Any: - try: - pattern = rf"{target_func_name}:\(.*\):{{.*}}" - - if param_mapping: - kwargs_patterns: list[str] = [ - rf".*'{k}':\s*'{v!s}'" - for k, v in { - t_param: kwargs[s_param] - for t_param, s_param in param_mapping.items() - if s_param in kwargs - }.items() - ] - pattern = rf"{target_func_name}:\(.*\):{{" + ".*".join(kwargs_patterns) + ".*}" - - cached_keys: list[str] = await self.cache.get_keys_regex( - target_func_name=target_func_name, pattern=pattern - ) - for cache_key in 
cached_keys: - await self.cache.delete(cache_key) - - except Exception as e: - logger.error(f"Error in cache invalidation: {e}") - - return await func(*args, **kwargs) - - return wrapper - - return decorator - - def invalidate_all(self) -> Callable[..., Callable[..., Any]]: - def decorator(func: Callable[..., Any]) -> Callable[..., Any]: - @wraps(func) - async def wrapper(*args: Any, **kwargs: Any) -> Any: - try: - await self.cache.clear() - except Exception as e: - logger.error(f"Error in cache clear: {e}") - return await func(*args, **kwargs) - - return wrapper - - return decorator diff --git a/base_cacheable_class/cache/sync/__init__.py b/base_cacheable_class/cache/sync/__init__.py new file mode 100644 index 0000000..efef9fb --- /dev/null +++ b/base_cacheable_class/cache/sync/__init__.py @@ -0,0 +1,47 @@ +from .decorator import CacheDecorator +from .in_memory import InMemoryCache +from .interface import CacheInterface + +__all__ = ["InMemoryCache", "CacheInterface", "CacheDecorator", "SyncCacheDecoratorFactory"] + +# Conditional export for Redis classes +try: + from .redis.cache import RedisCache + + __all__.extend(["RedisCache"]) +except ImportError: + # Redis is optional + pass + + +from ..utils.key_builders import default_key, default_pattern + + +class SyncCacheDecoratorFactory: + @classmethod + def in_memory(cls, default_ttl: int = 60) -> CacheDecorator: + cache = InMemoryCache() + return CacheDecorator(cache, key_builder=default_key, pattern_builder=default_pattern, default_ttl=default_ttl) + + @classmethod + def redis( + cls, + host: str, + port: int, + password: str, + username: str, + db: int = 0, + socket_timeout: float = 0.5, + socket_connect_timeout: float = 0.5, + default_ttl: int = 60, + ) -> CacheDecorator: + cache = RedisCache( + host=host, + port=port, + password=password, + username=username, + db=db, + socket_timeout=socket_timeout, + socket_connect_timeout=socket_connect_timeout, + ) + return CacheDecorator(cache, key_builder=default_key, pattern_builder=default_pattern, default_ttl=default_ttl) diff --git a/base_cacheable_class/cache/sync/decorator.py b/base_cacheable_class/cache/sync/decorator.py new file mode 100644 index 0000000..a2f1055 --- /dev/null +++ b/base_cacheable_class/cache/sync/decorator.py @@ -0,0 +1,83 @@ +import logging +from collections.abc import Callable +from functools import wraps +from typing import Any + +from ...interfaces import CacheDecoratorInterface +from ..utils import default_key, default_pattern +from .interface import CacheInterface + +logger = logging.getLogger(__name__) + + +class CacheDecorator(CacheDecoratorInterface): + def __init__( + self, cache: CacheInterface, key_builder=default_key, pattern_builder=default_pattern, default_ttl: int = 60 + ): + self.cache = cache + self.default_ttl = default_ttl + self._key_builder = key_builder + self._pattern_builder = pattern_builder + + def __call__(self, ttl: int | None = None) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + def decorator(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + _key = self._key_builder(func, *args, **kwargs) + current_ttl = ttl if ttl is not None else self.default_ttl + + try: + cached_value = self.cache.get(_key) + if cached_value is not None: + return cached_value + + result = func(*args, **kwargs) + + if result is not None: + self.cache.set(_key, result, ttl=current_ttl) + + return result + except Exception as e: + logger.error(f"Error in cache decorator: {e}") + return func(*args, 
**kwargs) + + return wrapper + + return decorator + + def invalidate( + self, target_func_name: str, param_mapping: dict[str, str] | None = None + ) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + def decorator(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + pattern = self._pattern_builder(target_func_name, param_mapping, **kwargs) + cached_keys = self.cache.get_keys(pattern) + + for cache_key in cached_keys: + self.cache.delete(cache_key) + + except Exception as e: + logger.error(f"Error in cache invalidation: {e}") + + return func(*args, **kwargs) + + return wrapper + + return decorator + + def invalidate_all(self) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + def decorator(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> Any: + _key = self._key_builder(func, *args, **kwargs) + try: + self.cache.clear() + except Exception as e: + logger.error(f"Error in cache clear: {e}") + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/base_cacheable_class/cache/sync/in_memory/__init__.py b/base_cacheable_class/cache/sync/in_memory/__init__.py new file mode 100644 index 0000000..7a3d9c7 --- /dev/null +++ b/base_cacheable_class/cache/sync/in_memory/__init__.py @@ -0,0 +1,3 @@ +from .cache import InMemoryCache + +__all__ = ["InMemoryCache"] diff --git a/base_cacheable_class/cache/sync/in_memory/cache.py b/base_cacheable_class/cache/sync/in_memory/cache.py new file mode 100644 index 0000000..72f4d5f --- /dev/null +++ b/base_cacheable_class/cache/sync/in_memory/cache.py @@ -0,0 +1,59 @@ +import re +import time +from typing import Any, cast + +from ....models import CacheItem +from ..interface import CacheInterface + + +class InMemoryCache(CacheInterface): + _instance: "InMemoryCache | None" = None + cache: dict[str, CacheItem] + + def __new__(cls) -> "InMemoryCache": + if cls._instance is None: + cls._instance = cast(InMemoryCache, super().__new__(cls)) + cls._instance.cache = {} + return cls._instance + + def set(self, key: str, value: Any, ttl: int | None = None) -> None: + expire_at = time.time() + ttl if ttl is not None else None + self.cache[key] = CacheItem(value, expire_at) + + def get(self, key: str) -> Any: + item = self.cache.get(key) + if item is None: + return None + if item.expire_at is None or time.time() < item.expire_at: + return item.value + del self.cache[key] # Remove expired item + return None + + def get_keys(self, pattern: str | None = None) -> list[str]: + if pattern is None: + return list(self.cache.keys()) + + cache_pattern = re.compile(pattern) + return [key for key in self.cache if cache_pattern.match(key)] + + def get_keys_regex(self, target_func_name: str, pattern: str | None = None) -> list[str]: + if pattern is None: + return list(self.cache.keys()) + + cache_pattern = re.compile(pattern) + return [key for key in self.cache if cache_pattern.match(key)] + + def exists(self, key: str) -> bool: + item = self.cache.get(key) + if item is None: + return False + if item.expire_at is None or time.time() < item.expire_at: + return True + del self.cache[key] # Remove expired item + return False + + def delete(self, key: str) -> None: + self.cache.pop(key, None) + + def clear(self) -> None: + self.cache.clear() diff --git a/base_cacheable_class/cache/sync/interface.py b/base_cacheable_class/cache/sync/interface.py new file mode 100644 index 0000000..b489a94 --- /dev/null +++ 
b/base_cacheable_class/cache/sync/interface.py @@ -0,0 +1,32 @@ +from abc import ABC, abstractmethod +from typing import Any + + +class CacheInterface(ABC): + @abstractmethod + def set(self, key: str, value: Any, ttl: int | None = None) -> None: + pass + + @abstractmethod + def get(self, key: str) -> Any: + pass + + @abstractmethod + def exists(self, key: str) -> bool: + pass + + @abstractmethod + def delete(self, key: str) -> Any: + pass + + @abstractmethod + def clear(self) -> Any: + pass + + @abstractmethod + def get_keys(self, pattern: str | None = None) -> list[str]: + pass + + @abstractmethod + def get_keys_regex(self, target_func_name: str, pattern: str | None = None) -> list[str]: + pass diff --git a/base_cacheable_class/cache/sync/redis/__init__.py b/base_cacheable_class/cache/sync/redis/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/base_cacheable_class/cache/sync/redis/cache.py b/base_cacheable_class/cache/sync/redis/cache.py new file mode 100644 index 0000000..aaf8a39 --- /dev/null +++ b/base_cacheable_class/cache/sync/redis/cache.py @@ -0,0 +1,87 @@ +import pickle +import re +from typing import Any + +from redis import Redis + +from ..interface import CacheInterface + + +class RedisCache(CacheInterface): + def __init__( + self, + host: str, + port: int, + password: str, + username: str, + db: int = 0, + socket_timeout: float = 0.5, + socket_connect_timeout: float = 0.5, + ): + self.redis = Redis( + host=host, + port=port, + password=password, + db=db, + username=username, + socket_timeout=socket_timeout, + socket_connect_timeout=socket_connect_timeout, + ) + + def set(self, key: str, value: Any, ttl: int | None = None) -> None: + pickled_value = pickle.dumps(value) + if ttl is not None: + self.redis.set(key, pickled_value, ex=ttl) + else: + self.redis.set(key, pickled_value) + + def get(self, key: str) -> Any: + data = self.redis.get(key) + if data is None: + return None + return pickle.loads(data) # noqa: S301 + + def exists(self, key: str) -> bool: + return bool((self.redis.exists(key)) == 1) + + def delete(self, key: str) -> None: + self.redis.delete(key) + + def clear(self) -> None: + self.redis.flushdb() + + def get_keys_regex(self, target_func_name: str, pattern: str | None = None) -> list[str]: + cursor: int = 0 + all_keys: list[str] = [] + + while True: + cursor, keys = self.redis.scan(cursor=cursor, match=f"{target_func_name}*") + if keys: + all_keys.extend(k.decode("utf-8") for k in keys) + if cursor == 0: + break + + if not pattern: + return all_keys + + return [k for k in all_keys if re.compile(pattern).search(k)] + + def get_keys(self, pattern: str | None = None) -> list[str]: + if not pattern: + pattern = "*" + + cursor = 0 + matched_keys = [] + while True: + cursor, keys = self.redis.scan(cursor=cursor, match=pattern) + matched_keys.extend(keys) + if cursor == 0: + break + + return [k.decode("utf-8") for k in matched_keys] + + def ping(self) -> None: + self.redis.ping() + + def close(self) -> None: + self.redis.close() diff --git a/base_cacheable_class/cache/utils/__init__.py b/base_cacheable_class/cache/utils/__init__.py new file mode 100644 index 0000000..d589e5a --- /dev/null +++ b/base_cacheable_class/cache/utils/__init__.py @@ -0,0 +1,3 @@ +from .key_builders import default_key, default_pattern + +__all__ = ["default_key", "default_pattern"] diff --git a/base_cacheable_class/cache/utils/key_builders.py b/base_cacheable_class/cache/utils/key_builders.py new file mode 100644 index 0000000..de1833d --- /dev/null +++ 
b/base_cacheable_class/cache/utils/key_builders.py @@ -0,0 +1,23 @@ +from collections.abc import Callable +from typing import Any + + +def default_key(f: Callable[..., Any], *args: Any, **kwargs: Any) -> str: + arg_str = str(args) + kwarg_str = str(kwargs) if kwargs else "{}" + func_name = getattr(f, "__name__", "unknown") + return f"{func_name}:{arg_str}:{kwarg_str}" + + +def default_pattern(target_func_name: str, param_mapping: dict[str, str] | None, **kwargs: Any) -> str: + pattern = rf"{target_func_name}:\(.*\):{{.*}}" + if param_mapping: + # 매핑된 파라미터 값 추출 + mapped_kwargs = { + target_param: kwargs[source_param] + for target_param, source_param in param_mapping.items() + if source_param in kwargs + } + kwargs_patterns = [rf".*'{k}':\s*'{v!s}'" for k, v in mapped_kwargs.items()] + pattern = rf"{target_func_name}:\(.*\):{{" + ".*".join(kwargs_patterns) + ".*}" + return pattern diff --git a/base_cacheable_class/interfaces/__init__.py b/base_cacheable_class/interfaces/__init__.py index 2fe7ce0..8ff937e 100644 --- a/base_cacheable_class/interfaces/__init__.py +++ b/base_cacheable_class/interfaces/__init__.py @@ -1,4 +1,3 @@ -from .cache import CacheInterface from .decorator import CacheDecoratorInterface -__all__ = ["CacheInterface", "CacheDecoratorInterface"] +__all__ = ["CacheDecoratorInterface"] diff --git a/examples/basic_example.py b/examples/basic_example.py index 9a87079..09daaae 100644 --- a/examples/basic_example.py +++ b/examples/basic_example.py @@ -1,12 +1,11 @@ import asyncio -from base_cacheable_class import BaseCacheableClass, InMemoryCache, InMemoryCacheDecorator +from base_cacheable_class import BaseCacheableClass, AsyncCacheDecoratorFactory class WeatherService(BaseCacheableClass): def __init__(self): - cache = InMemoryCache() - cache_decorator = InMemoryCacheDecorator(cache, default_ttl=300) # 5 minutes default + cache_decorator = AsyncCacheDecoratorFactory.in_memory(default_ttl=300) super().__init__(cache_decorator) @BaseCacheableClass.cache(ttl=60) # Cache for 1 minute diff --git a/examples/invalidation_example.py b/examples/invalidation_example.py index 6735355..3439065 100644 --- a/examples/invalidation_example.py +++ b/examples/invalidation_example.py @@ -1,12 +1,11 @@ import asyncio -from base_cacheable_class import BaseCacheableClass, InMemoryCache, InMemoryCacheDecorator +from base_cacheable_class import BaseCacheableClass, AsyncCacheDecoratorFactory class UserRepository(BaseCacheableClass): def __init__(self): - cache = InMemoryCache() - cache_decorator = InMemoryCacheDecorator(cache, default_ttl=600) # 10 minutes default + cache_decorator = AsyncCacheDecoratorFactory.in_memory(default_ttl=600) # 10 minutes default super().__init__(cache_decorator) # Simulate database self.db = { diff --git a/examples/redis_example.py b/examples/redis_example.py index cb0f767..ec58eaa 100644 --- a/examples/redis_example.py +++ b/examples/redis_example.py @@ -1,16 +1,17 @@ import asyncio import os -from base_cacheable_class import BaseCacheableClass, RedisCache, RedisCacheDecorator +from base_cacheable_class import BaseCacheableClass, AsyncCacheDecoratorFactory class ProductService(BaseCacheableClass): def __init__(self, redis_host="localhost", redis_port=6379, redis_password=None): # Initialize Redis cache - cache = RedisCache(host=redis_host, port=redis_port, password=redis_password or "", username="test", db=0) - cache_decorator = RedisCacheDecorator(cache, default_ttl=3600) # 1 hour default + cache_decorator = AsyncCacheDecoratorFactory.redis( + host=redis_host, 
port=redis_port, password=redis_password or "", username="default", db=0, default_ttl=3600 + ) super().__init__(cache_decorator) - self._redis_cache = cache # Keep reference for cleanup + self._redis_cache = self.get_cache_client() # Keep reference for cleanup # Simulate product database self.products = { diff --git a/examples/sync/basic_example.py b/examples/sync/basic_example.py new file mode 100644 index 0000000..a7b376a --- /dev/null +++ b/examples/sync/basic_example.py @@ -0,0 +1,59 @@ +import time +from base_cacheable_class import BaseCacheableClass, SyncCacheDecoratorFactory + + +class WeatherService(BaseCacheableClass): + def __init__(self): + cache_decorator = SyncCacheDecoratorFactory.in_memory(default_ttl=300) + super().__init__(cache_decorator) + + @BaseCacheableClass.cache(ttl=60) # Cache for 1 minute + def get_weather(self, city: str): + print(f"Fetching weather for {city}...") + # Simulate API call + return {"city": city, "temp": 25, "condition": "Sunny"} + + @BaseCacheableClass.cache(ttl=30) # Cache for 30 seconds + def get_forecast(self, city: str, days: int = 7): + print(f"Fetching {days}-day forecast for {city}...") + # Simulate API call + return {"city": city, "days": days, "forecast": [{"day": i, "temp": 20 + i} for i in range(days)]} + + +def main(): + service = WeatherService() + + print("=== Weather Service Example ===") + + # First call - will fetch from source + print("\n1. First call to get_weather:") + weather = service.get_weather("Seoul") + print(f"Result: {weather}") + + # Second call - will return from cache + print("\n2. Second call to get_weather (cached):") + weather = service.get_weather("Seoul") + print(f"Result: {weather}") + + # Different city - will fetch from source + print("\n3. Different city:") + weather = service.get_weather("Tokyo") + print(f"Result: {weather}") + + # Forecast example + print("\n4. Get forecast:") + forecast = service.get_forecast("Seoul", days=3) + print(f"Result: {forecast}") + + # Wait for cache to expire + print("\n5. Waiting 31 seconds for forecast cache to expire...") + time.sleep(31) + + # This will fetch again + print("\n6. 
Get forecast after cache expiry:") + forecast = service.get_forecast("Seoul", days=3) + print(f"Result: {forecast}") + + +if __name__ == "__main__": + main() diff --git a/examples/sync/invalidation_example.py b/examples/sync/invalidation_example.py new file mode 100644 index 0000000..3439065 --- /dev/null +++ b/examples/sync/invalidation_example.py @@ -0,0 +1,108 @@ +import asyncio + +from base_cacheable_class import BaseCacheableClass, AsyncCacheDecoratorFactory + + +class UserRepository(BaseCacheableClass): + def __init__(self): + cache_decorator = AsyncCacheDecoratorFactory.in_memory(default_ttl=600) # 10 minutes default + super().__init__(cache_decorator) + # Simulate database + self.db = { + 1: {"id": 1, "name": "Alice", "email": "alice@example.com"}, + 2: {"id": 2, "name": "Bob", "email": "bob@example.com"}, + 3: {"id": 3, "name": "Charlie", "email": "charlie@example.com"}, + } + + @BaseCacheableClass.cache(ttl=300) # Cache for 5 minutes + async def get_user(self, user_id: int): + print(f"Fetching user {user_id} from database...") + await asyncio.sleep(0.5) # Simulate DB query + return self.db.get(user_id) + + @BaseCacheableClass.cache() # Cache indefinitely + async def get_all_users(self): + print("Fetching all users from database...") + await asyncio.sleep(1) # Simulate DB query + return list(self.db.values()) + + @BaseCacheableClass.invalidate("get_user", param_mapping={"user_id": "user_id"}) + @BaseCacheableClass.invalidate("get_all_users") + async def update_user(self, user_id: int, name: str | None = None, email: str | None = None): + print(f"Updating user {user_id}...") + if user_id in self.db: + if name: + self.db[user_id]["name"] = name + if email: + self.db[user_id]["email"] = email + return self.db[user_id] + return None + + @BaseCacheableClass.invalidate("get_user", param_mapping={"user_id": "user_id"}) + @BaseCacheableClass.invalidate("get_all_users") + async def delete_user(self, user_id: int): + print(f"Deleting user {user_id}...") + return self.db.pop(user_id, None) + + @BaseCacheableClass.invalidate_all() + async def refresh_cache(self): + print("Refreshing all caches...") + return "All caches cleared" + + +async def main(): + repo = UserRepository() + + print("=== Cache Invalidation Example ===") + + # Get user - will fetch from DB + print("\n1. Get user 1:") + user = await repo.get_user(1) + print(f"Result: {user}") + + # Get same user - will return from cache + print("\n2. Get user 1 again (cached):") + user = await repo.get_user(1) + print(f"Result: {user}") + + # Get all users - will fetch from DB + print("\n3. Get all users:") + users = await repo.get_all_users() + print(f"Count: {len(users)}") + + # Update user - will invalidate specific user cache and all users cache + print("\n4. Update user 1:") + updated = await repo.update_user(1, name="Alice Updated") + print(f"Updated: {updated}") + + # Get user again - will fetch from DB (cache was invalidated) + print("\n5. Get user 1 after update:") + user = await repo.get_user(1) + print(f"Result: {user}") + + # Get all users again - will fetch from DB (cache was invalidated) + print("\n6. Get all users after update:") + users = await repo.get_all_users() + print(f"Count: {len(users)}") + + # Cache some data + print("\n7. Cache multiple users:") + await repo.get_user(1) + await repo.get_user(2) + await repo.get_user(3) + print("Users 1, 2, 3 are now cached") + + # Clear all caches + print("\n8. Clear all caches:") + await repo.refresh_cache() + + # All will fetch from DB + print("\n9. 
Get users after cache clear:") + user1 = await repo.get_user(1) + user2 = await repo.get_user(2) + print(f"User 1: {user1}") + print(f"User 2: {user2}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/sync/redis_example.py b/examples/sync/redis_example.py new file mode 100644 index 0000000..89d496f --- /dev/null +++ b/examples/sync/redis_example.py @@ -0,0 +1,151 @@ +import os + +from base_cacheable_class import BaseCacheableClass, SyncCacheDecoratorFactory + + +class ProductService(BaseCacheableClass): + def __init__(self, redis_host="localhost", redis_port=6379, redis_password=None): + # Initialize Redis cache + cache_decorator = SyncCacheDecoratorFactory.redis( + host=redis_host, port=redis_port, password=redis_password or "", username="default", db=0, default_ttl=3600 + ) + super().__init__(cache_decorator) + self._redis_cache = cache_decorator.cache # Keep reference for cleanup + + # Simulate product database + self.products = { + 1: {"id": 1, "name": "Laptop", "price": 999.99, "stock": 10}, + 2: {"id": 2, "name": "Mouse", "price": 29.99, "stock": 100}, + 3: {"id": 3, "name": "Keyboard", "price": 79.99, "stock": 50}, + } + + @BaseCacheableClass.cache(ttl=300) # Cache for 5 minutes + def get_product(self, product_id: int): + print(f"Fetching product {product_id} from database...") + return self.products.get(product_id) + + @BaseCacheableClass.cache(ttl=600) # Cache for 10 minutes + def search_products(self, keyword: str): + print(f"Searching products with keyword: {keyword}") + results = [] + for product in self.products.values(): + if keyword.lower() in str(product["name"]).lower(): + results.append(product) + return results + + @BaseCacheableClass.cache(ttl=60) # Cache for 1 minute + def get_inventory_status(self): + print("Calculating inventory status...") + total_products = len(self.products) + total_stock = sum(p["stock"] for p in self.products.values()) + low_stock = [p for p in self.products.values() if int(p["stock"]) < 20] + return { + "total_products": total_products, + "total_stock": total_stock, + "low_stock_items": len(low_stock), + "low_stock_products": low_stock, + } + + @BaseCacheableClass.invalidate("get_product", param_mapping={"product_id": "product_id"}) + @BaseCacheableClass.invalidate("get_inventory_status") + def update_stock(self, product_id: int, new_stock: int): + print(f"Updating stock for product {product_id} to {new_stock}") + if product_id in self.products: + self.products[product_id]["stock"] = new_stock + return self.products[product_id] + return None + + @BaseCacheableClass.invalidate_all() + def refresh_catalog(self): + print("Refreshing entire product catalog...") + # Simulate reloading products from database + return "Catalog refreshed and all caches cleared" + + def close(self): + """Clean up Redis connection""" + self._redis_cache.close() + + +def main(): + # Configure Redis connection from environment or use defaults + redis_host = os.getenv("REDIS_HOST", "localhost") + redis_port = int(os.getenv("REDIS_PORT", "6379")) + redis_password = os.getenv("REDIS_PASSWORD") + + print("=== Redis Cache Example ===") + print(f"Connecting to Redis at {redis_host}:{redis_port}") + + service = ProductService(redis_host, redis_port, redis_password) + + try: + # Test connection + service._redis_cache.ping() + print("Successfully connected to Redis!") + except Exception as e: + print(f"Failed to connect to Redis: {e}") + print("Make sure Redis is running. 
You can start it with: redis-server") + return + + try: + # Clear any existing cache + service._redis_cache.clear() + + # Get product - will fetch from DB + print("\n1. Get product 1:") + product = service.get_product(1) + print(f"Result: {product}") + + # Get same product - will return from Redis cache + print("\n2. Get product 1 again (from Redis):") + product = service.get_product(1) + print(f"Result: {product}") + + # Search products + print("\n3. Search for 'board':") + results = service.search_products("board") + print(f"Found {len(results)} products") + + # Get inventory status + print("\n4. Get inventory status:") + status = service.get_inventory_status() + print( + f"Status: Total products: {status['total_products']}, " + f"Total stock: {status['total_stock']}, " + f"Low stock items: {status['low_stock_items']}" + ) + + # Update stock - will invalidate related caches + print("\n5. Update stock for product 1:") + updated = service.update_stock(1, 5) + print(f"Updated: {updated}") + + # Get product again - will fetch from DB (cache invalidated) + print("\n6. Get product 1 after stock update:") + product = service.get_product(1) + print(f"Result: {product}") + + # Get inventory status again - will recalculate (cache invalidated) + print("\n7. Get inventory status after update:") + status = service.get_inventory_status() + print(f"Low stock items: {status['low_stock_items']}") + + # Demonstrate cache persistence across instances + print("\n8. Creating new service instance:") + service2 = ProductService(redis_host, redis_port, redis_password) + + # This will use cached data from the first instance + print("Getting product 2 from new instance (should be cached):") + service.get_product(2) # Cache it first + product = service2.get_product(2) # Get from cache + print(f"Result: {product}") + + service2.close() + + finally: + # Clean up + service.close() + print("\nClosed Redis connections") + + +if __name__ == "__main__": + main() diff --git a/tests/sync/__init__.py b/tests/sync/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/sync/test_base.py b/tests/sync/test_base.py new file mode 100644 index 0000000..7197c1e --- /dev/null +++ b/tests/sync/test_base.py @@ -0,0 +1,278 @@ +import pytest +from unittest.mock import patch +from base_cacheable_class import ( + CacheDecoratorInterface, + BaseCacheableClass, +) +from base_cacheable_class import SyncCacheDecoratorFactory + + +class SyncMockCacheDecorator(CacheDecoratorInterface): + def __init__(self): + self.call_count = 0 + self.invalidate_count = 0 + self.invalidate_all_count = 0 + + def __call__(self, ttl=None): + def decorator(func): + def wrapper(*args, **kwargs): + self.call_count += 1 + return func(*args, **kwargs) + + return wrapper + + return decorator + + def invalidate(self, target_func_name, param_mapping=None): + def decorator(func): + def wrapper(*args, **kwargs): + self.invalidate_count += 1 + return func(*args, **kwargs) + + return wrapper + + return decorator + + def invalidate_all(self): + def decorator(func): + def wrapper(*args, **kwargs): + self.invalidate_all_count += 1 + return func(*args, **kwargs) + + return wrapper + + return decorator + + +class TestService(BaseCacheableClass): + def __init__(self, cache_decorator): + super().__init__(cache_decorator) + + @BaseCacheableClass.cache(ttl=60) + def get_data(self, key: str): + return f"data_{key}" + + @BaseCacheableClass.invalidate("get_data", param_mapping={"key": "key"}) + def update_data(self, key: str, value: str): + return 
f"updated_{key}_{value}" + + @BaseCacheableClass.invalidate_all() + def clear_all(self): + return "cleared" + + +class TestSyncBaseCacheableClass: + def test_cache_decorator(self): + """Test cache decorator functionality""" + mock_decorator = SyncMockCacheDecorator() + service = TestService(mock_decorator) + + result = service.get_data("test") + assert result == "data_test" + assert mock_decorator.call_count == 1 + + def test_invalidate_decorator(self): + """Test invalidate decorator functionality""" + mock_decorator = SyncMockCacheDecorator() + service = TestService(mock_decorator) + + result = service.update_data("test", "value") + assert result == "updated_test_value" + assert mock_decorator.invalidate_count == 1 + + def test_invalidate_all_decorator(self): + """Test invalidate_all decorator functionality""" + mock_decorator = SyncMockCacheDecorator() + service = TestService(mock_decorator) + + result = service.clear_all() + assert result == "cleared" + assert mock_decorator.invalidate_all_count == 1 + + def test_cache_decorator_without_init(self): + """Test cache decorator raises error when _cache_decorator not found""" + + class BadService(BaseCacheableClass): + def __init__(self): + # Not calling super().__init__() + pass + + @BaseCacheableClass.cache() + def get_data(self): + return "data" + + service = BadService() + with pytest.raises(AttributeError, match="_cache_decorator not found"): + service.get_data() + + def test_invalidate_decorator_without_init(self): + """Test invalidate decorator raises error when _cache_decorator not found""" + + class BadService(BaseCacheableClass): + def __init__(self): + # Not calling super().__init__() + pass + + @BaseCacheableClass.invalidate("some_func") + def update_data(self): + return "data" + + service = BadService() + with pytest.raises(AttributeError, match="_cache_decorator not found"): + service.update_data() + + def test_invalidate_all_decorator_without_init(self): + """Test invalidate_all decorator raises error when _cache_decorator not found""" + + class BadService(BaseCacheableClass): + def __init__(self): + # Not calling super().__init__() + pass + + @BaseCacheableClass.invalidate_all() + def clear_data(self): + return "data" + + service = BadService() + with pytest.raises(AttributeError, match="_cache_decorator not found"): + service.clear_data() + + +class TestSyncInMemoryCacheDecorator: + def test_basic_caching(self): + """Test basic caching functionality""" + decorator = SyncCacheDecoratorFactory.in_memory(default_ttl=60) + decorator.invalidate_all() + + call_count = 0 + + @decorator(ttl=10) + def test_func(x): + nonlocal call_count + call_count += 1 + return x * 2 + + # First call + result = test_func(5) + assert result == 10 + assert call_count == 1 + + # Second call should use cache + result = test_func(5) + assert result == 10 + assert call_count == 1 # Not incremented + + # Different argument should call function + result = test_func(10) + assert result == 20 + assert call_count == 2 + + def test_invalidate(self): + """Test invalidate functionality""" + decorator = SyncCacheDecoratorFactory.in_memory(default_ttl=60) + decorator.invalidate_all() + + call_count = 0 + + @decorator() + def get_data(key): + nonlocal call_count + call_count += 1 + return f"data_{key}" + + @decorator.invalidate("get_data", param_mapping={"key": "key"}) + def update_data(key, value): + return f"updated_{key}_{value}" + + # Cache some data + result = get_data("test") + assert result == "data_test" + assert call_count == 1 + + # Should use cache + 
result = get_data("test") + assert result == "data_test" + assert call_count == 1 + + # Update should invalidate cache + update_data("test", "new") + + # Next call should hit function + result = get_data("test") + assert result == "data_test" + assert call_count == 2 + + def test_invalidate_all(self): + """Test invalidate all functionality""" + decorator = SyncCacheDecoratorFactory.in_memory(default_ttl=60) + decorator.invalidate_all() + + call_count = 0 + + @decorator() + def get_data(key): + nonlocal call_count + call_count += 1 + return f"data_{key}" + + @decorator.invalidate_all() + def clear_cache(): + return "cleared" + + # Cache multiple items + get_data("test1") + get_data("test2") + assert call_count == 2 + + # Should use cache + get_data("test1") + get_data("test2") + assert call_count == 2 + + # Clear all caches + clear_cache() + + # Should call functions again + get_data("test1") + get_data("test2") + assert call_count == 4 + + def test_error_handling(self): + """Test error handling in decorator""" + decorator = SyncCacheDecoratorFactory.in_memory(default_ttl=60) + cache = decorator.cache + + # Mock cache.get to raise an exception + with patch.object(cache, "get", side_effect=Exception("Cache error")): + + @decorator() + def test_func(x): + return x * 2 + + # Should still work even if cache fails + result = test_func(5) + assert result == 10 + + @pytest.mark.asyncio + async def test_none_result_caching(self): + """Test that None results are not cached""" + decorator = SyncCacheDecoratorFactory.in_memory(default_ttl=60) + decorator.invalidate_all() + + call_count = 0 + + @decorator() + def test_func(): + nonlocal call_count + call_count += 1 + return None + + # First call + result = test_func() + assert result is None + assert call_count == 1 + + # Second call should also call function (None not cached) + result = test_func() + assert result is None + assert call_count == 2 diff --git a/tests/sync/test_cache_integration.py b/tests/sync/test_cache_integration.py new file mode 100644 index 0000000..37f9e05 --- /dev/null +++ b/tests/sync/test_cache_integration.py @@ -0,0 +1,869 @@ +import time +from typing import Any + +import pytest + +from base_cacheable_class.cache.sync import InMemoryCache, CacheDecorator + + +@pytest.fixture +def cache_(): + cache = InMemoryCache() + cache.clear() + return cache + + +@pytest.fixture +def cache_decorator(cache_): + return CacheDecorator(cache_, default_ttl=1) + + +@pytest.fixture +def invalidate_decorator(cache_): + decorator = CacheDecorator(cache_, default_ttl=1) + return decorator.invalidate + + +def test_real_cache_integration(cache_decorator): + call_count = 0 + + @cache_decorator() + def test_func(): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}" + return result + + # 첫 번째 호출 + result1 = test_func() + assert result1 == "Call 1", f"Expected 'Call 1', but got {result1}" + + # 캐시된 결과 + result2 = test_func() + assert result2 == "Call 1", f"Expected 'Call 1', but got {result2}" + assert call_count == 1, f"Expected call_count to be 1, but got {call_count}" + + # TTL 만료 대기 + time.sleep(1.1) + + # TTL 만료 후 새로운 호출 + result3 = test_func() + assert result3 == "Call 2", f"Expected 'Call 2', but got {result3}" + assert call_count == 2, f"Expected call_count to be 2, but got {call_count}" + + +def test_cache_decorator_with_custom_ttl(cache_): + # Clear cache before test + cache_.clear() + + # Create decorator with custom TTL + decorator = CacheDecorator(cache_, default_ttl=60) + + call_count = 0 + + @decorator(ttl=0.5) # 
Override default TTL with 0.5 seconds + def test_func(): + nonlocal call_count + call_count += 1 + return f"Call {call_count}" + + # First call + result = test_func() + assert result == "Call 1", f"Expected 'Call 1', but got {result}" + assert call_count == 1 + + # Cached result + result = test_func() + assert result == "Call 1", f"Expected 'Call 1', but got {result}" + assert call_count == 1, f"Expected call_count to be 1, but got {call_count}" + + # Wait for TTL expiration + time.sleep(0.6) + + # Should call function again after TTL + result = test_func() + assert result == "Call 2", f"Expected 'Call 2', but got {result}" + assert call_count == 2, f"Expected call_count to be 2, but got {call_count}" + + +def test_cache_decorator_with_mock(mocker): + mock_cache = mocker.Mock(spec=InMemoryCache) + mock_cache.get.return_value = None + decorator = CacheDecorator(mock_cache, default_ttl=60) + + call_count = 0 + + @decorator() + def test_func(): + nonlocal call_count + call_count += 1 + return f"Call {call_count}" + + result = test_func() + assert result == "Call 1", f"Expected 'Call 1', but got {result}" + mock_cache.set.assert_called_once_with(mocker.ANY, "Call 1", ttl=60) + + mock_cache.get.return_value = "Call 1" + result = test_func() + assert result == "Call 1", f"Expected 'Call 1', but got {result}" + assert call_count == 1, f"Expected call_count to be 1, but got {call_count}" + + +def test_cache_with_different_parameters(cache_decorator): + call_count = 0 + + @cache_decorator() + def test_func(param1, param2): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}: {param1}, {param2}" + return result + + # 서로 다른 파라미터로 호출 + result1 = test_func("a", "b") + assert result1 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result1}" + assert call_count == 1 + + result2 = test_func("c", "d") + assert result2 == "Call 2: c, d", f"Expected 'Call 2: c, d', but got {result2}" + assert call_count == 2 + + # 같은 파라미터로 재호출 (캐시 히트 예상) + result3 = test_func("a", "b") + assert result3 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result3}" + assert call_count == 2 # 캐시 히트로 인해 call_count는 증가하지 않아야 함 + + result4 = test_func("c", "d") + assert result4 == "Call 2: c, d", f"Expected 'Call 2: c, d', but got {result4}" + assert call_count == 2 # 캐시 히트로 인해 call_count는 증가하지 않아야 함 + + # TTL 만료 대기 + time.sleep(1.1) + + # TTL 만료 후 재호출 + result5 = test_func("a", "b") + assert result5 == "Call 3: a, b", f"Expected 'Call 3: a, b', but got {result5}" + assert call_count == 3 + + result6 = test_func("c", "d") + assert result6 == "Call 4: c, d", f"Expected 'Call 4: c, d', but got {result6}" + assert call_count == 4 + + result7 = test_func("e", "f") + assert result7 == "Call 5: e, f", f"Expected 'Call 5: e, f', but got {result7}" + assert call_count == 5 + + +def test_cache_with_complex_parameters(cache_decorator): + call_count = 0 + + @cache_decorator() + def test_func(param1, param2, *args, **kwargs): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}: {param1}, {param2}, {args}, {kwargs}" + return result + + # 다양한 형태의 파라미터로 호출 + result1 = test_func(1, "b", "extra", key="value") + assert result1 == "Call 1: 1, b, ('extra',), {'key': 'value'}", f"Unexpected result: {result1}" + assert call_count == 1 + + # 같은 파라미터로 재호출 (캐시 히트 예상) + result2 = test_func(1, "b", "extra", key="value") + assert result2 == "Call 1: 1, b, ('extra',), {'key': 'value'}", f"Unexpected result: {result2}" + assert call_count == 1 # 캐시 히트로 인해 call_count는 증가하지 않아야 함 + + # 파라미터 순서 변경 + result3 = 
test_func("b", 1, "extra", key="value") + assert result3 == "Call 2: b, 1, ('extra',), {'key': 'value'}", f"Unexpected result: {result3}" + assert call_count == 2 + + # kwargs 순서 변경 (동일한 캐시 키 예상) + result4 = test_func(1, "b", "extra", value="key") + assert result4 == "Call 3: 1, b, ('extra',), {'value': 'key'}", f"Unexpected result: {result4}" + assert call_count == 3 + + +def test_cache_invalidation(cache_decorator, invalidate_decorator): + call_count = 0 + + @cache_decorator() + def test_func(): + nonlocal call_count + call_count += 1 + return f"Call {call_count}" + + @invalidate_decorator(target_func_name="test_func") + def invalidator(): + return "invalidated" + + # 첫 번째 호출 + result1 = test_func() + assert result1 == "Call 1", f"Expected 'Call 1', but got {result1}" + + # 캐시된 결과 + result2 = test_func() + assert result2 == "Call 1", f"Expected 'Call 1', but got {result2}" + assert call_count == 1, f"Expected call_count to be 1, but got {call_count}" + + # 캐시 무효화 + invalidator() + + # 새로운 호출 + result3 = test_func() + assert result3 == "Call 2", f"Expected 'Call 2', but got {result3}" + assert call_count == 2, f"Expected call_count to be 2, but got {call_count}" + + +def test_cache_invalidation_with_different_parameters(cache_decorator, invalidate_decorator): + call_count = 0 + + @cache_decorator() + def test_func(param1, param2): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}: {param1}, {param2}" + return result + + @invalidate_decorator(target_func_name="test_func", param_mapping={"param1": "param1", "param2": "param2"}) + def invalidator(param1, param2): + return "invalidated" + + # 서로 다른 파라미터로 호출 + result1 = test_func("a", "b") + assert result1 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result1}" + assert call_count == 1 + + # 같은 파라미터로 재호출 (캐시 히트 예상) + result2 = test_func("a", "b") + assert result2 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result2}" + assert call_count == 1 # 캐시 히트로 인해 call_count는 증가하지 않아야 함 + + # 캐시 무효화 + invalidator("a", "b") + + # 캐시 무효화 후 재호출 + result3 = test_func("a", "b") + assert result3 == "Call 2: a, b", f"Expected 'Call 2: a, b', but got {result3}" + assert call_count == 2 + + +class TestBasicCacheOperations: + """Test basic cache operations and TTL behavior""" + + @pytest.fixture + def cache_(self): + cache = InMemoryCache() + cache.clear() + return cache + + @pytest.fixture + def cache_decorator(self, cache_): + return CacheDecorator(cache_, default_ttl=1) + + @pytest.fixture + def invalidate_decorator(self, cache_): + decorator = CacheDecorator(cache_, default_ttl=1) + return decorator.invalidate + + def test_real_cache_integration(self, cache_decorator): + call_count = 0 + + @cache_decorator() + def test_func(): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}" + return result + + # 첫 번째 호출 + result1 = test_func() + assert result1 == "Call 1", f"Expected 'Call 1', but got {result1}" + + # 캐시된 결과 + result2 = test_func() + assert result2 == "Call 1", f"Expected 'Call 1', but got {result2}" + assert call_count == 1, f"Expected call_count to be 1, but got {call_count}" + + # TTL 만료 대기 + time.sleep(1.1) + + # TTL 만료 후 새로운 호출 + result3 = test_func() + assert result3 == "Call 2", f"Expected 'Call 2', but got {result3}" + assert call_count == 2, f"Expected call_count to be 2, but got {call_count}" + + def test_cache_decorator_with_mock(self, mocker): + """Test with mocked cache using pytest-mock""" + mock_cache = mocker.Mock(spec=InMemoryCache) + mock_cache.get.return_value = None + decorator 
= CacheDecorator(mock_cache, default_ttl=60) + + call_count = 0 + + @decorator() + def test_func(): + nonlocal call_count + call_count += 1 + return f"Call {call_count}" + + result = test_func() + assert result == "Call 1", f"Expected 'Call 1', but got {result}" + mock_cache.set.assert_called_once_with(mocker.ANY, "Call 1", ttl=60) + + mock_cache.get.return_value = "Call 1" + result = test_func() + assert result == "Call 1", f"Expected 'Call 1', but got {result}" + assert call_count == 1, f"Expected call_count to be 1, but got {call_count}" + + +class TestParameterHandling: + """Test cache behavior with different parameter types""" + + @pytest.fixture + def cache_decorator(self): + cache = InMemoryCache() + cache.clear() + return CacheDecorator(cache, default_ttl=1) + + def test_cache_with_different_parameters(self, cache_decorator): + """Test caching with different parameter values""" + call_count = 0 + + @cache_decorator() + def test_func(param1, param2): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}: {param1}, {param2}" + return result + + # 서로 다른 파라미터로 호출 + result1 = test_func("a", "b") + assert result1 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result1}" + assert call_count == 1 + + result2 = test_func("c", "d") + assert result2 == "Call 2: c, d", f"Expected 'Call 2: c, d', but got {result2}" + assert call_count == 2 + + # 같은 파라미터로 재호출 (캐시 히트 예상) + result3 = test_func("a", "b") + assert result3 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result3}" + assert call_count == 2 # 캐시 히트로 인해 call_count는 증가하지 않아야 함 + + result4 = test_func("c", "d") + assert result4 == "Call 2: c, d", f"Expected 'Call 2: c, d', but got {result4}" + assert call_count == 2 # 캐시 히트로 인해 call_count는 증가하지 않아야 함 + + # TTL 만료 대기 + time.sleep(1.1) + + # TTL 만료 후 재호출 + result5 = test_func("a", "b") + assert result5 == "Call 3: a, b", f"Expected 'Call 3: a, b', but got {result5}" + assert call_count == 3 + + result6 = test_func("c", "d") + assert result6 == "Call 4: c, d", f"Expected 'Call 4: c, d', but got {result6}" + assert call_count == 4 + + result7 = test_func("e", "f") + assert result7 == "Call 5: e, f", f"Expected 'Call 5: e, f', but got {result7}" + assert call_count == 5 + + def test_cache_with_complex_parameters(self, cache_decorator): + """Test caching with args and kwargs""" + call_count = 0 + + @cache_decorator() + def test_func(param1, param2, *args, **kwargs): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}: {param1}, {param2}, {args}, {kwargs}" + return result + + # 다양한 형태의 파라미터로 호출 + result1 = test_func(1, "b", "extra", key="value") + assert result1 == "Call 1: 1, b, ('extra',), {'key': 'value'}", f"Unexpected result: {result1}" + assert call_count == 1 + + # 같은 파라미터로 재호출 (캐시 히트 예상) + result2 = test_func(1, "b", "extra", key="value") + assert result2 == "Call 1: 1, b, ('extra',), {'key': 'value'}", f"Unexpected result: {result2}" + assert call_count == 1 # 캐시 히트로 인해 call_count는 증가하지 않아야 함 + + # 파라미터 순서 변경 + result3 = test_func("b", 1, "extra", key="value") + assert result3 == "Call 2: b, 1, ('extra',), {'key': 'value'}", f"Unexpected result: {result3}" + assert call_count == 2 + + # kwargs 순서 변경 (동일한 캐시 키 예상) + result4 = test_func(1, "b", "extra", value="key") + assert result4 == "Call 3: 1, b, ('extra',), {'value': 'key'}", f"Unexpected result: {result4}" + assert call_count == 3 + + +class TestCacheInvalidation: + """Test cache invalidation features""" + + @pytest.fixture + def cache_(self): + cache = InMemoryCache() + cache.clear() 
+ return cache + + @pytest.fixture + def cache_decorator(self, cache_): + return CacheDecorator(cache_, default_ttl=1) + + @pytest.fixture + def invalidate_decorator(self, cache_): + decorator = CacheDecorator(cache_, default_ttl=1) + return decorator.invalidate + + def test_cache_invalidation(self, cache_decorator, invalidate_decorator): + """Test basic cache invalidation""" + call_count = 0 + + @cache_decorator() + def test_func(): + nonlocal call_count + call_count += 1 + return f"Call {call_count}" + + @invalidate_decorator(target_func_name="test_func") + def invalidator(): + return "invalidated" + + # First call + result1 = test_func() + assert result1 == "Call 1", f"Expected 'Call 1', but got {result1}" + + # Cached result + result2 = test_func() + assert result2 == "Call 1", f"Expected 'Call 1', but got {result2}" + assert call_count == 1, f"Expected call_count to be 1, but got {call_count}" + + # Invalidate the cache + invalidator() + + # Fresh call after invalidation + result3 = test_func() + assert result3 == "Call 2", f"Expected 'Call 2', but got {result3}" + assert call_count == 2, f"Expected call_count to be 2, but got {call_count}" + + def test_cache_invalidation_with_different_parameters(self, cache_decorator, invalidate_decorator): + """Test cache invalidation with parameter mapping""" + call_count = 0 + + @cache_decorator() + def test_func(param1, param2): + nonlocal call_count + call_count += 1 + result = f"Call {call_count}: {param1}, {param2}" + return result + + @invalidate_decorator(target_func_name="test_func", param_mapping={"param1": "param1", "param2": "param2"}) + def invalidator(param1, param2): + return "invalidated" + + # Call with a given parameter set + result1 = test_func("a", "b") + assert result1 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result1}" + assert call_count == 1 + + # Call again with the same parameters (cache hit expected) + result2 = test_func("a", "b") + assert result2 == "Call 1: a, b", f"Expected 'Call 1: a, b', but got {result2}" + assert call_count == 1 # call_count must not increase on a cache hit + + # Invalidate the cache for these parameters + invalidator("a", "b") + + # Call again after invalidation + result3 = test_func("a", "b") + assert result3 == "Call 2: a, b", f"Expected 'Call 2: a, b', but got {result3}" + assert call_count == 2 + + +class TestEdgeCases: + """Test edge cases and special scenarios""" + + @pytest.fixture + def cache_decorator(self): + cache = InMemoryCache() + cache.clear() + return CacheDecorator(cache, default_ttl=1) + + def test_none_result_not_cached(self, cache_decorator): + """Test that None results are not cached""" + call_count = 0 + + @cache_decorator() + def returns_none(): + nonlocal call_count + call_count += 1 + return None + + # First call + result1 = returns_none() + assert result1 is None + assert call_count == 1 + + # Second call should also execute function (None not cached) + result2 = returns_none() + assert result2 is None + assert call_count == 2 + + def test_exception_handling(self, cache_decorator): + """Test exception handling in cached functions""" + call_count = 0 + + @cache_decorator() + def may_fail(should_fail: bool): + nonlocal call_count + call_count += 1 + if should_fail: + raise ValueError(f"Failed on call {call_count}") + return f"Success_{call_count}" + + # Exceptions cause retry due to error handling + with pytest.raises(ValueError): + may_fail(True) + + # Due to retry mechanism, function is called twice + assert call_count == 2 + + # Successful calls are cached + result1 = may_fail(False) + assert result1 == "Success_3" + + result2 = may_fail(False) + assert result2 == "Success_3" + assert call_count == 3 + + def 
test_custom_ttl_override(self, cache_decorator): + """Test TTL override per function""" + call_count = 0 + + @cache_decorator(ttl=0.3) # Override default TTL + def quick_expiry(): + nonlocal call_count + call_count += 1 + return f"Call_{call_count}" + + result1 = quick_expiry() + assert result1 == "Call_1" + + # Should still be cached + time.sleep(0.1) + result2 = quick_expiry() + assert result2 == "Call_1" + + # Should be expired + time.sleep(0.3) + result3 = quick_expiry() + assert result3 == "Call_2" + + +class TestRealWorldScenarios: + """Test real-world usage patterns""" + + @pytest.fixture + def cache_decorator(self): + cache = InMemoryCache() + cache.clear() + return CacheDecorator(cache) + + def test_api_response_caching(self, cache_decorator): + """Simulate API response caching""" + + class ApiClient: + def __init__(self): + self.request_count = 0 + + @cache_decorator(ttl=5) + def get_user(self, user_id: str) -> dict[str, Any]: + self.request_count += 1 + # Simulate API call + time.sleep(0.01) + return {"id": user_id, "name": f"User {user_id}", "request_number": self.request_count} + + client = ApiClient() + + # First request + user1 = client.get_user("123") + assert user1["id"] == "123" + assert user1["request_number"] == 1 + + # Cached request + user1_cached = client.get_user("123") + assert user1_cached["request_number"] == 1 + assert client.request_count == 1 + + # Different user + user2 = client.get_user("456") + assert user2["id"] == "456" + assert user2["request_number"] == 2 + + def test_database_query_caching(self, cache_decorator): + """Simulate database query caching with invalidation""" + + class Database: + def __init__(self): + self.data = {"key1": "value1", "key2": "value2"} + self.query_count = 0 + self.decorator = cache_decorator + + @property + def cached_get(self): + return self.decorator(ttl=10)(self._get) + + def _get(self, key: str) -> str | None: + self.query_count += 1 + time.sleep(0.01) # Simulate DB latency + return self.data.get(key) + + @property + def update(self): + return self.decorator.invalidate(target_func_name="_get", param_mapping={"key": "key"})(self._update) + + def _update(self, key: str, value: str) -> None: + self.data[key] = value + + db = Database() + + # Query data + value1 = db.cached_get("key1") + assert value1 == "value1" + assert db.query_count == 1 + + # Cached query + value1_cached = db.cached_get("key1") + assert value1_cached == "value1" + assert db.query_count == 1 + + # Update invalidates cache + db.update("key1", "new_value1") + + # Next query hits DB again + value1_new = db.cached_get("key1") + assert value1_new == "new_value1" + assert db.query_count == 2 + + +def test_cache_with_class_methods(cache_decorator): + """Test caching with class methods""" + + class DataService: + def __init__(self): + self.call_count = 0 + + @cache_decorator() + def get_data(self, item_id): + self.call_count += 1 + return f"Data_{item_id}_{self.call_count}" + + service = DataService() + + # First call + result1 = service.get_data("item1") + assert result1 == "Data_item1_1" + assert service.call_count == 1 + + # Cached call + result2 = service.get_data("item1") + assert result2 == "Data_item1_1" + assert service.call_count == 1 + + # Different parameter + result3 = service.get_data("item2") + assert result3 == "Data_item2_2" + assert service.call_count == 2 + + +def test_cache_with_complex_objects(cache_decorator): + """Test caching with complex parameter types""" + call_count = 0 + + @cache_decorator() + def process_data(data_dict, data_list): + 
nonlocal call_count + call_count += 1 + return f"Processed_{len(data_dict)}_{len(data_list)}_{call_count}" + + # Test with dictionaries and lists + dict1 = {"a": 1, "b": 2} + list1 = [1, 2, 3] + + result1 = process_data(dict1, list1) + assert result1 == "Processed_2_3_1" + + # Same content should hit cache + result2 = process_data({"a": 1, "b": 2}, [1, 2, 3]) + assert result2 == "Processed_2_3_1" + assert call_count == 1 + + # Different content should miss cache + result3 = process_data({"c": 3}, [4, 5]) + assert result3 == "Processed_1_2_2" + assert call_count == 2 + + +def test_selective_cache_invalidation(cache_decorator, invalidate_decorator): + """Test selective invalidation with parameter mapping""" + call_count = 0 + + @cache_decorator() + def get_user_data(user_id, include_details=False): + nonlocal call_count + call_count += 1 + return f"User_{user_id}_details_{include_details}_{call_count}" + + @invalidate_decorator(target_func_name="get_user_data", param_mapping={"user_id": "user_id"}) + def update_user(user_id, new_data): + return f"Updated_{user_id}" + + # Cache data for multiple users + result1 = get_user_data("user1", True) + result2 = get_user_data("user1", False) + result3 = get_user_data("user2", True) + assert call_count == 3 + + # Verify cache hits + get_user_data("user1", True) + get_user_data("user1", False) + get_user_data("user2", True) + assert call_count == 3 # No new calls + + # Invalidate user1 cache + update_user("user1", {"name": "New Name"}) + + # User1 cache should be invalidated + result4 = get_user_data("user1", True) + assert "4" in result4 + result5 = get_user_data("user1", False) + assert "5" in result5 + + # User2 cache might also be invalidated due to broad regex pattern matching + result6 = get_user_data("user2", True) + # This could be either the cached value or a new call + assert result6 in [result3, f"User_user2_details_True_{call_count}"] + + +def test_exception_handling_in_cached_function(cache_decorator): + """Test that exceptions are not cached""" + call_count = 0 + + @cache_decorator() + def unstable_function(should_fail): + nonlocal call_count + call_count += 1 + if should_fail: + raise ValueError(f"Failed on call {call_count}") + return f"Success_{call_count}" + + # First call fails - but due to error handling, it gets called twice + # (once in try block, once in except block) + with pytest.raises(ValueError): + unstable_function(True) + + assert call_count == 2 # Called twice due to retry in except block + + # Second call to same parameters should also fail and retry + with pytest.raises(ValueError): + unstable_function(True) + + assert call_count == 4 # Two more calls due to retry + + # Successful call should be cached + result1 = unstable_function(False) + assert result1 == "Success_5" # Count is 5 after previous failed calls + + result2 = unstable_function(False) + assert result2 == "Success_5" + assert call_count == 5 # No new calls due to cache + + +def test_ttl_precision(cache_decorator): + """Test precise TTL behavior""" + call_count = 0 + + @cache_decorator(ttl=0.5) # 0.5 second TTL + def short_lived_cache(): + nonlocal call_count + call_count += 1 + return f"Call_{call_count}" + + # Initial call + result1 = short_lived_cache() + assert result1 == "Call_1" + + # Check cache at different time intervals + time.sleep(0.3) + result2 = short_lived_cache() + assert result2 == "Call_1" # Should still be cached + + time.sleep(0.3) # Total 0.6 seconds + result3 = short_lived_cache() + assert result3 == "Call_2" # Should be expired + + 
assert call_count == 2 + + +def test_cache_key_collision_prevention(cache_decorator): + """Test that similar function names don't collide""" + call_count_1 = 0 + call_count_2 = 0 + + @cache_decorator() + def get_data(param): + nonlocal call_count_1 + call_count_1 += 1 + return f"Function1_{param}_{call_count_1}" + + @cache_decorator() + def get_data_v2(param): + nonlocal call_count_2 + call_count_2 += 1 + return f"Function2_{param}_{call_count_2}" + + # Call both functions with same parameter + result1 = get_data("test") + result2 = get_data_v2("test") + + assert result1 == "Function1_test_1" + assert result2 == "Function2_test_1" + + # Verify they maintain separate caches + result3 = get_data("test") + result4 = get_data_v2("test") + + assert result3 == "Function1_test_1" + assert result4 == "Function2_test_1" + assert call_count_1 == 1 + assert call_count_2 == 1 + + +def test_cache_size_monitoring(cache_): + """Test monitoring cache size and keys""" + decorator = CacheDecorator(cache_) + + @decorator() + def cache_item(key): + return f"Value_{key}" + + # Add multiple items + for i in range(5): + cache_item(f"key_{i}") + + # Check cache contents + all_keys = cache_.get_keys() + cache_keys = [k for k in all_keys if k.startswith("cache_item")] + assert len(cache_keys) == 5 + + # Verify key format + for key in cache_keys: + assert "cache_item" in key + assert "key_" in key diff --git a/tests/sync/test_interface.py b/tests/sync/test_interface.py new file mode 100644 index 0000000..f207390 --- /dev/null +++ b/tests/sync/test_interface.py @@ -0,0 +1,138 @@ +import time +from abc import ABC + + +import pytest + +from base_cacheable_class.cache.sync import CacheInterface, InMemoryCache + + +class TestInterfaces: + def test_cache_interface_is_abstract(self): + """Test that CacheInterface is abstract and cannot be instantiated""" + with pytest.raises(TypeError): + CacheInterface() + + def test_cache_interface_methods(self): + """Test that CacheInterface has all required abstract methods""" + assert hasattr(CacheInterface, "set") + assert hasattr(CacheInterface, "get") + assert hasattr(CacheInterface, "exists") + assert hasattr(CacheInterface, "delete") + assert hasattr(CacheInterface, "clear") + assert hasattr(CacheInterface, "get_keys") + assert hasattr(CacheInterface, "get_keys_regex") + + def test_interfaces_inherit_from_abc(self): + """Test that interfaces inherit from ABC""" + assert issubclass(CacheInterface, ABC) + + +class TestInMemoryCache: + def test_singleton(self): + """Test that InMemoryCache is a singleton""" + cache1 = InMemoryCache() + cache2 = InMemoryCache() + assert cache1 is cache2 + + def test_set_and_get(self): + """Test basic set and get operations""" + cache = InMemoryCache() + cache.clear() + + cache.set("key1", "value1") + result = cache.get("key1") + assert result == "value1" + + def test_get_nonexistent(self): + """Test getting a non-existent key returns None""" + cache = InMemoryCache() + cache.clear() + + result = cache.get("nonexistent") + assert result is None + + def test_ttl_expiration(self): + """Test that items expire after TTL""" + cache = InMemoryCache() + cache.clear() + + cache.set("key1", "value1", ttl=1) # 1 second TTL + + # Should exist immediately + result = cache.get("key1") + assert result == "value1" + + # Wait for expiration + time.sleep(1.1) + + # Should be expired + result = cache.get("key1") + assert result is None + + def test_no_ttl(self): + """Test that items without TTL don't expire""" + cache = InMemoryCache() + cache.clear() + + 
cache.set("key1", "value1", ttl=None) + + # Should exist even after some time + time.sleep(0.1) + result = cache.get("key1") + assert result == "value1" + + def test_exists(self): + """Test exists method""" + cache = InMemoryCache() + cache.clear() + + cache.set("key1", "value1") + assert cache.exists("key1") is True + assert cache.exists("nonexistent") is False + + def test_delete(self): + """Test delete method""" + cache = InMemoryCache() + cache.clear() + + cache.set("key1", "value1") + assert cache.exists("key1") is True + + cache.delete("key1") + assert cache.exists("key1") is False + + def test_clear(self): + """Test clear method""" + cache = InMemoryCache() + cache.clear() + + cache.set("key1", "value1") + cache.set("key2", "value2") + cache.clear() + + assert cache.exists("key1") is False + assert cache.exists("key2") is False + + def test_get_keys(self): + """Test get_keys method""" + cache = InMemoryCache() + cache.clear() + + cache.set("test:key1", "value1") + cache.set("test:key2", "value2") + cache.set("other:key3", "value3") + + # Get all keys + all_keys = cache.get_keys() + assert len(all_keys) == 3 + assert "test:key1" in all_keys + assert "test:key2" in all_keys + assert "other:key3" in all_keys + + # Get keys with pattern + test_keys = cache.get_keys("test:.*") + assert len(test_keys) == 2 + assert "test:key1" in test_keys + assert "test:key2" in test_keys + assert "other:key3" not in test_keys diff --git a/tests/test_cache_integration.py b/tests/test_cache_integration.py index 4a979d1..e1585b1 100644 --- a/tests/test_cache_integration.py +++ b/tests/test_cache_integration.py @@ -1,23 +1,27 @@ import asyncio +from typing import Any import pytest -from base_cacheable_class import InMemoryCache, InMemoryCacheDecorator +from base_cacheable_class.cache.async_ import InMemoryCache, CacheDecorator +from base_cacheable_class.cache.utils import default_key, default_pattern @pytest.fixture def cache_(): - return InMemoryCache() + cache = InMemoryCache() + asyncio.run(cache.clear()) + return cache @pytest.fixture def cache_decorator(cache_): - return InMemoryCacheDecorator(cache_, default_ttl=1) + return CacheDecorator(cache_, default_key, default_pattern, default_ttl=1) @pytest.fixture def invalidate_decorator(cache_): - decorator = InMemoryCacheDecorator(cache_, default_ttl=1) + decorator = CacheDecorator(cache_, default_key, default_pattern, default_ttl=1) return decorator.invalidate @@ -56,7 +60,7 @@ async def test_cache_decorator_with_custom_ttl(cache_): await cache_.clear() # Create decorator with custom TTL - decorator = InMemoryCacheDecorator(cache_, default_ttl=60) + decorator = CacheDecorator(cache_, default_key, default_pattern, default_ttl=60) call_count = 0 @@ -89,7 +93,7 @@ async def test_func(): async def test_cache_decorator_with_mock(mocker): mock_cache = mocker.Mock(spec=InMemoryCache) mock_cache.get.return_value = None - decorator = InMemoryCacheDecorator(mock_cache, default_ttl=60) + decorator = CacheDecorator(mock_cache, default_key, default_pattern, default_ttl=60) call_count = 0 @@ -253,14 +257,6 @@ async def invalidator(param1, param2): assert call_count == 2 -import asyncio -from typing import Any - -import pytest - -from base_cacheable_class import InMemoryCache, InMemoryCacheDecorator - - class TestBasicCacheOperations: """Test basic cache operations and TTL behavior""" @@ -272,11 +268,11 @@ def cache_(self): @pytest.fixture def cache_decorator(self, cache_): - return InMemoryCacheDecorator(cache_, default_ttl=1) + return CacheDecorator(cache_, 
default_key, default_pattern, default_ttl=1) @pytest.fixture def invalidate_decorator(self, cache_): - decorator = InMemoryCacheDecorator(cache_, default_ttl=1) + decorator = CacheDecorator(cache_, default_key, default_pattern, default_ttl=1) return decorator.invalidate @pytest.mark.asyncio @@ -312,7 +308,7 @@ async def test_cache_decorator_with_mock(self, mocker): """Test with mocked cache using pytest-mock""" mock_cache = mocker.Mock(spec=InMemoryCache) mock_cache.get.return_value = None - decorator = InMemoryCacheDecorator(mock_cache, default_ttl=60) + decorator = CacheDecorator(mock_cache, default_key, default_pattern, default_ttl=60) call_count = 0 @@ -339,7 +335,7 @@ class TestParameterHandling: def cache_decorator(self): cache = InMemoryCache() asyncio.run(cache.clear()) - return InMemoryCacheDecorator(cache, default_ttl=1) + return CacheDecorator(cache, default_key, default_pattern, default_ttl=1) @pytest.mark.asyncio async def test_cache_with_different_parameters(self, cache_decorator): @@ -431,11 +427,11 @@ def cache_(self): @pytest.fixture def cache_decorator(self, cache_): - return InMemoryCacheDecorator(cache_, default_ttl=1) + return CacheDecorator(cache_, default_key, default_pattern, default_ttl=1) @pytest.fixture def invalidate_decorator(self, cache_): - decorator = InMemoryCacheDecorator(cache_, default_ttl=1) + decorator = CacheDecorator(cache_, default_key, default_pattern, default_ttl=1) return decorator.invalidate @pytest.mark.asyncio @@ -512,7 +508,7 @@ class TestEdgeCases: def cache_decorator(self): cache = InMemoryCache() asyncio.run(cache.clear()) - return InMemoryCacheDecorator(cache, default_ttl=1) + return CacheDecorator(cache, default_key, default_pattern, default_ttl=1) @pytest.mark.asyncio async def test_none_result_not_cached(self, cache_decorator): @@ -595,7 +591,7 @@ class TestRealWorldScenarios: def cache_decorator(self): cache = InMemoryCache() asyncio.run(cache.clear()) - return InMemoryCacheDecorator(cache) + return CacheDecorator(cache, default_key, default_pattern) @pytest.mark.asyncio async def test_api_response_caching(self, cache_decorator): @@ -676,32 +672,6 @@ async def _update(self, key: str, value: str) -> None: assert db.query_count == 2 -import asyncio -import pytest -from base_cacheable_class import InMemoryCache, InMemoryCacheDecorator - - -@pytest.fixture -def cache(): - """Create a fresh cache instance""" - cache = InMemoryCache() - asyncio.run(cache.clear()) - return cache - - -@pytest.fixture -def cache_decorator(cache): - """Create a cache decorator with 1 second TTL""" - return InMemoryCacheDecorator(cache, default_ttl=1) - - -@pytest.fixture -def invalidate_decorator(cache): - """Create an invalidate decorator""" - decorator = InMemoryCacheDecorator(cache, default_ttl=1) - return decorator.invalidate - - @pytest.mark.asyncio async def test_concurrent_cache_access(cache_decorator): """Test concurrent access to cached function""" @@ -924,9 +894,9 @@ async def get_data_v2(param): @pytest.mark.asyncio -async def test_cache_size_monitoring(cache): +async def test_cache_size_monitoring(cache_): """Test monitoring cache size and keys""" - decorator = InMemoryCacheDecorator(cache) + decorator = CacheDecorator(cache_, default_key, default_pattern) @decorator() async def cache_item(key): @@ -937,7 +907,7 @@ async def cache_item(key): await cache_item(f"key_{i}") # Check cache contents - all_keys = await cache.get_keys() + all_keys = await cache_.get_keys() cache_keys = [k for k in all_keys if k.startswith("cache_item")] assert 
len(cache_keys) == 5 diff --git a/tests/test_interface.py b/tests/test_interface.py index aaec898..d6d4749 100644 --- a/tests/test_interface.py +++ b/tests/test_interface.py @@ -1,8 +1,15 @@ +import asyncio from abc import ABC +from unittest.mock import patch import pytest -from base_cacheable_class import CacheDecoratorInterface, CacheInterface, CacheItem +from base_cacheable_class import ( + CacheDecoratorInterface, + CacheItem, +) +from base_cacheable_class.cache.async_ import CacheInterface, InMemoryCache, CacheDecorator +from base_cacheable_class.cache.utils import default_key, default_pattern class TestCacheItem: @@ -177,7 +184,7 @@ async def test_basic_caching(self): """Test basic caching functionality""" cache = InMemoryCache() await cache.clear() - decorator = InMemoryCacheDecorator(cache, default_ttl=60) + decorator = CacheDecorator(cache, default_key, default_pattern, default_ttl=60) call_count = 0 @@ -205,29 +212,43 @@ async def test_func(x): @pytest.mark.asyncio async def test_key_builder(self): """Test key builder creates correct keys""" - cache = InMemoryCache() - decorator = InMemoryCacheDecorator(cache) def test_func(): pass # Test with different arguments - key1 = decorator.key_builder(test_func, "self", "arg1", "arg2") + key1 = default_key(test_func, "self", "arg1", "arg2") assert key1 == "test_func:('self', 'arg1', 'arg2'):{}" - key2 = decorator.key_builder(test_func, "self", kwarg1="value1") + key2 = default_key(test_func, "self", kwarg1="value1") assert key2 == "test_func:('self',):{'kwarg1': 'value1'}" # Test without self (function call) - key3 = decorator.key_builder(test_func, "arg1", "arg2") + key3 = default_key(test_func, "arg1", "arg2") assert key3 == "test_func:('arg1', 'arg2'):{}" + @pytest.mark.asyncio + async def test_pattern_builder(self): + """Test pattern builder creates correct patterns""" + + # Test when param_mapping in kwargs + param_mapping1 = {"arg1": "arg1"} + kwargs1 = {"arg1": "arg1", "arg2": "arg2"} + pattern1 = default_pattern("test_func", param_mapping1, **kwargs1) + assert pattern1 == "test_func:\\(.*\\):{.*'arg1':\\s*'arg1'.*}" + + # Test when param_mapping not in kwargs + param_mapping2 = {"arg2": "arg2"} + kwargs2 = {"arg22": "arg22"} + pattern2 = default_pattern("test_func", param_mapping2, **kwargs2) + assert pattern2 == "test_func:\\(.*\\):{.*}" + @pytest.mark.asyncio async def test_invalidate(self): """Test cache invalidation""" cache = InMemoryCache() await cache.clear() - decorator = InMemoryCacheDecorator(cache) + decorator = CacheDecorator(cache, default_key, default_pattern) call_count = 0 @@ -264,7 +285,7 @@ async def test_invalidate_all(self): """Test invalidate all functionality""" cache = InMemoryCache() await cache.clear() - decorator = InMemoryCacheDecorator(cache) + decorator = CacheDecorator(cache, default_key, default_pattern) call_count = 0 @@ -300,7 +321,7 @@ async def clear_cache(): async def test_error_handling(self): """Test error handling in decorator""" cache = InMemoryCache() - decorator = InMemoryCacheDecorator(cache) + decorator = CacheDecorator(cache, default_key, default_pattern) # Mock cache.get to raise an exception with patch.object(cache, "get", side_effect=Exception("Cache error")): @@ -318,7 +339,7 @@ async def test_none_result_caching(self): """Test that None results are not cached""" cache = InMemoryCache() await cache.clear() - decorator = InMemoryCacheDecorator(cache) + decorator = CacheDecorator(cache, default_key, default_pattern) call_count = 0
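A minimal sketch of the synchronous usage pattern these tests exercise, assuming CacheDecorator is exported from base_cacheable_class.cache.sync alongside InMemoryCache (this section only shows the InMemoryCache and CacheInterface imports explicitly, so the import path is an assumption, not part of the diff):

# Sketch only: behavior mirrors the sync tests above; the CacheDecorator
# import location is assumed.
from base_cacheable_class.cache.sync import InMemoryCache
from base_cacheable_class.cache.sync import CacheDecorator  # assumed export

cache = InMemoryCache()                       # singleton, shared across callers
decorator = CacheDecorator(cache, default_ttl=1)

calls = 0

@decorator(ttl=5)                             # per-function TTL override, as in the tests
def get_user(user_id: str) -> dict:
    global calls
    calls += 1
    return {"id": user_id, "calls": calls}

@decorator.invalidate(target_func_name="get_user", param_mapping={"user_id": "user_id"})
def update_user(user_id: str) -> str:
    return f"updated {user_id}"

assert get_user("123")["calls"] == 1
assert get_user("123")["calls"] == 1          # cache hit, no new call
update_user("123")                            # drops the matching get_user entry
assert get_user("123")["calls"] == 2          # miss after invalidation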