Mirror of https://github.com/davidhalter/typeshed.git, synced 2026-05-07 22:10:10 +08:00.
cachetools: precise typing for decorators and cached(); expose cache_info/cache_clear and fix keys signatures (#14770)
This commit is contained in:
@@ -0,0 +1,104 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Hashable
|
||||
from typing import Any
|
||||
from typing_extensions import assert_type
|
||||
|
||||
from cachetools import LRUCache, cached, keys as cachekeys
|
||||
from cachetools.func import fifo_cache, lfu_cache, lru_cache, rr_cache, ttl_cache
|
||||
|
||||
# Tests for cachetools.cached

# Give the cache an explicit parameterization so the type checker does not
# fall back to Unknown for the key/value types.
cache_inst: LRUCache[int, int] = LRUCache(maxsize=128)


@cached(cache_inst)
def check_cached(x: int) -> int:
    return 2 * x


assert_type(check_cached(3), int)
# Without info=True the wrapper does not expose cache_info/cache_clear,
# so those attributes are deliberately not touched here.
|
||||
|
||||
|
||||
@cached(cache_inst, info=True)
def check_cached_with_info(x: int) -> int:
    return 1 + x


assert_type(check_cached_with_info(4), int)
# info=True upgrades the wrapper type, so the statistics API is available.
assert_type(check_cached_with_info.cache_info().misses, int)
check_cached_with_info.cache_clear()
|
||||
|
||||
|
||||
# Tests for cachetools.func decorators

# Bare-decorator form: the function itself is passed as the first argument.
@lru_cache
def lru_noparens(x: int) -> int:
    return 2 * x


# Parameterized form: the decorator factory is called first.
@lru_cache(maxsize=32)
def lru_with_maxsize(x: int) -> int:
    return 3 * x


# Both forms must keep the wrapped function's return type and expose the
# functools-style introspection API.
assert_type(lru_noparens(3), int)
assert_type(lru_noparens.cache_info().hits, int)
assert_type(lru_with_maxsize(3), int)
assert_type(lru_with_maxsize.cache_info().misses, int)
assert_type(lru_with_maxsize.cache_parameters(), dict[str, Any])
lru_with_maxsize.cache_clear()
|
||||
|
||||
|
||||
# The remaining cachetools.func decorators, each in bare-decorator form;
# each assertion sits next to the definition it checks.
@fifo_cache
def fifo_func(x: int) -> int:
    return x


assert_type(fifo_func(1), int)
assert_type(fifo_func.cache_info().currsize, int)


@lfu_cache
def lfu_func(x: int) -> int:
    return x


assert_type(lfu_func(1), int)
assert_type(lfu_func.cache_parameters(), dict[str, Any])


@rr_cache
def rr_func(x: int) -> int:
    return x


assert_type(rr_func(1), int)


@ttl_cache
def ttl_func(x: int) -> int:
    return x


assert_type(ttl_func(1), int)
|
||||
|
||||
|
||||
# Tests for cachetools.keys — every key factory returns a hashable tuple.

plain_key = cachekeys.hashkey(1, "a")
assert_type(plain_key, tuple[Hashable, ...])

typed_key = cachekeys.typedkey(1, "x")
assert_type(typed_key, tuple[Hashable, ...])


class C:
    def method(self, a: int) -> int:
        return a


obj = C()

# The method variants take the bound instance first and ignore it for typing.
method_key = cachekeys.methodkey(obj, 5)
assert_type(method_key, tuple[Hashable, ...])

typed_method_key = cachekeys.typedmethodkey(obj, 2)
assert_type(typed_method_key, tuple[Hashable, ...])
|
||||
@@ -2,7 +2,7 @@ from _typeshed import IdentityFunction, Unused
|
||||
from collections.abc import Callable, Iterator, MutableMapping, Sequence
|
||||
from contextlib import AbstractContextManager
|
||||
from threading import Condition
|
||||
from typing import Any, TypeVar, overload
|
||||
from typing import Any, Generic, Literal, NamedTuple, TypeVar, overload
|
||||
from typing_extensions import Self, deprecated
|
||||
|
||||
__all__ = ("Cache", "FIFOCache", "LFUCache", "LRUCache", "RRCache", "TLRUCache", "TTLCache", "cached", "cachedmethod")
|
||||
@@ -11,6 +11,7 @@ __version__: str
|
||||
_KT = TypeVar("_KT")
|
||||
_VT = TypeVar("_VT")
|
||||
_T = TypeVar("_T")
|
||||
_R = TypeVar("_R")
|
||||
|
||||
class Cache(MutableMapping[_KT, _VT]):
|
||||
@overload
|
||||
@@ -99,22 +100,52 @@ class TLRUCache(_TimedCache[_KT, _VT]):
|
||||
def ttu(self) -> Callable[[_KT, _VT, float], float]: ...
|
||||
def expire(self, time: float | None = None) -> list[tuple[_KT, _VT]]: ...
|
||||
|
||||
class _CacheInfo(NamedTuple):
    # Statistics snapshot returned by cache_info(), mirroring the shape of
    # functools's CacheInfo.
    hits: int
    misses: int
    maxsize: int | None  # None when the cache is unbounded
    currsize: int

class _cached_wrapper(Generic[_R]):
    # Callable returned by @cached(..., info=False): preserves the wrapped
    # function's return type and keeps the original on __wrapped__.
    __wrapped__: Callable[..., _R]
    def __call__(self, /, *args: Any, **kwargs: Any) -> _R: ...

class _cached_wrapper_info(_cached_wrapper[_R]):
    # Variant returned by @cached(..., info=True): adds the statistics API.
    def cache_info(self) -> _CacheInfo: ...
    def cache_clear(self) -> None: ...
|
||||
|
||||
@overload
|
||||
def cached(
|
||||
cache: MutableMapping[_KT, Any] | None,
|
||||
key: Callable[..., _KT] = ...,
|
||||
lock: AbstractContextManager[Any] | None = None,
|
||||
condition: Condition | None = None,
|
||||
info: bool = False,
|
||||
) -> IdentityFunction: ...
|
||||
info: Literal[True] = ...,
|
||||
) -> Callable[[Callable[..., _R]], _cached_wrapper_info[_R]]: ...
|
||||
@overload
|
||||
def cached(
|
||||
cache: MutableMapping[_KT, Any] | None,
|
||||
key: Callable[..., _KT] = ...,
|
||||
lock: AbstractContextManager[Any] | None = None,
|
||||
condition: Condition | None = None,
|
||||
info: Literal[False] = ...,
|
||||
) -> Callable[[Callable[..., _R]], _cached_wrapper[_R]]: ...
|
||||
@overload
|
||||
@deprecated("Passing `info` as positional parameter is deprecated.")
|
||||
def cached(
|
||||
cache: MutableMapping[_KT, Any] | None,
|
||||
key: Callable[..., _KT] = ...,
|
||||
lock: AbstractContextManager[Any] | None = None,
|
||||
condition: bool | None = None,
|
||||
) -> IdentityFunction: ...
|
||||
condition: Literal[True] = ...,
|
||||
) -> Callable[[Callable[..., _R]], _cached_wrapper_info[_R]]: ...
|
||||
@overload
|
||||
@deprecated("Passing `info` as positional parameter is deprecated.")
|
||||
def cached(
|
||||
cache: MutableMapping[_KT, Any] | None,
|
||||
key: Callable[..., _KT] = ...,
|
||||
lock: AbstractContextManager[Any] | None = None,
|
||||
condition: Literal[False] | None = ...,
|
||||
) -> Callable[[Callable[..., _R]], _cached_wrapper[_R]]: ...
|
||||
def cachedmethod(
|
||||
cache: Callable[[Any], MutableMapping[_KT, Any] | None],
|
||||
key: Callable[..., _KT] = ...,
|
||||
|
||||
@@ -1,16 +1,51 @@
|
||||
from _typeshed import IdentityFunction
|
||||
from collections.abc import Callable, Sequence
|
||||
from typing import TypeVar
|
||||
from typing import Any, Final, Generic, NamedTuple, TypeVar, overload
|
||||
|
||||
__all__: Final = ("fifo_cache", "lfu_cache", "lru_cache", "rr_cache", "ttl_cache")
|
||||
|
||||
__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "rr_cache", "ttl_cache")
|
||||
_T = TypeVar("_T")
|
||||
_R = TypeVar("_R")
|
||||
|
||||
def fifo_cache(maxsize: float | None = 128, typed: bool = False) -> IdentityFunction: ...
|
||||
def lfu_cache(maxsize: float | None = 128, typed: bool = False) -> IdentityFunction: ...
|
||||
def lru_cache(maxsize: float | None = 128, typed: bool = False) -> IdentityFunction: ...
|
||||
class _CacheInfo(NamedTuple):
    # Statistics snapshot returned by cache_info(), mirroring the shape of
    # functools's CacheInfo.
    hits: int
    misses: int
    maxsize: int | None  # None when the cache is unbounded
    currsize: int

class _cachetools_cache_wrapper(Generic[_R]):
    # Wrapper produced by the cachetools.func decorators: behaves like a
    # functools.lru_cache wrapper, generic only in the return type.
    __wrapped__: Callable[..., _R]
    def __call__(self, /, *args: Any, **kwargs: Any) -> _R: ...
    def cache_info(self) -> _CacheInfo: ...
    def cache_clear(self) -> None: ...
    def cache_parameters(self) -> dict[str, Any]: ...
|
||||
|
||||
# Each decorator supports both the parameterized form (@lru_cache(maxsize=...))
# and the bare form (@lru_cache). The second overload of each pair matches the
# bare form, where the decorated function itself arrives as `maxsize`.
@overload
def fifo_cache(
    maxsize: int | None = 128, typed: bool = False
) -> Callable[[Callable[..., _R]], _cachetools_cache_wrapper[_R]]: ...
@overload
def fifo_cache(maxsize: Callable[..., _R], typed: bool = False) -> _cachetools_cache_wrapper[_R]: ...
@overload
def lfu_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _R]], _cachetools_cache_wrapper[_R]]: ...
@overload
def lfu_cache(maxsize: Callable[..., _R], typed: bool = False) -> _cachetools_cache_wrapper[_R]: ...
@overload
def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _R]], _cachetools_cache_wrapper[_R]]: ...
@overload
def lru_cache(maxsize: Callable[..., _R], typed: bool = False) -> _cachetools_cache_wrapper[_R]: ...
|
||||
@overload
|
||||
def rr_cache(
|
||||
maxsize: float | None = 128, choice: Callable[[Sequence[_T]], _T] | None = ..., typed: bool = False
|
||||
) -> IdentityFunction: ...
|
||||
maxsize: int | None = 128, choice: Callable[[Sequence[_T]], _T] | None = ..., typed: bool = False
|
||||
) -> Callable[[Callable[..., _R]], _cachetools_cache_wrapper[_R]]: ...
|
||||
@overload
|
||||
def rr_cache(
|
||||
maxsize: Callable[..., _R], choice: Callable[[Sequence[_T]], _T] | None = ..., typed: bool = False
|
||||
) -> _cachetools_cache_wrapper[_R]: ...
|
||||
@overload
|
||||
def ttl_cache(
|
||||
maxsize: float | None = 128, ttl: float = 600, timer: Callable[[], float] = ..., typed: bool = False
|
||||
) -> IdentityFunction: ...
|
||||
maxsize: int | None = 128, ttl: float = 600, timer: Callable[[], float] = ..., typed: bool = False
|
||||
) -> Callable[[Callable[..., _R]], _cachetools_cache_wrapper[_R]]: ...
|
||||
@overload
|
||||
def ttl_cache(
|
||||
maxsize: Callable[..., _R], ttl: float = 600, timer: Callable[[], float] = ..., typed: bool = False
|
||||
) -> _cachetools_cache_wrapper[_R]: ...
|
||||
|
||||
@@ -4,6 +4,6 @@ from collections.abc import Hashable
|
||||
__all__ = ("hashkey", "methodkey", "typedkey", "typedmethodkey")
|
||||
|
||||
def hashkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ...
|
||||
def methodkey(self: Unused, *args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ...
|
||||
def methodkey(self: Unused, /, *args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ...
|
||||
def typedkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ...
|
||||
def typedmethodkey(self: Unused, *args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ...
|
||||
def typedmethodkey(self: Unused, /, *args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ...
|
||||
|
||||
Reference in New Issue
Block a user