Remove redis (#13157)

Closes: #10592
This commit is contained in:
Sebastian Rittau
2024-11-30 23:52:56 +01:00
committed by GitHub
parent 2ccc53bb67
commit 0a2da01946
57 changed files with 0 additions and 6596 deletions

View File

@@ -1,73 +0,0 @@
redis.client.Pipeline.transaction # instance attribute has same name as superclass method
# async def mismatch problems
redis.asyncio.client.Pipeline.command_info
redis.asyncio.client.Pipeline.debug_segfault
redis.asyncio.client.Pipeline.memory_doctor
redis.asyncio.client.Pipeline.memory_help
redis.asyncio.client.Pipeline.script_debug
redis.asyncio.client.Pipeline.shutdown
# unclear problems
redis.asyncio.Sentinel.master_for
redis.asyncio.Sentinel.slave_for
redis.asyncio.sentinel.Sentinel.master_for
redis.asyncio.sentinel.Sentinel.slave_for
redis.sentinel.Sentinel.master_for
redis.sentinel.Sentinel.slave_for
# Metaclass differs:
redis.RedisCluster
redis.asyncio.Redis
redis.asyncio.RedisCluster
redis.asyncio.client.Pipeline
redis.asyncio.client.Redis
redis.client.Pipeline
redis.client.Redis
redis.cluster.ClusterPipeline
redis.cluster.RedisCluster
redis.commands.AsyncCoreCommands
redis.commands.CoreCommands
redis.commands.RedisClusterCommands
redis.commands.cluster.ClusterDataAccessCommands
redis.commands.cluster.ClusterManagementCommands
redis.commands.cluster.ClusterMultiKeyCommands
redis.commands.cluster.RedisClusterCommands
redis.commands.core.ACLCommands
redis.commands.core.AsyncACLCommands
redis.commands.core.AsyncBasicKeyCommands
redis.commands.core.AsyncClusterCommands
redis.commands.core.AsyncCoreCommands
redis.commands.core.AsyncDataAccessCommands
redis.commands.core.AsyncGeoCommands
redis.commands.core.AsyncHashCommands
redis.commands.core.AsyncHyperlogCommands
redis.commands.core.AsyncListCommands
redis.commands.core.AsyncManagementCommands
redis.commands.core.AsyncModuleCommands
redis.commands.core.AsyncPubSubCommands
redis.commands.core.AsyncScanCommands
redis.commands.core.AsyncScriptCommands
redis.commands.core.AsyncSetCommands
redis.commands.core.AsyncSortedSetCommands
redis.commands.core.AsyncStreamCommands
redis.commands.core.BasicKeyCommands
redis.commands.core.ClusterCommands
redis.commands.core.CoreCommands
redis.commands.core.DataAccessCommands
redis.commands.core.GeoCommands
redis.commands.core.HashCommands
redis.commands.core.HyperlogCommands
redis.commands.core.ListCommands
redis.commands.core.ManagementCommands
redis.commands.core.ModuleCommands
redis.commands.core.PubSubCommands
redis.commands.core.ScanCommands
redis.commands.core.ScriptCommands
redis.commands.core.SetCommands
redis.commands.core.SortedSetCommands
redis.commands.core.StreamCommands
redis.commands.json.Pipeline
redis.commands.timeseries.Pipeline
redis.asyncio.cluster.ClusterPipeline
redis.asyncio.cluster.RedisCluster

View File

@@ -1,16 +0,0 @@
from typing import TypedDict
import redis
class RedisStreamData(TypedDict):
    """Example TypedDict used to type-check the ``fields`` argument of ``xadd()``."""

    foo: str
    bar: bytes
def check_xadd(r: redis.Redis[str]) -> None:
    """Type-check-only test; never executed — it only needs to pass the type checker."""
    # check that TypedDicts are accepted for the `fields` parameter of `xadd()`
    #
    # N.B. the `pyright: ignore` is not part of the test,
    # it's just because the return type is currently unannotated
    r.xadd("stream", fields=RedisStreamData({"foo": "bar", "bar": b"foo"})) # pyright: ignore[reportUnknownMemberType]

View File

@@ -1,12 +0,0 @@
version = "4.6.0"
upstream_repository = "https://github.com/redis/redis-py"
# Requires a version of cryptography with a `py.typed` file
requires = ["cryptography>=35.0.0", "types-pyOpenSSL"]
partial_stub = true
obsolete_since = "5.0.0" # Released on 2023-08-15, marked obsolete 2024-07-25
[tool.stubtest]
ignore_missing_stub = true
# The runtime has an undeclared dependency on setuptools
stubtest_requirements = ["setuptools"]
extras = ["ocsp"]

View File

@@ -1,72 +0,0 @@
from . import backoff, client, connection, credentials, exceptions, sentinel, utils
from .cluster import RedisCluster as RedisCluster
# Names re-exported at the top level of the `redis` package.
__all__ = [
    "AuthenticationError",
    "AuthenticationWrongNumberOfArgsError",
    "BlockingConnectionPool",
    "BusyLoadingError",
    "ChildDeadlockedError",
    "Connection",
    "ConnectionError",
    "ConnectionPool",
    "CredentialProvider",
    "DataError",
    "from_url",
    "default_backoff",
    "InvalidResponse",
    "PubSubError",
    "ReadOnlyError",
    "Redis",
    "RedisCluster",
    "RedisError",
    "ResponseError",
    "Sentinel",
    "SentinelConnectionPool",
    "SentinelManagedConnection",
    "SentinelManagedSSLConnection",
    "SSLConnection",
    "UsernamePasswordCredentialProvider",
    "StrictRedis",
    "TimeoutError",
    "UnixDomainSocketConnection",
    "WatchError",
]

# The names above are aliases for objects defined in submodules; assignment
# (rather than `from ... import X as X`) mirrors how the runtime exposes them.
default_backoff = backoff.default_backoff
Redis = client.Redis
BlockingConnectionPool = connection.BlockingConnectionPool
Connection = connection.Connection
ConnectionPool = connection.ConnectionPool
SSLConnection = connection.SSLConnection
StrictRedis = client.StrictRedis
UnixDomainSocketConnection = connection.UnixDomainSocketConnection
from_url = utils.from_url
Sentinel = sentinel.Sentinel
SentinelConnectionPool = sentinel.SentinelConnectionPool
SentinelManagedConnection = sentinel.SentinelManagedConnection
SentinelManagedSSLConnection = sentinel.SentinelManagedSSLConnection
AuthenticationError = exceptions.AuthenticationError
AuthenticationWrongNumberOfArgsError = exceptions.AuthenticationWrongNumberOfArgsError
BusyLoadingError = exceptions.BusyLoadingError
ChildDeadlockedError = exceptions.ChildDeadlockedError
ConnectionError = exceptions.ConnectionError
DataError = exceptions.DataError
InvalidResponse = exceptions.InvalidResponse
PubSubError = exceptions.PubSubError
ReadOnlyError = exceptions.ReadOnlyError
RedisError = exceptions.RedisError
ResponseError = exceptions.ResponseError
TimeoutError = exceptions.TimeoutError
WatchError = exceptions.WatchError
CredentialProvider = credentials.CredentialProvider
UsernamePasswordCredentialProvider = credentials.UsernamePasswordCredentialProvider

# Version metadata; values are set dynamically at runtime, so only declared here.
__version__: str
VERSION: tuple[int | str, ...]

View File

@@ -1,64 +0,0 @@
from redis.asyncio.client import Redis as Redis, StrictRedis as StrictRedis
from redis.asyncio.cluster import RedisCluster as RedisCluster
from redis.asyncio.connection import (
BlockingConnectionPool as BlockingConnectionPool,
Connection as Connection,
ConnectionPool as ConnectionPool,
SSLConnection as SSLConnection,
UnixDomainSocketConnection as UnixDomainSocketConnection,
)
from redis.asyncio.parser import CommandsParser as CommandsParser
from redis.asyncio.sentinel import (
Sentinel as Sentinel,
SentinelConnectionPool as SentinelConnectionPool,
SentinelManagedConnection as SentinelManagedConnection,
SentinelManagedSSLConnection as SentinelManagedSSLConnection,
)
from redis.asyncio.utils import from_url as from_url
from redis.backoff import default_backoff as default_backoff
from redis.exceptions import (
AuthenticationError as AuthenticationError,
AuthenticationWrongNumberOfArgsError as AuthenticationWrongNumberOfArgsError,
BusyLoadingError as BusyLoadingError,
ChildDeadlockedError as ChildDeadlockedError,
ConnectionError as ConnectionError,
DataError as DataError,
InvalidResponse as InvalidResponse,
PubSubError as PubSubError,
ReadOnlyError as ReadOnlyError,
RedisError as RedisError,
ResponseError as ResponseError,
TimeoutError as TimeoutError,
WatchError as WatchError,
)
# Public API of the `redis.asyncio` package (everything imported above is
# re-exported via the `X as X` form; this list pins the canonical set).
__all__ = [
    "AuthenticationError",
    "AuthenticationWrongNumberOfArgsError",
    "BlockingConnectionPool",
    "BusyLoadingError",
    "ChildDeadlockedError",
    "CommandsParser",
    "Connection",
    "ConnectionError",
    "ConnectionPool",
    "DataError",
    "from_url",
    "default_backoff",
    "InvalidResponse",
    "PubSubError",
    "ReadOnlyError",
    "Redis",
    "RedisCluster",
    "RedisError",
    "ResponseError",
    "Sentinel",
    "SentinelConnectionPool",
    "SentinelManagedConnection",
    "SentinelManagedSSLConnection",
    "SSLConnection",
    "StrictRedis",
    "TimeoutError",
    "UnixDomainSocketConnection",
    "WatchError",
]

File diff suppressed because it is too large Load Diff

View File

@@ -1,229 +0,0 @@
from _typeshed import Incomplete
from collections.abc import Awaitable, Callable, Mapping
from types import TracebackType
from typing import Any, Generic, TypeVar
from typing_extensions import Self
from redis.asyncio.client import ResponseCallbackT
from redis.asyncio.connection import AbstractConnection, BaseParser, Connection, Encoder
from redis.asyncio.parser import CommandsParser
from redis.client import AbstractRedis
from redis.cluster import AbstractRedisCluster, LoadBalancer
# TODO: add AsyncRedisClusterCommands stubs
# from redis.commands import AsyncRedisClusterCommands
from redis.commands.core import _StrType
from redis.credentials import CredentialProvider
from redis.exceptions import ResponseError
from redis.retry import Retry
from redis.typing import AnyKeyT, EncodableT, KeyT
TargetNodesT = TypeVar("TargetNodesT", str, ClusterNode, list[ClusterNode], dict[Any, ClusterNode]) # noqa: Y001
# It uses `DefaultParser` in real life, but it is a dynamic base class.
class ClusterParser(BaseParser):
    """Parser used by the async cluster client; implements BaseParser's abstract API."""

    def on_disconnect(self) -> None: ...
    def on_connect(self, connection: AbstractConnection) -> None: ...
    async def can_read_destructive(self) -> bool: ...
    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ...
class RedisCluster(AbstractRedis, AbstractRedisCluster, Generic[_StrType]): # TODO: AsyncRedisClusterCommands
    """Async Redis Cluster client stub.

    ``_StrType`` tracks whether responses are decoded (``str``) or raw
    (``bytes``), selected by the ``decode_responses`` argument.
    """

    # Alternate constructor: same keyword set as __init__, with connection
    # details parsed from `url`.
    @classmethod
    def from_url(
        cls,
        url: str,
        *,
        host: str | None = None,
        port: str | int = 6379,
        # Cluster related kwargs
        startup_nodes: list[ClusterNode] | None = None,
        require_full_coverage: bool = True,
        read_from_replicas: bool = False,
        reinitialize_steps: int = 5,
        cluster_error_retry_attempts: int = 3,
        connection_error_retry_attempts: int = 3,
        max_connections: int = 2147483648,
        # Client related kwargs
        db: str | int = 0,
        path: str | None = None,
        credential_provider: CredentialProvider | None = None,
        username: str | None = None,
        password: str | None = None,
        client_name: str | None = None,
        # Encoding related kwargs
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        # Connection related kwargs
        health_check_interval: float = 0,
        socket_connect_timeout: float | None = None,
        socket_keepalive: bool = False,
        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
        socket_timeout: float | None = None,
        retry: Retry | None = None,
        # Exception *classes*, matching the `list[type[...]]` annotations used
        # by the connection stubs elsewhere in this package.
        retry_on_error: list[type[Exception]] | None = None,
        # SSL related kwargs
        ssl: bool = False,
        ssl_ca_certs: str | None = None,
        ssl_ca_data: str | None = None,
        ssl_cert_reqs: str = "required",
        ssl_certfile: str | None = None,
        ssl_check_hostname: bool = False,
        ssl_keyfile: str | None = None,
        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
    ) -> Self: ...

    retry: Retry | None
    connection_kwargs: dict[str, Any]
    nodes_manager: NodesManager
    encoder: Encoder
    read_from_replicas: bool
    reinitialize_steps: int
    cluster_error_retry_attempts: int
    reinitialize_counter: int
    commands_parser: CommandsParser
    node_flags: set[str]
    command_flags: dict[str, str]
    response_callbacks: Incomplete
    result_callbacks: dict[str, Callable[[Incomplete, Incomplete], Incomplete]]
    def __init__(
        self,
        host: str | None = None,
        port: str | int = 6379,
        # Cluster related kwargs
        startup_nodes: list[ClusterNode] | None = None,
        require_full_coverage: bool = True,
        read_from_replicas: bool = False,
        reinitialize_steps: int = 5,
        cluster_error_retry_attempts: int = 3,
        connection_error_retry_attempts: int = 3,
        max_connections: int = 2147483648,
        # Client related kwargs
        db: str | int = 0,
        path: str | None = None,
        credential_provider: CredentialProvider | None = None,
        username: str | None = None,
        password: str | None = None,
        client_name: str | None = None,
        # Encoding related kwargs
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        # Connection related kwargs
        health_check_interval: float = 0,
        socket_connect_timeout: float | None = None,
        socket_keepalive: bool = False,
        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
        socket_timeout: float | None = None,
        retry: Retry | None = None,
        # See note on from_url: exception classes, not instances.
        retry_on_error: list[type[Exception]] | None = None,
        # SSL related kwargs
        ssl: bool = False,
        ssl_ca_certs: str | None = None,
        ssl_ca_data: str | None = None,
        ssl_cert_reqs: str = "required",
        ssl_certfile: str | None = None,
        ssl_check_hostname: bool = False,
        ssl_keyfile: str | None = None,
        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
    ) -> None: ...
    async def initialize(self) -> Self: ...
    async def close(self) -> None: ...
    async def __aenter__(self) -> Self: ...
    async def __aexit__(
        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...
    def __await__(self) -> Awaitable[Self]: ...
    def __del__(self) -> None: ...
    async def on_connect(self, connection: Connection) -> None: ...
    def get_nodes(self) -> list[ClusterNode]: ...
    def get_primaries(self) -> list[ClusterNode]: ...
    def get_replicas(self) -> list[ClusterNode]: ...
    def get_random_node(self) -> ClusterNode: ...
    def get_default_node(self) -> ClusterNode: ...
    def set_default_node(self, node: ClusterNode) -> None: ...
    def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ...
    def get_node_from_key(self, key: str, replica: bool = False) -> ClusterNode | None: ...
    def keyslot(self, key: EncodableT) -> int: ...
    def get_encoder(self) -> Encoder: ...
    def get_connection_kwargs(self) -> dict[str, Any | None]: ...
    def set_response_callback(self, command: str, callback: ResponseCallbackT) -> None: ...
    async def execute_command(self, *args: EncodableT, **kwargs: Any) -> Any: ...
    def pipeline(self, transaction: Any | None = None, shard_hint: Any | None = None) -> ClusterPipeline[_StrType]: ...
class ClusterNode:
    """A single node of the cluster, with its own connection handling."""

    host: str
    port: str | int
    name: str
    server_type: str | None
    max_connections: int
    connection_class: type[Connection]
    connection_kwargs: dict[str, Any]
    response_callbacks: dict[Incomplete, Incomplete]
    def __init__(
        self,
        host: str,
        port: str | int,
        server_type: str | None = None,
        *,
        max_connections: int = 2147483648,
        connection_class: type[Connection] = ...,
        **connection_kwargs: Any,
    ) -> None: ...
    def __eq__(self, obj: object) -> bool: ...
    def __del__(self) -> None: ...
    async def disconnect(self) -> None: ...
    def acquire_connection(self) -> Connection: ...
    async def parse_response(self, connection: Connection, command: str, **kwargs: Any) -> Any: ...
    async def execute_command(self, *args: Any, **kwargs: Any) -> Any: ...
    async def execute_pipeline(self, commands: list[PipelineCommand]) -> bool: ...
class NodesManager:
    """Tracks cluster topology: node and slot caches, plus read load balancing."""

    startup_nodes: dict[str, ClusterNode]
    require_full_coverage: bool
    connection_kwargs: dict[str, Any]
    default_node: ClusterNode | None
    # Maps node name -> node, and hash slot -> candidate nodes for that slot.
    nodes_cache: dict[str, ClusterNode]
    slots_cache: dict[int, list[ClusterNode]]
    read_load_balancer: LoadBalancer
    # Optional hook to rewrite advertised (host, port) pairs, e.g. for NAT.
    address_remap: Callable[[str, int], tuple[str, int]] | None
    def __init__(
        self,
        startup_nodes: list[ClusterNode],
        require_full_coverage: bool,
        connection_kwargs: dict[str, Any],
        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
    ) -> None: ...
    def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ...
    def set_nodes(self, old: dict[str, ClusterNode], new: dict[str, ClusterNode], remove_old: bool = False) -> None: ...
    def get_node_from_slot(self, slot: int, read_from_replicas: bool = False) -> ClusterNode: ...
    def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ...
    async def initialize(self) -> None: ...
    async def close(self, attr: str = "nodes_cache") -> None: ...
    def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ...
class ClusterPipeline(AbstractRedis, AbstractRedisCluster, Generic[_StrType]): # TODO: AsyncRedisClusterCommands
    """Pipeline for the async cluster client; command calls queue and return self."""

    def __init__(self, client: RedisCluster[_StrType]) -> None: ...
    async def initialize(self) -> Self: ...
    async def __aenter__(self) -> Self: ...
    async def __aexit__(
        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...
    def __await__(self) -> Awaitable[Self]: ...
    def __enter__(self) -> Self: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...
    def __bool__(self) -> bool: ...
    def __len__(self) -> int: ...
    # Returns self so calls can be chained before execute().
    def execute_command(self, *args: KeyT | EncodableT, **kwargs: Any) -> Self: ...
    async def execute(self, raise_on_error: bool = True, allow_redirections: bool = True) -> list[Any]: ...
    def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> Self: ...
class PipelineCommand:
    """One queued command in a ClusterPipeline."""

    args: Any
    kwargs: Any
    # Index of this command within the pipeline (its slot in the result list).
    position: int
    result: Exception | None | Any
    def __init__(self, position: int, *args: Any, **kwargs: Any) -> None: ...

View File

@@ -1,363 +0,0 @@
import asyncio
import enum
import ssl
from _typeshed import Unused
from abc import abstractmethod
from collections.abc import Callable, Iterable, Mapping
from types import MappingProxyType
from typing import Any, Final, Generic, Literal, Protocol, TypedDict, TypeVar, overload
from typing_extensions import Self, TypeAlias
from redis.asyncio.retry import Retry
from redis.credentials import CredentialProvider
from redis.exceptions import AuthenticationError, RedisError, ResponseError
from redis.typing import EncodableT, EncodedT
_SSLVerifyMode: TypeAlias = Literal["none", "optional", "required"]
SYM_STAR: Final[bytes]
SYM_DOLLAR: Final[bytes]
SYM_CRLF: Final[bytes]
SYM_LF: Final[bytes]
SYM_EMPTY: Final[bytes]
SERVER_CLOSED_CONNECTION_ERROR: Final[str]
class _Sentinel(enum.Enum):
    # Single-member enum used as a "not passed" marker for keyword arguments,
    # distinct from None (which is a meaningful value for some parameters).
    sentinel = object()
SENTINEL: Final[object]
MODULE_LOAD_ERROR: Final[str]
NO_SUCH_MODULE_ERROR: Final[str]
MODULE_UNLOAD_NOT_POSSIBLE_ERROR: Final[str]
MODULE_EXPORTS_DATA_TYPES_ERROR: Final[str]
NO_AUTH_SET_ERROR: Final[dict[str, type[AuthenticationError]]]
class Encoder:
    """Encodes values to bytes for the wire and optionally decodes responses back."""

    encoding: str
    encoding_errors: str
    decode_responses: bool
    def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ...
    def encode(self, value: EncodableT) -> EncodedT: ...
    # `force=True` decodes even when decode_responses is off — TODO confirm against runtime.
    def decode(self, value: EncodableT, force: bool = False) -> EncodableT: ...
ExceptionMappingT: TypeAlias = Mapping[str, type[Exception] | Mapping[str, type[Exception]]]
class BaseParser:
    """Abstract base class for response parsers; subclasses implement the abstract methods."""

    # Maps error-reply prefixes to exception classes (possibly nested per sub-message).
    EXCEPTION_CLASSES: ExceptionMappingT
    def __init__(self, socket_read_size: int) -> None: ...
    @classmethod
    def parse_error(cls, response: str) -> ResponseError: ...
    @abstractmethod
    def on_disconnect(self) -> None: ...
    @abstractmethod
    def on_connect(self, connection: AbstractConnection) -> None: ...
    @abstractmethod
    async def can_read_destructive(self) -> bool: ...
    @abstractmethod
    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ...
class PythonParser(BaseParser):
    """Pure-Python response parser (used when hiredis is unavailable)."""

    # None until on_connect() attaches the connection's encoder.
    encoder: Encoder | None
    def __init__(self, socket_read_size: int) -> None: ...
    def on_connect(self, connection: AbstractConnection) -> None: ...
    def on_disconnect(self) -> None: ...
    async def can_read_destructive(self) -> bool: ...
    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | None: ...
class HiredisParser(BaseParser):
    """Response parser backed by the hiredis C extension."""

    def __init__(self, socket_read_size: int) -> None: ...
    def on_connect(self, connection: AbstractConnection) -> None: ...
    def on_disconnect(self) -> None: ...
    async def can_read_destructive(self) -> bool: ...
    async def read_from_socket(self) -> Literal[True]: ...
    async def read_response(self, disable_decoding: bool = False) -> EncodableT | list[EncodableT]: ...
DefaultParser: type[PythonParser | HiredisParser]
class ConnectCallbackProtocol(Protocol):
    # Synchronous on-connect callback; return value is ignored (unannotated upstream).
    def __call__(self, connection: Connection): ...
class AsyncConnectCallbackProtocol(Protocol):
    # Awaitable variant of the on-connect callback; return value is ignored.
    async def __call__(self, connection: Connection): ...
ConnectCallbackT: TypeAlias = ConnectCallbackProtocol | AsyncConnectCallbackProtocol
class AbstractConnection:
    """Base class for async connections; transport specifics live in subclasses
    (TCP, SSL, Unix socket), which must implement ``repr_pieces``."""

    pid: int
    db: str | int
    client_name: str | None
    credential_provider: CredentialProvider | None
    password: str | None
    username: str | None
    socket_timeout: float | None
    socket_connect_timeout: float | None
    retry_on_timeout: bool
    retry_on_error: list[type[Exception]]
    retry: Retry
    health_check_interval: float
    next_health_check: float
    encoder: Encoder
    redis_connect_func: ConnectCallbackT | None
    def __init__(
        self,
        *,
        db: str | int = 0,
        password: str | None = None,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        retry_on_timeout: bool = False,
        # Default is the _Sentinel marker, not None: "not passed" is distinct
        # from an explicit empty list.
        retry_on_error: list[type[RedisError]] | _Sentinel = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: float = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: ConnectCallbackT | None = None,
        encoder_class: type[Encoder] = ...,
        credential_provider: CredentialProvider | None = None,
    ) -> None: ...
    @abstractmethod
    def repr_pieces(self) -> list[tuple[str, Any]]: ...
    @property
    def is_connected(self) -> bool: ...
    def register_connect_callback(self, callback: ConnectCallbackT) -> None: ...
    def clear_connect_callbacks(self) -> None: ...
    def set_parser(self, parser_class: type[BaseParser]) -> None: ...
    async def connect(self) -> None: ...
    async def on_connect(self) -> None: ...
    async def disconnect(self, nowait: bool = False) -> None: ...
    async def check_health(self) -> None: ...
    async def send_packed_command(self, command: bytes | str | Iterable[bytes], check_health: bool = True) -> None: ...
    async def send_command(self, *args: Any, **kwargs: Any) -> None: ...
    async def can_read_destructive(self) -> bool: ...
    async def read_response(
        self, disable_decoding: bool = False, timeout: float | None = None, *, disconnect_on_error: bool = True
    ) -> EncodableT | list[EncodableT] | None: ...
    def pack_command(self, *args: EncodableT) -> list[bytes]: ...
    def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> list[bytes]: ...
class Connection(AbstractConnection):
    """TCP connection to a Redis server."""

    host: str
    port: int
    socket_keepalive: bool
    socket_keepalive_options: Mapping[int, int | bytes] | None
    socket_type: int
    def __init__(
        self,
        *,
        host: str = "localhost",
        port: str | int = 6379,
        socket_keepalive: bool = False,
        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
        socket_type: int = 0,
        # **kwargs forwarded to AbstractConnection.
        db: str | int = 0,
        password: str | None = None,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        retry_on_timeout: bool = False,
        retry_on_error: list[type[RedisError]] | _Sentinel = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: float = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: ConnectCallbackT | None = None,
        encoder_class: type[Encoder] = ...,
        credential_provider: CredentialProvider | None = None,
    ) -> None: ...
    def repr_pieces(self) -> list[tuple[str, Any]]: ...
class SSLConnection(Connection):
    """TCP connection wrapped in TLS; SSL settings are held in a RedisSSLContext."""

    ssl_context: RedisSSLContext
    def __init__(
        self,
        # NOTE: unlike the other connection classes, the SSL parameters are
        # positional-capable; everything after `*` is keyword-only.
        ssl_keyfile: str | None = None,
        ssl_certfile: str | None = None,
        ssl_cert_reqs: _SSLVerifyMode = "required",
        ssl_ca_certs: str | None = None,
        ssl_ca_data: str | None = None,
        ssl_check_hostname: bool = False,
        *,
        # **kwargs forwarded to Connection.
        host: str = "localhost",
        port: str | int = 6379,
        socket_keepalive: bool = False,
        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
        socket_type: int = 0,
        db: str | int = 0,
        password: str | None = None,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        retry_on_timeout: bool = False,
        retry_on_error: list[type[RedisError]] | _Sentinel = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: float = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: ConnectCallbackT | None = None,
        encoder_class: type[Encoder] = ...,
        credential_provider: CredentialProvider | None = None,
    ) -> None: ...
    # Read-only views of the underlying RedisSSLContext settings.
    @property
    def keyfile(self) -> str | None: ...
    @property
    def certfile(self) -> str | None: ...
    @property
    def cert_reqs(self) -> ssl.VerifyMode: ...
    @property
    def ca_certs(self) -> str | None: ...
    @property
    def ca_data(self) -> str | None: ...
    @property
    def check_hostname(self) -> bool: ...
class RedisSSLContext:
    """Lazily-built ssl.SSLContext holder; get() creates/returns the context."""

    keyfile: str | None
    certfile: str | None
    cert_reqs: ssl.VerifyMode
    ca_certs: str | None
    ca_data: str | None
    check_hostname: bool
    # None until get() has been called — presumably built on first use; TODO confirm.
    context: ssl.SSLContext | None
    def __init__(
        self,
        keyfile: str | None = None,
        certfile: str | None = None,
        # Accepts the string form ("none"/"optional"/"required"); stored as ssl.VerifyMode.
        cert_reqs: _SSLVerifyMode | None = None,
        ca_certs: str | None = None,
        ca_data: str | None = None,
        check_hostname: bool = False,
    ) -> None: ...
    def get(self) -> ssl.SSLContext: ...
class UnixDomainSocketConnection(Connection):
    """Connection over a Unix domain socket at ``path``."""

    path: str
    def __init__(
        self,
        *,
        path: str = "",
        # **kwargs forwarded to AbstractConnection.
        db: str | int = 0,
        password: str | None = None,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        retry_on_timeout: bool = False,
        retry_on_error: list[type[RedisError]] | _Sentinel = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: float = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: ConnectCallbackT | None = None,
        encoder_class: type[Encoder] = ...,
        credential_provider: CredentialProvider | None = None,
    ) -> None: ...
    def repr_pieces(self) -> list[tuple[str, Any]]: ...
FALSE_STRINGS: Final[tuple[str, ...]]
def to_bool(value: object) -> bool | None: ...
URL_QUERY_ARGUMENT_PARSERS: MappingProxyType[str, Callable[[str], Any]]
class ConnectKwargs(TypedDict):
    """Connection keyword arguments extracted from a redis:// / unix:// URL."""

    username: str
    password: str
    connection_class: type[AbstractConnection]
    host: str
    port: int
    db: int
    path: str
def parse_url(url: str) -> ConnectKwargs: ...
_ConnectionT = TypeVar("_ConnectionT", bound=AbstractConnection)
class ConnectionPool(Generic[_ConnectionT]):
    """Pool of async connections, generic over the connection class it creates."""

    # kwargs accepts all arguments from the connection class chosen for
    # the given URL, except those encoded in the URL itself.
    @classmethod
    def from_url(cls, url: str, **kwargs: Any) -> Self: ...
    connection_class: type[_ConnectionT]
    connection_kwargs: Mapping[str, Any]
    max_connections: int
    encoder_class: type[Encoder]
    pid: int
    # Overloads: passing connection_class pins _ConnectionT; omitting it
    # defaults the pool to plain Connection.
    @overload
    def __init__(
        self: ConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
        connection_class: type[_ConnectionT],
        max_connections: int | None = None,
        # **kwargs are passed to the constructed connection instances.
        **connection_kwargs: Any,
    ) -> None: ...
    @overload
    def __init__(self: ConnectionPool[Connection], *, max_connections: int | None = None, **connection_kwargs) -> None: ...
    def reset(self) -> None: ...
    async def get_connection(self, command_name: Unused, *keys: Unused, **options: Unused) -> _ConnectionT: ...
    def get_encoder(self) -> Encoder: ...
    def make_connection(self) -> _ConnectionT: ...
    async def release(self, connection: AbstractConnection) -> None: ...
    def owns_connection(self, connection: AbstractConnection) -> bool: ...
    async def disconnect(self, inuse_connections: bool = True) -> None: ...
    def set_retry(self, retry: Retry) -> None: ...
class BlockingConnectionPool(ConnectionPool[_ConnectionT]):
    """ConnectionPool variant that blocks (up to ``timeout``) when the pool is exhausted."""

    queue_class: type[asyncio.Queue[_ConnectionT | None]]
    timeout: int | None
    pool: asyncio.Queue[_ConnectionT | None]
    # Overloads cover connection_class passed positionally, by keyword, or
    # omitted (defaulting the pool to plain Connection).
    @overload
    def __init__(
        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
        max_connections: int,
        timeout: int | None,
        connection_class: type[_ConnectionT],
        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
        # **kwargs are passed to the constructed connection instances.
        **connection_kwargs: Any,
    ) -> None: ...
    @overload
    def __init__(
        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
        max_connections: int = 50,
        timeout: int | None = 20,
        *,
        connection_class: type[_ConnectionT],
        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
        # **kwargs are passed to the constructed connection instances.
        **connection_kwargs: Any,
    ) -> None: ...
    @overload
    def __init__(
        self: BlockingConnectionPool[Connection],
        max_connections: int = 50,
        timeout: int | None = 20,
        *,
        queue_class: type[asyncio.Queue[Connection | None]] = ...,
        # **kwargs are passed to the constructed connection instances.
        **connection_kwargs: Any,
    ) -> None: ...

View File

@@ -1,51 +0,0 @@
import threading
from collections.abc import Awaitable
from types import SimpleNamespace, TracebackType
from typing import Any, ClassVar
from typing_extensions import Self
from redis.asyncio import Redis
from redis.commands.core import AsyncScript
class Lock:
    """Async distributed lock implemented with Lua scripts on a Redis key.

    Usable as an async context manager (acquires on enter, releases on exit).
    """

    # Registered AsyncScript objects; None until register_scripts() runs.
    lua_release: ClassVar[AsyncScript | None]
    lua_extend: ClassVar[AsyncScript | None]
    lua_reacquire: ClassVar[AsyncScript | None]
    # Lua source for the three scripts above.
    LUA_RELEASE_SCRIPT: ClassVar[str]
    LUA_EXTEND_SCRIPT: ClassVar[str]
    LUA_REACQUIRE_SCRIPT: ClassVar[str]
    redis: Redis[Any]
    name: str | bytes | memoryview
    timeout: float | None
    sleep: float
    blocking: bool
    blocking_timeout: float | None
    thread_local: bool
    # Holds the lock token; threading.local when thread_local=True, else SimpleNamespace.
    local: threading.local | SimpleNamespace
    def __init__(
        self,
        redis: Redis[Any],
        name: str | bytes | memoryview,
        timeout: float | None = None,
        sleep: float = 0.1,
        blocking: bool = True,
        blocking_timeout: float | None = None,
        thread_local: bool = True,
    ) -> None: ...
    def register_scripts(self) -> None: ...
    async def __aenter__(self) -> Self: ...
    async def __aexit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> None: ...
    async def acquire(
        self, blocking: bool | None = None, blocking_timeout: float | None = None, token: str | bytes | None = None
    ) -> bool: ...
    async def do_acquire(self, token: str | bytes) -> bool: ...
    async def locked(self) -> bool: ...
    async def owned(self) -> bool: ...
    # release/extend/reacquire return awaitables (the do_* coroutines do the work).
    def release(self) -> Awaitable[None]: ...
    async def do_release(self, expected_token: bytes) -> None: ...
    def extend(self, additional_time: float, replace_ttl: bool = False) -> Awaitable[bool]: ...
    async def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ...
    def reacquire(self) -> Awaitable[bool]: ...
    async def do_reacquire(self) -> bool: ...

View File

@@ -1,9 +0,0 @@
from _typeshed import Incomplete
from typing import Any
# TODO: define and use:
# from redis.asyncio.cluster import ClusterNode
class CommandsParser:
    """Parses COMMAND output to locate the key arguments of a cluster command."""

    async def initialize(self, node: Incomplete | None = None) -> None: ...  # TODO: ClusterNode
    # Returns the keys of the command in *args, or None if it has none.
    async def get_keys(self, *args: Any) -> tuple[str, ...] | None: ...

View File

@@ -1,12 +0,0 @@
from collections.abc import Awaitable, Callable, Iterable
from typing import TypeVar
from redis.backoff import AbstractBackoff
from redis.exceptions import RedisError
_T = TypeVar("_T")
class Retry:
    """Retries an awaitable ``do`` with the given backoff strategy; ``fail`` is
    awaited on each failed attempt."""

    def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[RedisError], ...] = ...) -> None: ...
    def update_supported_errors(self, specified_errors: Iterable[type[RedisError]]) -> None: ...
    async def call_with_retry(self, do: Callable[[], Awaitable[_T]], fail: Callable[[RedisError], Awaitable[object]]) -> _T: ...

View File

@@ -1,162 +0,0 @@
from collections.abc import AsyncIterator, Iterable, Mapping
from typing import Any, Literal, TypedDict, TypeVar, overload
from redis.asyncio.client import Redis
from redis.asyncio.connection import (
BaseParser,
ConnectCallbackT,
Connection,
ConnectionPool,
Encoder,
SSLConnection,
_ConnectionT,
_Sentinel,
)
from redis.asyncio.retry import Retry
from redis.commands import AsyncSentinelCommands
from redis.credentials import CredentialProvider
from redis.exceptions import ConnectionError, RedisError
_RedisT = TypeVar("_RedisT", bound=Redis[Any])
class MasterNotFoundError(ConnectionError): ...
class SlaveNotFoundError(ConnectionError): ...
class SentinelManagedConnection(Connection):
connection_pool: ConnectionPool[Any] | None
def __init__(
self,
*,
connection_pool: ConnectionPool[Any] | None,
# **kwargs forwarded to Connection.
host: str = "localhost",
port: str | int = 6379,
socket_keepalive: bool = False,
socket_keepalive_options: Mapping[int, int | bytes] | None = None,
socket_type: int = 0,
db: str | int = 0,
password: str | None = None,
socket_timeout: float | None = None,
socket_connect_timeout: float | None = None,
retry_on_timeout: bool = False,
retry_on_error: list[type[RedisError]] | _Sentinel = ...,
encoding: str = "utf-8",
encoding_errors: str = "strict",
decode_responses: bool = False,
parser_class: type[BaseParser] = ...,
socket_read_size: int = 65536,
health_check_interval: float = 0,
client_name: str | None = None,
username: str | None = None,
retry: Retry | None = None,
redis_connect_func: ConnectCallbackT | None = None,
encoder_class: type[Encoder] = ...,
credential_provider: CredentialProvider | None = None,
) -> None: ...
async def connect_to(self, address: tuple[str, int]) -> None: ...
async def connect(self) -> None: ...
class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...
class SentinelConnectionPool(ConnectionPool[_ConnectionT]):
is_master: bool
check_connection: bool
service_name: str
sentinel_manager: Sentinel
master_address: tuple[str, int] | None
slave_rr_counter: int | None
def __init__(
self,
service_name: str,
sentinel_manager: Sentinel,
*,
ssl: bool = False,
connection_class: type[SentinelManagedConnection] = ...,
is_master: bool = True,
check_connection: bool = False,
# **kwargs ultimately forwarded to construction Connection instances.
host: str = "localhost",
port: str | int = 6379,
socket_keepalive: bool = False,
socket_keepalive_options: Mapping[int, int | bytes] | None = None,
socket_type: int = 0,
db: str | int = 0,
password: str | None = None,
socket_timeout: float | None = None,
socket_connect_timeout: float | None = None,
retry_on_timeout: bool = False,
retry_on_error: list[type[RedisError]] | _Sentinel = ...,
encoding: str = "utf-8",
encoding_errors: str = "strict",
decode_responses: bool = False,
parser_class: type[BaseParser] = ...,
socket_read_size: int = 65536,
health_check_interval: float = 0,
client_name: str | None = None,
username: str | None = None,
retry: Retry | None = None,
redis_connect_func: ConnectCallbackT | None = None,
encoder_class: type[Encoder] = ...,
credential_provider: CredentialProvider | None = None,
) -> None: ...
async def get_master_address(self) -> tuple[str, int]: ...
async def rotate_slaves(self) -> AsyncIterator[tuple[str, int]]: ...
_State = TypedDict(
"_State", {"ip": str, "port": int, "is_master": bool, "is_sdown": bool, "is_odown": bool, "num-other-sentinels": int}
)
class Sentinel(AsyncSentinelCommands):
sentinel_kwargs: Mapping[str, Any]
sentinels: list[Redis[Any]]
min_other_sentinels: int
connection_kwargs: Mapping[str, Any]
def __init__(
self,
sentinels: Iterable[tuple[str, int]],
min_other_sentinels: int = 0,
sentinel_kwargs: Mapping[str, Any] | None = None,
**connection_kwargs: Any,
) -> None: ...
async def execute_command(self, *args: Any, once: bool = False, **kwargs: Any) -> Literal[True]: ...
def check_master_state(self, state: _State, service_name: str) -> bool: ...
async def discover_master(self, service_name: str) -> tuple[str, int]: ...
def filter_slaves(self, slaves: Iterable[_State]) -> list[tuple[str, int]]: ...
async def discover_slaves(self, service_name: str) -> list[tuple[str, int]]: ...
@overload
def master_for(
self,
service_name: str,
redis_class: type[_RedisT],
connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
# Forwarded to the connection pool constructor.
**kwargs: Any,
) -> _RedisT: ...
@overload
def master_for(
self,
service_name: str,
*,
connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
# Forwarded to the connection pool constructor.
**kwargs: Any,
) -> Redis[Any]: ...
@overload
def slave_for(
self,
service_name: str,
redis_class: type[_RedisT],
connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
# Forwarded to the connection pool constructor.
**kwargs: Any,
) -> _RedisT: ...
@overload
def slave_for(
self,
service_name: str,
*,
connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
# Forwarded to the connection pool constructor.
**kwargs: Any,
) -> Redis[Any]: ...

View File

@@ -1,15 +0,0 @@
from types import TracebackType
from typing import Any, Generic
from redis.asyncio.client import Pipeline, Redis
from redis.client import _StrType
def from_url(url: str, **kwargs) -> Redis[Any]: ...
class pipeline(Generic[_StrType]):
p: Pipeline[_StrType]
def __init__(self, redis_obj: Redis[_StrType]) -> None: ...
async def __aenter__(self) -> Pipeline[_StrType]: ...
async def __aexit__(
self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
) -> None: ...

View File

@@ -1,31 +0,0 @@
from abc import ABC, abstractmethod
class AbstractBackoff(ABC):
def reset(self) -> None: ...
@abstractmethod
def compute(self, failures: int) -> float: ...
class ConstantBackoff(AbstractBackoff):
def __init__(self, backoff: int) -> None: ...
def compute(self, failures: int) -> float: ...
class NoBackoff(ConstantBackoff):
def __init__(self) -> None: ...
class ExponentialBackoff(AbstractBackoff):
def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
def compute(self, failures: int) -> float: ...
class FullJitterBackoff(AbstractBackoff):
def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
def compute(self, failures: int) -> float: ...
class EqualJitterBackoff(AbstractBackoff):
def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
def compute(self, failures: int) -> float: ...
class DecorrelatedJitterBackoff(AbstractBackoff):
def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
def compute(self, failures: int) -> float: ...
def default_backoff() -> EqualJitterBackoff: ...

View File

@@ -1,817 +0,0 @@
import threading
from _typeshed import Incomplete, SupportsItems, Unused
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
from datetime import datetime, timedelta
from re import Pattern
from types import TracebackType
from typing import Any, ClassVar, Literal, TypeVar, overload
from typing_extensions import Self, TypeAlias
from redis import RedisError
from .commands import CoreCommands, RedisModuleCommands, SentinelCommands
from .connection import ConnectionPool, _ConnectFunc, _ConnectionPoolOptions
from .credentials import CredentialProvider
from .lock import Lock
from .retry import Retry
from .typing import ChannelT, EncodableT, KeyT, PatternT
_Value: TypeAlias = bytes | float | int | str
_Key: TypeAlias = str | bytes
# Lib returns str or bytes depending on value of decode_responses
_StrType = TypeVar("_StrType", bound=str | bytes)
_VT = TypeVar("_VT")
_T = TypeVar("_T")
# Keyword arguments that are passed to Redis.parse_response().
_ParseResponseOptions: TypeAlias = Any
# Keyword arguments that are passed to Redis.execute_command().
_CommandOptions: TypeAlias = _ConnectionPoolOptions | _ParseResponseOptions
SYM_EMPTY: bytes
EMPTY_RESPONSE: str
NEVER_DECODE: str
class CaseInsensitiveDict(dict[_StrType, _VT]):
def __init__(self, data: SupportsItems[_StrType, _VT]) -> None: ...
def update(self, data: SupportsItems[_StrType, _VT]) -> None: ... # type: ignore[override]
@overload
def get(self, k: _StrType, default: None = None) -> _VT | None: ...
@overload
def get(self, k: _StrType, default: _VT | _T) -> _VT | _T: ...
# Overrides many other methods too, but without changing signature
def list_or_args(keys, args): ...
def timestamp_to_datetime(response): ...
def string_keys_to_dict(key_string, callback): ...
def parse_debug_object(response): ...
def parse_object(response, infotype): ...
def parse_info(response): ...
SENTINEL_STATE_TYPES: dict[str, type[int]]
def parse_sentinel_state(item): ...
def parse_sentinel_master(response): ...
def parse_sentinel_masters(response): ...
def parse_sentinel_slaves_and_sentinels(response): ...
def parse_sentinel_get_master(response): ...
def pairs_to_dict(response, decode_keys: bool = False, decode_string_values: bool = False): ...
def pairs_to_dict_typed(response, type_info): ...
def zset_score_pairs(response, **options): ...
def sort_return_tuples(response, **options): ...
def int_or_none(response): ...
def float_or_none(response): ...
def bool_ok(response): ...
def parse_client_list(response, **options): ...
def parse_config_get(response, **options): ...
def parse_scan(response, **options): ...
def parse_hscan(response, **options): ...
def parse_zscan(response, **options): ...
def parse_slowlog_get(response, **options): ...
_LockType = TypeVar("_LockType")
class AbstractRedis:
RESPONSE_CALLBACKS: dict[str, Any]
class Redis(AbstractRedis, RedisModuleCommands, CoreCommands[_StrType], SentinelCommands):
@overload
@classmethod
def from_url(
cls,
url: str,
*,
host: str | None = ...,
port: int | None = ...,
db: int | None = ...,
password: str | None = ...,
socket_timeout: float | None = ...,
socket_connect_timeout: float | None = ...,
socket_keepalive: bool | None = ...,
socket_keepalive_options: Mapping[str, int | str] | None = ...,
connection_pool: ConnectionPool | None = ...,
unix_socket_path: str | None = ...,
encoding: str = ...,
encoding_errors: str = ...,
charset: str | None = ...,
errors: str | None = ...,
decode_responses: Literal[True],
retry_on_timeout: bool = ...,
retry_on_error: list[type[RedisError]] | None = ...,
ssl: bool = ...,
ssl_keyfile: str | None = ...,
ssl_certfile: str | None = ...,
ssl_cert_reqs: str | int | None = ...,
ssl_ca_certs: str | None = ...,
ssl_ca_path: Incomplete | None = None,
ssl_ca_data: Incomplete | None = None,
ssl_check_hostname: bool = ...,
ssl_password: Incomplete | None = None,
ssl_validate_ocsp: bool = False,
ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1
ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1
ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1
max_connections: int | None = ...,
single_connection_client: bool = ...,
health_check_interval: float = ...,
client_name: str | None = ...,
username: str | None = ...,
retry: Retry | None = ...,
redis_connect_func: _ConnectFunc | None = None,
credential_provider: CredentialProvider | None = None,
) -> Redis[str]: ...
@overload
@classmethod
def from_url(
cls,
url: str,
*,
host: str | None = ...,
port: int | None = ...,
db: int | None = ...,
password: str | None = ...,
socket_timeout: float | None = ...,
socket_connect_timeout: float | None = ...,
socket_keepalive: bool | None = ...,
socket_keepalive_options: Mapping[str, int | str] | None = ...,
connection_pool: ConnectionPool | None = ...,
unix_socket_path: str | None = ...,
encoding: str = ...,
encoding_errors: str = ...,
charset: str | None = ...,
errors: str | None = ...,
decode_responses: Literal[False] = False,
retry_on_timeout: bool = ...,
retry_on_error: list[type[RedisError]] | None = ...,
ssl: bool = ...,
ssl_keyfile: str | None = ...,
ssl_certfile: str | None = ...,
ssl_cert_reqs: str | int | None = ...,
ssl_ca_certs: str | None = ...,
ssl_ca_path: Incomplete | None = None,
ssl_ca_data: Incomplete | None = None,
ssl_check_hostname: bool = ...,
ssl_password: Incomplete | None = None,
ssl_validate_ocsp: bool = False,
ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1
ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1
ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1
max_connections: int | None = ...,
single_connection_client: bool = ...,
health_check_interval: float = ...,
client_name: str | None = ...,
username: str | None = ...,
retry: Retry | None = ...,
redis_connect_func: _ConnectFunc | None = None,
credential_provider: CredentialProvider | None = None,
) -> Redis[bytes]: ...
connection_pool: Any
response_callbacks: Any
@overload
def __init__(
self: Redis[str],
host: str,
port: int,
db: int,
password: str | None,
socket_timeout: float | None,
socket_connect_timeout: float | None,
socket_keepalive: bool | None,
socket_keepalive_options: Mapping[str, int | str] | None,
connection_pool: ConnectionPool | None,
unix_socket_path: str | None,
encoding: str,
encoding_errors: str,
charset: str | None,
errors: str | None,
decode_responses: Literal[True],
retry_on_timeout: bool = False,
retry_on_error: list[type[RedisError]] | None = None,
ssl: bool = False,
ssl_keyfile: str | None = None,
ssl_certfile: str | None = None,
ssl_cert_reqs: str | int | None = "required",
ssl_ca_certs: str | None = None,
ssl_ca_path: Incomplete | None = None,
ssl_ca_data: Incomplete | None = None,
ssl_check_hostname: bool = False,
ssl_password: Incomplete | None = None,
ssl_validate_ocsp: bool = False,
ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1
ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1
ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1
max_connections: int | None = None,
single_connection_client: bool = False,
health_check_interval: float = 0,
client_name: str | None = None,
username: str | None = None,
retry: Retry | None = None,
redis_connect_func: _ConnectFunc | None = None,
credential_provider: CredentialProvider | None = None,
) -> None: ...
@overload
def __init__(
self: Redis[str],
host: str = "localhost",
port: int = 6379,
db: int = 0,
password: str | None = None,
socket_timeout: float | None = None,
socket_connect_timeout: float | None = None,
socket_keepalive: bool | None = None,
socket_keepalive_options: Mapping[str, int | str] | None = None,
connection_pool: ConnectionPool | None = None,
unix_socket_path: str | None = None,
encoding: str = "utf-8",
encoding_errors: str = "strict",
charset: str | None = None,
errors: str | None = None,
*,
decode_responses: Literal[True],
retry_on_timeout: bool = False,
retry_on_error: list[type[RedisError]] | None = None,
ssl: bool = False,
ssl_keyfile: str | None = None,
ssl_certfile: str | None = None,
ssl_cert_reqs: str | int | None = "required",
ssl_ca_certs: str | None = None,
ssl_ca_data: Incomplete | None = None,
ssl_check_hostname: bool = False,
ssl_password: Incomplete | None = None,
ssl_validate_ocsp: bool = False,
ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1
ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1
ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1
max_connections: int | None = None,
single_connection_client: bool = False,
health_check_interval: float = 0,
client_name: str | None = None,
username: str | None = None,
retry: Retry | None = None,
redis_connect_func: _ConnectFunc | None = None,
credential_provider: CredentialProvider | None = None,
) -> None: ...
@overload
def __init__(
self: Redis[bytes],
host: str = "localhost",
port: int = 6379,
db: int = 0,
password: str | None = None,
socket_timeout: float | None = None,
socket_connect_timeout: float | None = None,
socket_keepalive: bool | None = None,
socket_keepalive_options: Mapping[str, int | str] | None = None,
connection_pool: ConnectionPool | None = None,
unix_socket_path: str | None = None,
encoding: str = "utf-8",
encoding_errors: str = "strict",
charset: str | None = None,
errors: str | None = None,
decode_responses: Literal[False] = False,
retry_on_timeout: bool = False,
retry_on_error: list[type[RedisError]] | None = None,
ssl: bool = False,
ssl_keyfile: str | None = None,
ssl_certfile: str | None = None,
ssl_cert_reqs: str | int | None = "required",
ssl_ca_certs: str | None = None,
ssl_ca_data: Incomplete | None = None,
ssl_check_hostname: bool = False,
ssl_password: Incomplete | None = None,
ssl_validate_ocsp: bool = False,
ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1
ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1
ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1
max_connections: int | None = None,
single_connection_client: bool = False,
health_check_interval: float = 0,
client_name: str | None = None,
username: str | None = None,
retry: Retry | None = None,
redis_connect_func: _ConnectFunc | None = None,
credential_provider: CredentialProvider | None = None,
) -> None: ...
def get_encoder(self): ...
def get_connection_kwargs(self): ...
def set_response_callback(self, command, callback): ...
def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ...
def transaction(self, func, *watches, **kwargs): ...
@overload
def lock(
self,
name: _Key,
timeout: float | None = None,
sleep: float = 0.1,
blocking: bool = True,
blocking_timeout: float | None = None,
lock_class: None = None,
thread_local: bool = True,
) -> Lock: ...
@overload
def lock(
self,
name: _Key,
timeout: float | None,
sleep: float,
blocking: bool,
blocking_timeout: float | None,
lock_class: type[_LockType],
thread_local: bool = True,
) -> _LockType: ...
@overload
def lock(
self,
name: _Key,
timeout: float | None = None,
sleep: float = 0.1,
blocking: bool = True,
blocking_timeout: float | None = None,
*,
lock_class: type[_LockType],
thread_local: bool = True,
) -> _LockType: ...
def pubsub(self, *, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ...
def execute_command(self, *args, **options: _CommandOptions): ...
def parse_response(self, connection, command_name, **options: _ParseResponseOptions): ...
def monitor(self) -> Monitor: ...
def __enter__(self) -> Redis[_StrType]: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
) -> None: ...
def __del__(self) -> None: ...
def close(self) -> None: ...
def client(self) -> Redis[_StrType]: ...
StrictRedis = Redis
class PubSub:
PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, str]]
UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, str]]
HEALTH_CHECK_MESSAGE: ClassVar[str]
connection_pool: Any
shard_hint: Any
ignore_subscribe_messages: Any
connection: Any
subscribed_event: threading.Event
encoder: Any
health_check_response_b: bytes
health_check_response: list[str] | list[bytes]
def __init__(
self,
connection_pool,
shard_hint: Incomplete | None = None,
ignore_subscribe_messages: bool = False,
encoder: Incomplete | None = None,
) -> None: ...
def __enter__(self) -> Self: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
) -> None: ...
def __del__(self): ...
channels: Any
patterns: Any
def reset(self): ...
def close(self) -> None: ...
def on_connect(self, connection): ...
@property
def subscribed(self): ...
def execute_command(self, *args): ...
def clean_health_check_responses(self) -> None: ...
def parse_response(self, block: bool = True, timeout: float = 0): ...
def is_health_check_response(self, response) -> bool: ...
def check_health(self) -> None: ...
def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ...
def punsubscribe(self, *args: _Key) -> None: ...
def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ...
def unsubscribe(self, *args: _Key) -> None: ...
def listen(self): ...
def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0) -> dict[str, Any] | None: ...
def handle_message(self, response, ignore_subscribe_messages: bool = False) -> dict[str, Any] | None: ...
def run_in_thread(self, sleep_time: float = 0, daemon: bool = False, exception_handler: Incomplete | None = None): ...
def ping(self, message: _Value | None = None) -> None: ...
class PubSubWorkerThread(threading.Thread):
daemon: Any
pubsub: Any
sleep_time: Any
exception_handler: Any
def __init__(self, pubsub, sleep_time, daemon: bool = False, exception_handler: Incomplete | None = None) -> None: ...
def run(self) -> None: ...
def stop(self) -> None: ...
class Pipeline(Redis[_StrType]):
UNWATCH_COMMANDS: Any
connection_pool: Any
connection: Any
response_callbacks: Any
transaction: bool
shard_hint: Any
watching: bool
command_stack: Any
scripts: Any
explicit_transaction: Any
def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ...
def __enter__(self) -> Pipeline[_StrType]: ...
def __exit__(
self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
) -> None: ...
def __del__(self) -> None: ...
def __len__(self) -> int: ...
def __bool__(self) -> bool: ...
def discard(self) -> None: ...
def reset(self) -> None: ...
def multi(self) -> None: ...
def execute_command(self, *args, **options): ...
def immediate_execute_command(self, *args, **options): ...
def pipeline_execute_command(self, *args, **options): ...
def raise_first_error(self, commands, response): ...
def annotate_exception(self, exception, number, command): ...
def parse_response(self, connection, command_name, **options): ...
def load_scripts(self): ...
def execute(self, raise_on_error: bool = True) -> list[Any]: ...
def watch(self, *names: _Key) -> bool: ...
def unwatch(self) -> bool: ...
# in the Redis implementation, the following methods are inherited from client.
def set_response_callback(self, command, callback): ...
def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ...
def acl_cat(self, category: str | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def acl_deluser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override]
def acl_genpass(self, bits: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def acl_getuser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override]
def acl_list(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def acl_load(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def acl_setuser( # type: ignore[override]
self,
username: str,
enabled: bool = False,
nopass: bool = False,
passwords: Sequence[str] | None = None,
hashed_passwords: Sequence[str] | None = None,
categories: Sequence[str] | None = None,
commands: Sequence[str] | None = None,
keys: Sequence[str] | None = None,
channels: Iterable[ChannelT] | None = None,
selectors: Iterable[tuple[str, KeyT]] | None = None,
reset: bool = False,
reset_keys: bool = False,
reset_channels: bool = False,
reset_passwords: bool = False,
**kwargs: _CommandOptions,
) -> Pipeline[_StrType]: ...
def acl_users(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def acl_whoami(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def bgrewriteaof(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def bgsave(self, schedule: bool = True) -> Pipeline[_StrType]: ... # type: ignore[override]
def client_id(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def client_kill(self, address: str) -> Pipeline[_StrType]: ... # type: ignore[override]
def client_list(self, _type: str | None = None, client_id: list[str] = []) -> Pipeline[_StrType]: ... # type: ignore[override]
def client_getname(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def client_setname(self, name: str) -> Pipeline[_StrType]: ... # type: ignore[override]
def readwrite(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def readonly(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ...
def config_set(
self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions
) -> Pipeline[_StrType]: ...
def config_resetstat(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def config_rewrite(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def dbsize(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def debug_object(self, key) -> Pipeline[_StrType]: ... # type: ignore[override]
def echo(self, value) -> Pipeline[_StrType]: ... # type: ignore[override]
def flushall(self, asynchronous: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override]
def flushdb(self, asynchronous: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override]
def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... # type: ignore[override]
def lastsave(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def object(self, infotype, key) -> Pipeline[_StrType]: ... # type: ignore[override]
def ping(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def save(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ...
def sentinel_master(self, service_name) -> Pipeline[_StrType]: ...
def sentinel_masters(self) -> Pipeline[_StrType]: ...
def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ...
def sentinel_remove(self, name) -> Pipeline[_StrType]: ...
def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ...
def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ...
def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ...
def slaveof(self, host=None, port=None) -> Pipeline[_StrType]: ... # type: ignore[override]
def slowlog_get(self, num=None) -> Pipeline[_StrType]: ... # type: ignore[override]
def slowlog_len(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def slowlog_reset(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def time(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def append(self, key, value) -> Pipeline[_StrType]: ...
def bitcount( # type: ignore[override]
self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None
) -> Pipeline[_StrType]: ...
def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ...
def bitpos(self, key, bit, start=None, end=None, mode: str | None = None) -> Pipeline[_StrType]: ...
def decr(self, name, amount=1) -> Pipeline[_StrType]: ... # type: ignore[override]
def delete(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def __delitem__(self, _Key) -> None: ...
def dump(self, name) -> Pipeline[_StrType]: ... # type: ignore[override]
def exists(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def expire( # type: ignore[override]
self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
) -> Pipeline[_StrType]: ...
def expireat(
self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
) -> Pipeline[_StrType]: ...
def get(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def __getitem__(self, name) -> Pipeline[_StrType]: ...
def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ... # type: ignore[override]
def getrange(self, key, start, end) -> Pipeline[_StrType]: ...
def getset(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override]
def incr(self, name, amount=1) -> Pipeline[_StrType]: ... # type: ignore[override]
def incrby(self, name, amount=1) -> Pipeline[_StrType]: ... # type: ignore[override]
def incrbyfloat(self, name, amount=1.0) -> Pipeline[_StrType]: ... # type: ignore[override]
def keys(self, pattern: _Key = "*") -> Pipeline[_StrType]: ... # type: ignore[override]
def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override]
def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override]
def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ... # type: ignore[override]
def persist(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def pexpire( # type: ignore[override]
self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
) -> Pipeline[_StrType]: ...
def pexpireat( # type: ignore[override]
self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
) -> Pipeline[_StrType]: ...
def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ...
def pttl(self, name) -> Pipeline[_StrType]: ... # type: ignore[override]
def randomkey(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def rename(self, src, dst) -> Pipeline[_StrType]: ...
def renamenx(self, src, dst) -> Pipeline[_StrType]: ...
def restore(
self,
name,
ttl,
value,
replace: bool = False,
absttl: bool = False,
idletime: Incomplete | None = None,
frequency: Incomplete | None = None,
) -> Pipeline[_StrType]: ...
def set( # type: ignore[override]
self,
name: _Key,
value: _Value,
ex: None | int | timedelta = None,
px: None | int | timedelta = None,
nx: bool = False,
xx: bool = False,
keepttl: bool = False,
get: bool = False,
exat: Incomplete | None = None,
pxat: Incomplete | None = None,
) -> Pipeline[_StrType]: ...
def __setitem__(self, name, value) -> None: ...
def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ... # type: ignore[override]
def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def setnx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override]
def setrange(self, name, offset, value) -> Pipeline[_StrType]: ...
def strlen(self, name) -> Pipeline[_StrType]: ...
def substr(self, name, start, end=-1) -> Pipeline[_StrType]: ...
def ttl(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def type(self, name) -> Pipeline[_StrType]: ...
def unlink(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def blmove( # type: ignore[override]
self,
first_list: _Key,
second_list: _Key,
timeout: float,
src: Literal["LEFT", "RIGHT"] = "LEFT",
dest: Literal["LEFT", "RIGHT"] = "RIGHT",
) -> Pipeline[_StrType]: ...
def blpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override]
def brpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override]
def brpoplpush(self, src, dst, timeout=0) -> Pipeline[_StrType]: ...
def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ... # type: ignore[override]
def linsert( # type: ignore[override]
self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
) -> Pipeline[_StrType]: ...
def llen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def lmove( # type: ignore[override]
self,
first_list: _Key,
second_list: _Key,
src: Literal["LEFT", "RIGHT"] = "LEFT",
dest: Literal["LEFT", "RIGHT"] = "RIGHT",
) -> Pipeline[_StrType]: ...
def lpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def lpushx(self, name, value) -> Pipeline[_StrType]: ...
def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore[override]
def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore[override]
def rpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ...
def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def rpushx(self, name, value) -> Pipeline[_StrType]: ...
def sort( # type: ignore[override]
self,
name: _Key,
start: int | None = None,
num: int | None = None,
by: _Key | None = None,
get: _Key | Sequence[_Key] | None = None,
desc: bool = False,
alpha: bool = False,
store: _Key | None = None,
groups: bool = False,
) -> Pipeline[_StrType]: ...
def scan( # type: ignore[override]
self, cursor: int = 0, match: _Key | None = None, count: int | None = None, _type: str | None = None
) -> Pipeline[_StrType]: ...
def scan_iter(self, match: _Key | None = None, count: int | None = None, _type: str | None = None) -> Iterator[Any]: ... # type: ignore[override]
def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ...
def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def hscan_iter(self, name, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ...
def zscan_iter(
self, name: _Key, match: _Key | None = None, count: int | None = None, score_cast_func: Callable[[_StrType], Any] = ...
) -> Iterator[Any]: ...
def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def scard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def smembers(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def spop(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def srandmember(self, name: _Key, number: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ...
def xadd(
self,
name,
fields,
id="*",
maxlen=None,
approximate: bool = True,
nomkstream: bool = False,
minid: Incomplete | None = None,
limit: int | None = None,
) -> Pipeline[_StrType]: ...
def xclaim(
self,
name,
groupname,
consumername,
min_idle_time,
message_ids,
idle=None,
time=None,
retrycount=None,
force=False,
justid=False,
) -> Pipeline[_StrType]: ...
def xdel(self, name, *ids) -> Pipeline[_StrType]: ...
def xgroup_create(self, name, groupname, id="$", mkstream=False, entries_read: int | None = None) -> Pipeline[_StrType]: ...
def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ...
def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ...
def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Pipeline[_StrType]: ...
def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ...
def xinfo_groups(self, name) -> Pipeline[_StrType]: ...
def xinfo_stream(self, name, full: bool = False) -> Pipeline[_StrType]: ...
def xlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def xpending(self, name, groupname) -> Pipeline[_StrType]: ...
def xpending_range(
self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
) -> Pipeline[_StrType]: ...
def xrange(self, name, min="-", max="+", count=None) -> Pipeline[_StrType]: ...
def xread(self, streams, count=None, block=None) -> Pipeline[_StrType]: ...
def xreadgroup(self, groupname, consumername, streams, count=None, block=None, noack=False) -> Pipeline[_StrType]: ...
def xrevrange(self, name, max="+", min="-", count=None) -> Pipeline[_StrType]: ...
def xtrim(
self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
) -> Pipeline[_StrType]: ...
def zadd( # type: ignore[override]
self,
name: _Key,
mapping: Mapping[_Key, _Value],
nx: bool = False,
xx: bool = False,
ch: bool = False,
incr: bool = False,
gt: Incomplete | None = False,
lt: Incomplete | None = False,
) -> Pipeline[_StrType]: ...
def zcard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zinterstore( # type: ignore[override]
self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
) -> Pipeline[_StrType]: ...
def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zpopmax(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def zpopmin(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override]
def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override]
def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override]
def zrange( # type: ignore[override]
self,
name: _Key,
start: int,
end: int,
desc: bool = False,
withscores: bool = False,
score_cast_func: Callable[[_StrType], Any] = ...,
byscore: bool = False,
bylex: bool = False,
offset: int | None = None,
num: int | None = None,
) -> Pipeline[_StrType]: ...
def zrangebylex( # type: ignore[override]
self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
) -> Pipeline[_StrType]: ...
def zrangebyscore( # type: ignore[override]
self,
name: _Key,
min: _Value,
max: _Value,
start: int | None = None,
num: int | None = None,
withscores: bool = False,
score_cast_func: Callable[[_StrType], Any] = ...,
) -> Pipeline[_StrType]: ...
def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override]
def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zrevrange( # type: ignore[override]
self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[_StrType], Any] = ...
) -> Pipeline[_StrType]: ...
def zrevrangebyscore( # type: ignore[override]
self,
name: _Key,
max: _Value,
min: _Value,
start: int | None = None,
num: int | None = None,
withscores: bool = False,
score_cast_func: Callable[[_StrType], Any] = ...,
) -> Pipeline[_StrType]: ...
def zrevrangebylex( # type: ignore[override]
self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
) -> Pipeline[_StrType]: ...
def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override]
def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def zunionstore( # type: ignore[override]
self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
) -> Pipeline[_StrType]: ...
def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def pfcount(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def hgetall(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Pipeline[_StrType]: ... # type: ignore[override]
def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Pipeline[_StrType]: ... # type: ignore[override]
def hkeys(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def hlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
@overload # type: ignore[override]
def hset(
self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
) -> Pipeline[_StrType]: ...
@overload
def hset(
self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
) -> Pipeline[_StrType]: ...
@overload
def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Pipeline[_StrType]: ...
def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override]
def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override]
def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def hvals(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
def script_exists(self, *args) -> Pipeline[_StrType]: ...
def script_flush(self, sync_type: Incomplete | None = None) -> Pipeline[_StrType]: ...
def script_kill(self) -> Pipeline[_StrType]: ...
def script_load(self, script) -> Pipeline[_StrType]: ...
def pubsub_channels(self, pattern: _Key = "*") -> Pipeline[_StrType]: ... # type: ignore[override]
def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override]
def pubsub_numpat(self) -> Pipeline[_StrType]: ... # type: ignore[override]
def monitor(self) -> Monitor: ...
def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ... # type: ignore[override]
def client(self) -> Any: ...
class Monitor:
    # Stub for the MONITOR helper: a context manager that yields parsed command
    # events from a dedicated connection (taken from `connection_pool`).
    command_re: Pattern[str]  # regex used to parse monitored command lines
    monitor_re: Pattern[str]  # regex used to parse the surrounding monitor frame
    def __init__(self, connection_pool) -> None: ...
    def __enter__(self) -> Self: ...
    def __exit__(self, *args: Unused) -> None: ...
    # Returns one parsed command event as a dict (keys not declared here).
    def next_command(self) -> dict[str, Any]: ...
    # Yields parsed command events indefinitely.
    def listen(self) -> Iterable[dict[str, Any]]: ...

View File

@@ -1,265 +0,0 @@
from _typeshed import Incomplete, Unused
from collections.abc import Callable, Iterable, Sequence
from threading import Lock
from types import TracebackType
from typing import Any, ClassVar, Literal, NoReturn, Protocol
from typing_extensions import Self
from redis.client import CaseInsensitiveDict, PubSub, Redis, _ParseResponseOptions
from redis.commands import CommandsParser, RedisClusterCommands
from redis.commands.core import _StrType
from redis.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable
from redis.exceptions import MovedError, RedisError
from redis.retry import Retry
from redis.typing import EncodableT
# Module-level helpers for redis.cluster.
def get_node_name(host: str, port: str | int) -> str: ...
def get_connection(redis_node: Redis[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ...
# Response-callback helpers; `command` is accepted but unused in some of them.
def parse_scan_result(command: Unused, res, **options): ...
def parse_pubsub_numsub(command: Unused, res, **options: Unused): ...
# Maps (start_slot, end_slot) ranges to per-range node info dicts.
def parse_cluster_slots(resp, **options) -> dict[tuple[int, int], dict[str, Any]]: ...
def parse_cluster_myshardid(resp: bytes, **options: Unused) -> str: ...
# Module constants (values live in the implementation, not the stub).
PRIMARY: str
REPLICA: str
SLOT_ID: str
REDIS_ALLOWED_KEYS: tuple[str, ...]
KWARGS_DISABLED_KEYS: tuple[str, ...]
PIPELINE_BLOCKED_COMMANDS: tuple[str, ...]
# Filters/normalizes Redis client kwargs for cluster use.
def cleanup_kwargs(**kwargs: Any) -> dict[str, Any]: ...
# It uses `DefaultParser` in real life, but it is a dynamic base class.
class ClusterParser(BaseParser): ...
class AbstractRedisCluster:
    # Shared class-level configuration for cluster clients; every attribute is
    # a ClassVar populated in the implementation.
    RedisClusterRequestTTL: ClassVar[int]
    # Node-targeting flag values (e.g. "primaries", "replicas", ...).
    PRIMARIES: ClassVar[str]
    REPLICAS: ClassVar[str]
    ALL_NODES: ClassVar[str]
    RANDOM: ClassVar[str]
    DEFAULT_NODE: ClassVar[str]
    NODE_FLAGS: ClassVar[set[str]]
    COMMAND_FLAGS: ClassVar[dict[str, str]]
    CLUSTER_COMMANDS_RESPONSE_CALLBACKS: ClassVar[dict[str, Any]]
    RESULT_CALLBACKS: ClassVar[dict[str, Callable[[Incomplete, Incomplete], Incomplete]]]
    # Exception types for which a command is retried.
    ERRORS_ALLOW_RETRY: ClassVar[tuple[type[RedisError], ...]]
class RedisCluster(AbstractRedisCluster, RedisClusterCommands[_StrType]):
    # Synchronous cluster client; generic over the decoded string type.
    user_on_connect_func: Callable[[Connection], object] | None
    encoder: Encoder
    cluster_error_retry_attempts: int
    command_flags: dict[str, str]
    node_flags: set[str]
    read_from_replicas: bool
    reinitialize_counter: int
    reinitialize_steps: int
    nodes_manager: NodesManager
    cluster_response_callbacks: CaseInsensitiveDict[str, Callable[..., Incomplete]]
    result_callbacks: CaseInsensitiveDict[str, Callable[[Incomplete, Incomplete], Incomplete]]
    commands_parser: CommandsParser
    def __init__( # TODO: make @overloads, either `url` or `host:port` can be passed
        self,
        host: str | None = None,
        port: int | None = 6379,
        startup_nodes: list[ClusterNode] | None = None,
        cluster_error_retry_attempts: int = 3,
        retry: Retry | None = None,
        require_full_coverage: bool = False,
        reinitialize_steps: int = 5,
        read_from_replicas: bool = False,
        dynamic_startup_nodes: bool = True,
        url: str | None = None,
        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
        **kwargs,
    ) -> None: ...
    # Context-manager protocol; __exit__ returns None (exceptions propagate).
    def __enter__(self) -> Self: ...
    def __exit__(
        self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...
    def __del__(self) -> None: ...
    def disconnect_connection_pools(self) -> None: ...
    # Alternate constructor from a redis:// style URL.
    @classmethod
    def from_url(cls, url: str, **kwargs) -> Self: ...
    def on_connect(self, connection: Connection) -> None: ...
    def get_redis_connection(self, node: ClusterNode) -> Redis[Any]: ...
    # Node lookup: by host/port or by node name; None when not found.
    def get_node(
        self, host: str | None = None, port: str | int | None = None, node_name: str | None = None
    ) -> ClusterNode | None: ...
    def get_primaries(self) -> list[ClusterNode]: ...
    def get_replicas(self) -> list[ClusterNode]: ...
    def get_random_node(self) -> ClusterNode: ...
    def get_nodes(self) -> list[ClusterNode]: ...
    def get_node_from_key(self, key: _Encodable, replica: bool = False) -> ClusterNode | None: ...
    def get_default_node(self) -> ClusterNode | None: ...
    def set_default_node(self, node: ClusterNode | None) -> bool: ...
    # The following return types are left unannotated in this stub.
    def monitor(self, target_node: Incomplete | None = None): ...
    def pubsub(
        self, node: Incomplete | None = None, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs
    ): ...
    def pipeline(self, transaction: Incomplete | None = None, shard_hint: Incomplete | None = None): ...
    def lock(
        self,
        name: str,
        timeout: float | None = None,
        sleep: float = 0.1,
        blocking: bool = True,
        blocking_timeout: float | None = None,
        lock_class: type[Incomplete] | None = None,
        thread_local: bool = True,
    ): ...
    # Maps a key to its cluster hash slot.
    def keyslot(self, key: _Encodable) -> int: ...
    def determine_slot(self, *args): ...
    def get_encoder(self) -> Encoder: ...
    def get_connection_kwargs(self) -> dict[str, Any]: ...
    def execute_command(self, *args, **kwargs): ...
    def close(self) -> None: ...
class ClusterNode:
    # Descriptor for one node in the cluster topology.
    host: str
    port: int
    name: str
    server_type: str | None  # presumably PRIMARY/REPLICA — see module constants
    redis_connection: Redis[Incomplete] | None
    def __init__(
        self, host: str, port: int, server_type: str | None = None, redis_connection: Redis[Incomplete] | None = None
    ) -> None: ...
    # NOTE(review): __eq__ is declared without a matching __hash__ here.
    def __eq__(self, obj: object) -> bool: ...
    def __del__(self) -> None: ...
class LoadBalancer:
    # Round-robin selector over a primary's server list.
    primary_to_idx: dict[str, int]  # per-primary cursor position
    start_index: int
    def __init__(self, start_index: int = 0) -> None: ...
    def get_server_index(self, primary: str, list_size: int) -> int: ...
    def reset(self) -> None: ...
class NodesManager:
    # Maintains the cluster topology: node cache, slot->nodes mapping and the
    # startup-node set used to (re)discover the cluster.
    nodes_cache: dict[str, ClusterNode]
    slots_cache: dict[str, list[ClusterNode]]
    startup_nodes: dict[str, ClusterNode]
    default_node: ClusterNode | None
    from_url: bool
    connection_pool_class: type[ConnectionPool]
    connection_kwargs: dict[str, Incomplete]  # TODO: could be a TypedDict
    read_load_balancer: LoadBalancer
    # Optional hook rewriting (host, port) pairs (e.g. for NAT/port-forwarding).
    address_remap: Callable[[str, int], tuple[str, int]] | None
    def __init__(
        self,
        startup_nodes: Iterable[ClusterNode],
        from_url: bool = False,
        require_full_coverage: bool = False,
        lock: Lock | None = None,
        dynamic_startup_nodes: bool = True,
        connection_pool_class: type[ConnectionPool] = ...,
        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
        **kwargs,  # TODO: same type as connection_kwargs
    ) -> None: ...
    def get_node(
        self, host: str | None = None, port: int | str | None = None, node_name: str | None = None
    ) -> ClusterNode | None: ...
    def update_moved_exception(self, exception: MovedError) -> None: ...
    def get_node_from_slot(self, slot: str, read_from_replicas: bool = False, server_type: str | None = None) -> ClusterNode: ...
    def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ...
    def populate_startup_nodes(self, nodes: Iterable[ClusterNode]) -> None: ...
    def check_slots_coverage(self, slots_cache: dict[str, list[ClusterNode]]) -> bool: ...
    def create_redis_connections(self, nodes: Iterable[ClusterNode]) -> None: ...
    def create_redis_node(self, host: str, port: int | str, **kwargs: Any) -> Redis[Incomplete]: ...
    def initialize(self) -> None: ...
    def close(self) -> None: ...
    def reset(self) -> None: ...
    # Applies address_remap when set; otherwise returns (host, port) unchanged
    # — TODO confirm against implementation.
    def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ...
class ClusterPubSub(PubSub):
    # PubSub variant bound to a specific cluster node.
    node: ClusterNode | None
    cluster: RedisCluster[Any]
    def __init__(
        self,
        redis_cluster: RedisCluster[Any],
        node: ClusterNode | None = None,
        host: str | None = None,
        port: int | None = None,
        **kwargs,
    ) -> None: ...
    # Selects which node this pubsub instance talks to (by node or host/port).
    def set_pubsub_node(
        self, cluster: RedisCluster[Any], node: ClusterNode | None = None, host: str | None = None, port: int | None = None
    ) -> None: ...
    def get_pubsub_node(self) -> ClusterNode | None: ...
    def execute_command(self, *args, **kwargs) -> None: ...
    def get_redis_connection(self) -> Redis[Any] | None: ...
class ClusterPipeline(RedisCluster[_StrType]):
    # Pipeline for cluster clients: commands are buffered in `command_stack`
    # and dispatched per-node on execute().
    command_stack: list[Incomplete]
    nodes_manager: Incomplete
    refresh_table_asap: bool
    result_callbacks: Incomplete
    startup_nodes: Incomplete
    read_from_replicas: bool
    command_flags: Incomplete
    cluster_response_callbacks: Incomplete
    cluster_error_retry_attempts: int
    reinitialize_counter: int
    reinitialize_steps: int
    encoder: Encoder
    commands_parser: Incomplete
    def __init__(
        self,
        nodes_manager,
        commands_parser,
        result_callbacks: Incomplete | None = None,
        cluster_response_callbacks: Incomplete | None = None,
        startup_nodes: Incomplete | None = None,
        read_from_replicas: bool = False,
        cluster_error_retry_attempts: int = 3,
        reinitialize_steps: int = 5,
        lock: Lock | None = None,
        **kwargs,
    ) -> None: ...
    # len() reflects the number of buffered commands; bool() is always True.
    def __len__(self) -> int: ...
    def __bool__(self) -> Literal[True]: ...
    def execute_command(self, *args, **kwargs): ...
    def pipeline_execute_command(self, *args, **options): ...
    def raise_first_error(self, stack) -> None: ...
    def annotate_exception(self, exception, number, command) -> None: ...
    def execute(self, raise_on_error: bool = True): ...
    scripts: set[Any]  # is only set in `reset()`
    watching: bool  # is only set in `reset()`
    explicit_transaction: bool  # is only set in `reset()`
    def reset(self) -> None: ...
    def send_cluster_commands(self, stack, raise_on_error: bool = True, allow_redirections: bool = True): ...
    # The following base-class operations are unsupported in a cluster
    # pipeline; they are annotated -> None (the implementation raises).
    def eval(self) -> None: ...
    def multi(self) -> None: ...
    def immediate_execute_command(self, *args, **options) -> None: ...
    def load_scripts(self) -> None: ...
    def watch(self, *names) -> None: ...
    def unwatch(self) -> None: ...
    def script_load_for_pipeline(self, *args, **kwargs) -> None: ...
    def delete(self, *names): ...
def block_pipeline_command(name: str) -> Callable[..., NoReturn]: ...
class PipelineCommand:
    # One buffered command: its encoded args, parse options, position in the
    # pipeline, and (after execution) its result or raised exception.
    args: Sequence[EncodableT]
    options: _ParseResponseOptions
    position: int | None
    result: Any | Exception | None
    node: Incomplete | None
    asking: bool  # whether an ASKING redirect precedes this command
    def __init__(
        self, args: Sequence[EncodableT], options: _ParseResponseOptions | None = None, position: int | None = None
    ) -> None: ...
# Structural type for the parse_response callable passed to NodeCommands.
class _ParseResponseCallback(Protocol):
    def __call__(self, connection: Connection, command: EncodableT, /, **kwargs) -> Any: ...
class NodeCommands:
    # Batches the pipeline commands destined for a single node: write() sends
    # them over `connection`, read() parses the replies back.
    parse_response: _ParseResponseCallback
    connection_pool: ConnectionPool
    connection: Connection
    commands: list[PipelineCommand]
    def __init__(
        self, parse_response: _ParseResponseCallback, connection_pool: ConnectionPool, connection: Connection
    ) -> None: ...
    def append(self, c: PipelineCommand) -> None: ...
    def write(self) -> None: ...
    def read(self) -> None: ...

View File

@@ -1,17 +0,0 @@
from .cluster import RedisClusterCommands as RedisClusterCommands
from .core import AsyncCoreCommands as AsyncCoreCommands, CoreCommands as CoreCommands
from .helpers import list_or_args as list_or_args
from .parser import CommandsParser as CommandsParser
from .redismodules import RedisModuleCommands as RedisModuleCommands
from .sentinel import AsyncSentinelCommands as AsyncSentinelCommands, SentinelCommands as SentinelCommands
# Explicit public API of redis.commands; matches the `X as X` re-exports.
__all__ = [
    "RedisClusterCommands",
    "CommandsParser",
    "AsyncCoreCommands",
    "CoreCommands",
    "list_or_args",
    "RedisModuleCommands",
    "AsyncSentinelCommands",
    "SentinelCommands",
]

View File

@@ -1,58 +0,0 @@
from typing import Any
from .commands import *
from .info import BFInfo as BFInfo, CFInfo as CFInfo, CMSInfo as CMSInfo, TDigestInfo as TDigestInfo, TopKInfo as TopKInfo
class AbstractBloom:
    # Static helpers shared by the bloom-module command mixins: each appends
    # one optional argument group to a command `params` list in place.
    @staticmethod
    def append_items(params, items) -> None: ...
    @staticmethod
    def append_error(params, error) -> None: ...
    @staticmethod
    def append_capacity(params, capacity) -> None: ...
    @staticmethod
    def append_expansion(params, expansion) -> None: ...
    @staticmethod
    def append_no_scale(params, noScale) -> None: ...
    @staticmethod
    def append_weights(params, weights) -> None: ...
    @staticmethod
    def append_no_create(params, noCreate) -> None: ...
    @staticmethod
    def append_items_and_increments(params, items, increments) -> None: ...
    @staticmethod
    def append_values_and_weights(params, items, weights) -> None: ...
    @staticmethod
    def append_max_iterations(params, max_iterations) -> None: ...
    @staticmethod
    def append_bucket_size(params, bucket_size) -> None: ...
# Client facades binding each command mixin to a concrete client instance;
# all five share the same shape and take the client in __init__.
class CMSBloom(CMSCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class TOPKBloom(TOPKCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class CFBloom(CFCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class TDigestBloom(TDigestCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class BFBloom(BFCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

View File

@@ -1,112 +0,0 @@
from _typeshed import Incomplete
# Redis command-name constants for the probabilistic data-structure modules.
# Bloom filter (BF.*)
BF_RESERVE: str
BF_ADD: str
BF_MADD: str
BF_INSERT: str
BF_EXISTS: str
BF_MEXISTS: str
BF_SCANDUMP: str
BF_LOADCHUNK: str
BF_INFO: str
# Cuckoo filter (CF.*)
CF_RESERVE: str
CF_ADD: str
CF_ADDNX: str
CF_INSERT: str
CF_INSERTNX: str
CF_EXISTS: str
CF_DEL: str
CF_COUNT: str
CF_SCANDUMP: str
CF_LOADCHUNK: str
CF_INFO: str
# Count-min sketch (CMS.*)
CMS_INITBYDIM: str
CMS_INITBYPROB: str
CMS_INCRBY: str
CMS_QUERY: str
CMS_MERGE: str
CMS_INFO: str
# Top-K (TOPK.*)
TOPK_RESERVE: str
TOPK_ADD: str
TOPK_INCRBY: str
TOPK_QUERY: str
TOPK_COUNT: str
TOPK_LIST: str
TOPK_INFO: str
# T-digest (TDIGEST.*)
TDIGEST_CREATE: str
TDIGEST_RESET: str
TDIGEST_ADD: str
TDIGEST_MERGE: str
TDIGEST_CDF: str
TDIGEST_QUANTILE: str
TDIGEST_MIN: str
TDIGEST_MAX: str
TDIGEST_INFO: str
class BFCommands:
    # Bloom-filter (BF.*) command mixin; return types are unannotated.
    def create(self, key, errorRate, capacity, expansion: Incomplete | None = None, noScale: Incomplete | None = None): ...
    def add(self, key, item): ...
    def madd(self, key, *items): ...
    def insert(
        self,
        key,
        items,
        capacity: Incomplete | None = None,
        error: Incomplete | None = None,
        noCreate: Incomplete | None = None,
        expansion: Incomplete | None = None,
        noScale: Incomplete | None = None,
    ): ...
    def exists(self, key, item): ...
    def mexists(self, key, *items): ...
    # scandump/loadchunk implement incremental save/restore of a filter.
    def scandump(self, key, iter): ...
    def loadchunk(self, key, iter, data): ...
    def info(self, key): ...
class CFCommands:
    # Cuckoo-filter (CF.*) command mixin; return types are unannotated.
    def create(
        self,
        key,
        capacity,
        expansion: Incomplete | None = None,
        bucket_size: Incomplete | None = None,
        max_iterations: Incomplete | None = None,
    ): ...
    def add(self, key, item): ...
    def addnx(self, key, item): ...
    def insert(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
    def insertnx(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
    def exists(self, key, item): ...
    def delete(self, key, item): ...
    def count(self, key, item): ...
    def scandump(self, key, iter): ...
    def loadchunk(self, key, iter, data): ...
    def info(self, key): ...
class TOPKCommands:
    # Top-K (TOPK.*) command mixin; return types are unannotated.
    def reserve(self, key, k, width, depth, decay): ...
    def add(self, key, *items): ...
    def incrby(self, key, items, increments): ...
    def query(self, key, *items): ...
    def count(self, key, *items): ...
    def list(self, key, withcount: bool = False): ...
    def info(self, key): ...
class TDigestCommands:
    # T-digest (TDIGEST.*) command mixin; return types are unannotated.
    def create(self, key, compression: int = 100): ...
    def reset(self, key): ...
    def add(self, key, values): ...
    def merge(self, destination_key, num_keys, *keys, compression: int | None = None, override: bool = False): ...
    def min(self, key): ...
    def max(self, key): ...
    def quantile(self, key, quantile, *quantiles): ...
    def cdf(self, key, value, *values): ...
    def info(self, key): ...
class CMSCommands:
    # Count-min-sketch (CMS.*) command mixin; return types are unannotated.
    def initbydim(self, key, width, depth): ...
    def initbyprob(self, key, error, probability): ...
    def incrby(self, key, items, increments): ...
    def query(self, key, *items): ...
    # NOTE(review): mutable default `weights=[]` — harmless in a stub (never
    # executed) but mirrors the runtime signature; confirm against redis-py.
    def merge(self, destKey, numKeys, srcKeys, weights=[]): ...
    def info(self, key): ...

View File

@@ -1,43 +0,0 @@
from typing import Any
# Parsed-INFO result holders for the probabilistic modules; each is built
# from the raw reply `args` and exposes the reply fields as attributes.
class BFInfo:
    capacity: Any
    size: Any
    filterNum: Any
    insertedNum: Any
    expansionRate: Any
    def __init__(self, args) -> None: ...

class CFInfo:
    size: Any
    bucketNum: Any
    filterNum: Any
    insertedNum: Any
    deletedNum: Any
    bucketSize: Any
    expansionRate: Any
    maxIteration: Any
    def __init__(self, args) -> None: ...

class CMSInfo:
    width: Any
    depth: Any
    count: Any
    def __init__(self, args) -> None: ...

class TopKInfo:
    k: Any
    width: Any
    depth: Any
    decay: Any
    def __init__(self, args) -> None: ...

class TDigestInfo:
    compression: Any
    capacity: Any
    mergedNodes: Any
    unmergedNodes: Any
    mergedWeight: Any
    unmergedWeight: Any
    totalCompressions: Any
    def __init__(self, args) -> None: ...

View File

@@ -1,60 +0,0 @@
from _typeshed import Incomplete
from typing import NoReturn
from .core import ACLCommands, DataAccessCommands, ManagementCommands, PubSubCommands, _StrType
class ClusterMultiKeyCommands:
    # Multi-key commands reworked for cluster mode; the *_nonatomic variants
    # signal that keys may live in different slots, so the operation is not
    # performed as one atomic command.
    def mget_nonatomic(self, keys, *args): ...
    def mset_nonatomic(self, mapping): ...
    def exists(self, *keys): ...
    def delete(self, *keys): ...
    def touch(self, *keys): ...
    def unlink(self, *keys): ...
class ClusterManagementCommands(ManagementCommands):
    # Replication/db-swap operations are annotated -> None in cluster mode
    # (presumably unsupported and raising at runtime — confirm in redis-py).
    def slaveof(self, *args, **kwargs) -> None: ...
    def replicaof(self, *args, **kwargs) -> None: ...
    def swapdb(self, *args, **kwargs) -> None: ...
class ClusterDataAccessCommands(DataAccessCommands[_StrType]):
    # STRALGO with cluster-specific handling; return type is unannotated.
    def stralgo(
        self,
        algo,
        value1,
        value2,
        specific_argument: str = "strings",
        len: bool = False,
        idx: bool = False,
        minmatchlen: Incomplete | None = None,
        withmatchlen: bool = False,
        **kwargs,
    ): ...
class RedisClusterCommands(
    ClusterMultiKeyCommands, ClusterManagementCommands, ACLCommands[_StrType], PubSubCommands, ClusterDataAccessCommands[_StrType]
):
    # CLUSTER subcommands; `target_nodes` selects which node(s) receive the
    # command. Return types are unannotated unless noted.
    def cluster_addslots(self, target_node, *slots): ...
    def cluster_countkeysinslot(self, slot_id): ...
    def cluster_count_failure_report(self, node_id): ...
    def cluster_delslots(self, *slots): ...
    def cluster_failover(self, target_node, option: Incomplete | None = None): ...
    def cluster_info(self, target_nodes: Incomplete | None = None): ...
    def cluster_keyslot(self, key): ...
    def cluster_meet(self, host, port, target_nodes: Incomplete | None = None): ...
    def cluster_nodes(self): ...
    def cluster_replicate(self, target_nodes, node_id): ...
    def cluster_reset(self, soft: bool = True, target_nodes: Incomplete | None = None): ...
    def cluster_save_config(self, target_nodes: Incomplete | None = None): ...
    def cluster_get_keys_in_slot(self, slot, num_keys): ...
    def cluster_set_config_epoch(self, epoch, target_nodes: Incomplete | None = None): ...
    def cluster_setslot(self, target_node, node_id, slot_id, state): ...
    def cluster_setslot_stable(self, slot_id): ...
    def cluster_replicas(self, node_id, target_nodes: Incomplete | None = None): ...
    def cluster_slots(self, target_nodes: Incomplete | None = None): ...
    def cluster_myshardid(self, target_nodes: Incomplete | None = None): ...
    def cluster_links(self, target_node): ...
    # NoReturn: these always raise in this client.
    def cluster_flushslots(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
    def cluster_bumpepoch(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
    read_from_replicas: bool
    def readonly(self, target_nodes: Incomplete | None = None): ...
    def readwrite(self, target_nodes: Incomplete | None = None): ...

File diff suppressed because it is too large Load Diff

View File

@@ -1,26 +0,0 @@
from typing import Any
from .commands import GraphCommands as GraphCommands
from .edge import Edge as Edge
from .node import Node as Node
from .path import Path as Path
class Graph(GraphCommands):
    # RedisGraph client: caches nodes/edges and label/relation/property
    # lookup tables keyed by index.
    NAME: Any
    client: Any
    execute_command: Any
    nodes: Any
    edges: Any
    version: int
    def __init__(self, client, name=...) -> None: ...
    @property
    def name(self): ...
    # Index -> string lookups into the cached graph schema.
    def get_label(self, idx): ...
    def get_relation(self, idx): ...
    def get_property(self, idx): ...
    def add_node(self, node) -> None: ...
    def add_edge(self, edge) -> None: ...
    def call_procedure(self, procedure, *args, read_only: bool = False, **kwagrs): ...
    def labels(self): ...
    def relationship_types(self): ...
    def property_keys(self): ...

View File

@@ -1,25 +0,0 @@
from _typeshed import Incomplete
from typing import Any
class GraphCommands:
    # GRAPH.* command mixin; return types are unannotated.
    def commit(self): ...
    version: Any
    def query(
        self,
        q,
        params: Incomplete | None = None,
        timeout: Incomplete | None = None,
        read_only: bool = False,
        profile: bool = False,
    ): ...
    def merge(self, pattern): ...
    def delete(self): ...
    nodes: Any
    edges: Any
    def flush(self) -> None: ...
    def explain(self, query, params: Incomplete | None = None): ...
    def bulk(self, **kwargs) -> None: ...
    def profile(self, query): ...
    def slowlog(self): ...
    def config(self, name, value: Incomplete | None = None, set: bool = False): ...
    def list_keys(self): ...

View File

@@ -1,14 +0,0 @@
from _typeshed import Incomplete
from typing import Any
class Edge:
    # A directed relationship between two graph nodes.
    id: Any
    relation: Any
    properties: Any
    src_node: Any
    dest_node: Any
    def __init__(
        self, src_node, relation, dest_node, edge_id: Incomplete | None = None, properties: Incomplete | None = None
    ) -> None: ...
    def to_string(self): ...
    # NOTE(review): __eq__ declared without __hash__ in this stub.
    def __eq__(self, rhs): ...

View File

@@ -1,5 +0,0 @@
from typing import Any
# Raised when the cached graph schema version disagrees with the server's;
# carries the server-reported version.
class VersionMismatchException(Exception):
    version: Any
    def __init__(self, version) -> None: ...

View File

@@ -1,18 +0,0 @@
from _typeshed import Incomplete
from typing import Any
class Node:
    # A graph node with optional alias, label(s) and properties.
    id: Any
    alias: Any
    label: Any
    labels: Any
    properties: Any
    def __init__(
        self,
        node_id: Incomplete | None = None,
        alias: Incomplete | None = None,
        label: str | list[str] | None = None,
        properties: Incomplete | None = None,
    ) -> None: ...
    def to_string(self): ...
    # NOTE(review): __eq__ declared without __hash__ in this stub.
    def __eq__(self, rhs): ...

View File

@@ -1,18 +0,0 @@
from typing import Any
# An alternating sequence of nodes and edges returned by path queries; stub only.
class Path:
    append_type: Any  # tracks whether a node or an edge is expected next
    def __init__(self, nodes, edges) -> None: ...
    @classmethod
    def new_empty_path(cls): ...
    def nodes(self): ...
    def edges(self): ...
    def get_node(self, index): ...
    def get_relationship(self, index): ...
    def first_node(self): ...
    def last_node(self): ...
    def edge_count(self): ...
    def nodes_count(self): ...
    def add_node(self, node): ...
    def add_edge(self, edge): ...
    def __eq__(self, other): ...

View File

@@ -1,74 +0,0 @@
from typing import Any, ClassVar, Literal
# Keys of the statistics section of a RedisGraph query reply.
LABELS_ADDED: str
NODES_CREATED: str
NODES_DELETED: str
RELATIONSHIPS_DELETED: str
PROPERTIES_SET: str
RELATIONSHIPS_CREATED: str
INDICES_CREATED: str
INDICES_DELETED: str
CACHED_EXECUTION: str
INTERNAL_EXECUTION_TIME: str
STATS: Any  # collection of the statistic keys above
# Column-type discriminators in a RedisGraph result-set header.
class ResultSetColumnTypes:
    COLUMN_UNKNOWN: ClassVar[Literal[0]]
    COLUMN_SCALAR: ClassVar[Literal[1]]
    COLUMN_NODE: ClassVar[Literal[2]]
    COLUMN_RELATION: ClassVar[Literal[3]]
# Scalar-value type tags used inside RedisGraph result cells.
class ResultSetScalarTypes:
    VALUE_UNKNOWN: ClassVar[Literal[0]]
    VALUE_NULL: ClassVar[Literal[1]]
    VALUE_STRING: ClassVar[Literal[2]]
    VALUE_INTEGER: ClassVar[Literal[3]]
    VALUE_BOOLEAN: ClassVar[Literal[4]]
    VALUE_DOUBLE: ClassVar[Literal[5]]
    VALUE_ARRAY: ClassVar[Literal[6]]
    VALUE_EDGE: ClassVar[Literal[7]]
    VALUE_NODE: ClassVar[Literal[8]]
    VALUE_PATH: ClassVar[Literal[9]]
    VALUE_MAP: ClassVar[Literal[10]]
    VALUE_POINT: ClassVar[Literal[11]]
# Parses a raw GRAPH.QUERY reply into header/result_set/statistics; the
# read-only properties expose individual statistics entries. Stub only.
class QueryResult:
    graph: Any
    header: Any
    result_set: Any
    def __init__(self, graph, response, profile: bool = False) -> None: ...
    def parse_results(self, raw_result_set) -> None: ...
    statistics: Any
    def parse_statistics(self, raw_statistics) -> None: ...
    def parse_header(self, raw_result_set): ...
    def parse_records(self, raw_result_set): ...
    def parse_entity_properties(self, props): ...
    def parse_string(self, cell): ...
    def parse_node(self, cell): ...
    def parse_edge(self, cell): ...
    def parse_path(self, cell): ...
    def parse_map(self, cell): ...
    def parse_point(self, cell): ...
    def parse_scalar(self, cell): ...
    def parse_profile(self, response) -> None: ...
    def is_empty(self): ...
    @property
    def labels_added(self): ...
    @property
    def nodes_created(self): ...
    @property
    def nodes_deleted(self): ...
    @property
    def properties_set(self): ...
    @property
    def relationships_created(self): ...
    @property
    def relationships_deleted(self): ...
    @property
    def indices_created(self): ...
    @property
    def indices_deleted(self): ...
    @property
    def cached_execution(self): ...
    @property
    def run_time_ms(self): ...

View File

@@ -1,10 +0,0 @@
# Misc helpers for encoding/decoding RedisGraph command arguments and replies.
def list_or_args(keys, args): ...
def nativestr(x): ...
def delist(x): ...
def parse_to_list(response): ...
def parse_list_to_dict(response): ...
def parse_to_dict(response): ...
def random_string(length: int = 10) -> str: ...
def quote_string(v): ...
def decode_dict_keys(obj): ...
def stringify_param_value(value): ...

View File

@@ -1,15 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...client import Pipeline as ClientPipeline
from .commands import JSONCommands
# RedisJSON module client; pipeline() yields a JSON-aware Pipeline subclass.
class JSON(JSONCommands):
    MODULE_CALLBACKS: dict[str, Any]  # per-command response callbacks
    client: Any
    execute_command: Any
    MODULE_VERSION: Incomplete | None
    def __init__(self, client, version: Incomplete | None = None, decoder=..., encoder=...) -> None: ...
    def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ...

# Pipeline that also exposes the RedisJSON commands.
class Pipeline(JSONCommands, ClientPipeline[Incomplete]): ...  # type: ignore[misc]

View File

@@ -1,32 +0,0 @@
from _typeshed import Incomplete
# Declares the JSON.* command methods (array ops, get/set, string ops); stub only.
class JSONCommands:
    def arrappend(self, name: str, path: str | None = ".", *args) -> list[int | None]: ...
    def arrindex(
        self, name: str, path: str, scalar: int, start: int | None = None, stop: int | None = None
    ) -> list[int | None]: ...
    def arrinsert(self, name: str, path: str, index: int, *args) -> list[int | None]: ...
    def arrlen(self, name: str, path: str | None = ".") -> list[int | None]: ...
    def arrpop(self, name: str, path: str | None = ".", index: int | None = -1) -> list[str | None]: ...
    def arrtrim(self, name: str, path: str, start: int, stop: int) -> list[int | None]: ...
    def type(self, name: str, path: str | None = ".") -> list[str]: ...
    def resp(self, name: str, path: str | None = ".") -> list[Incomplete]: ...
    def objkeys(self, name, path="."): ...
    def objlen(self, name, path="."): ...
    def numincrby(self, name, path, number): ...
    def nummultby(self, name, path, number): ...
    def clear(self, name, path="."): ...
    def delete(self, key, path="."): ...
    forget = delete  # JSON.FORGET is an alias of JSON.DEL
    def get(self, name, *args, no_escape: bool = False): ...
    def mget(self, keys, path): ...
    def set(self, name, path, obj, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
    def set_file(self, name, path, file_name, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
    def set_path(self, json_path, root_folder, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
    def strlen(self, name, path: Incomplete | None = None): ...
    def toggle(self, name, path="."): ...
    def strappend(self, name, value, path="."): ...
    def debug(self, subcommand, key: Incomplete | None = None, path="."): ...
    # Deprecated spellings kept for backwards compatibility.
    def jsonget(self, *args, **kwargs): ...
    def jsonmget(self, *args, **kwargs): ...
    def jsonset(self, *args, **kwargs): ...

View File

@@ -1,4 +0,0 @@
# Response-decoding helpers for the RedisJSON module.
def bulk_of_jsons(d): ...
def decode_dict_keys(obj): ...
def unstring(obj): ...
def decode_list(b): ...

View File

@@ -1,5 +0,0 @@
# A JSONPath expression wrapper for RedisJSON commands.
class Path:
    strPath: str  # the raw path string
    @staticmethod
    def root_path() -> str: ...
    def __init__(self, path: str) -> None: ...

View File

@@ -1,8 +0,0 @@
from redis.client import AbstractRedis
from redis.typing import EncodableT
# Parses COMMAND output to discover which arguments of a command are keys
# (used for cluster key-slot routing).
class CommandsParser:
    commands: dict[str, str]
    def __init__(self, redis_connection: AbstractRedis) -> None: ...
    def initialize(self, r: AbstractRedis) -> None: ...
    def get_keys(self, redis_conn: AbstractRedis, *args: EncodableT) -> list[EncodableT] | None: ...

View File

@@ -1,14 +0,0 @@
from .json import JSON
from .search import Search
from .timeseries import TimeSeries
# Factory mixin returning per-module command clients (RedisJSON, RediSearch,
# RedisTimeSeries, RedisBloom, RedisGraph).
class RedisModuleCommands:
    def json(self, encoder=..., decoder=...) -> JSON: ...
    def ft(self, index_name: str = "idx") -> Search: ...
    def ts(self) -> TimeSeries: ...
    def bf(self): ...
    def cf(self): ...
    def cms(self): ...
    def topk(self): ...
    def tdigest(self): ...
    def graph(self, index_name: str = "idx"): ...

View File

@@ -1,22 +0,0 @@
from _typeshed import Incomplete
from .commands import SearchCommands
# RediSearch client bound to one index; BatchIndexer buffers document
# additions and flushes them in chunks.
class Search(SearchCommands):
    class BatchIndexer:
        def __init__(self, client, chunk_size: int = 1000) -> None: ...
        def add_document(
            self,
            doc_id,
            nosave: bool = False,
            score: float = 1.0,
            payload: Incomplete | None = None,
            replace: bool = False,
            partial: bool = False,
            no_create: bool = False,
            **fields,
        ): ...
        def add_document_hash(self, doc_id, score: float = 1.0, replace: bool = False): ...
        def commit(self): ...

    def __init__(self, client, index_name: str = "idx") -> None: ...

View File

@@ -1,53 +0,0 @@
from typing import Any, ClassVar, Literal
FIELDNAME: Any

# LIMIT clause of an FT.AGGREGATE request.
class Limit:
    offset: Any
    count: Any
    def __init__(self, offset: int = 0, count: int = 0) -> None: ...
    def build_args(self): ...

# Base class for GROUPBY reducer functions; NAME is set by subclasses.
class Reducer:
    NAME: ClassVar[None]
    def __init__(self, *args) -> None: ...
    def alias(self, alias): ...
    @property
    def args(self): ...

# Sort direction marker for SORTBY; DIRSTRING is set by Asc/Desc.
class SortDirection:
    DIRSTRING: ClassVar[str | None]
    field: Any
    def __init__(self, field) -> None: ...

class Asc(SortDirection):
    DIRSTRING: ClassVar[Literal["ASC"]]

class Desc(SortDirection):
    DIRSTRING: ClassVar[Literal["DESC"]]

# Builder for FT.AGGREGATE requests; methods return the request for chaining.
class AggregateRequest:
    def __init__(self, query: str = "*") -> None: ...
    def load(self, *fields): ...
    def group_by(self, fields, *reducers): ...
    def apply(self, **kwexpr): ...
    def limit(self, offset, num): ...
    def sort_by(self, *fields, **kwargs): ...
    def filter(self, expressions): ...
    def with_schema(self): ...
    def verbatim(self): ...
    def cursor(self, count: int = 0, max_idle: float = 0.0): ...
    def build_args(self): ...

# Handle to a server-side aggregation cursor (FT.CURSOR READ).
class Cursor:
    cid: Any
    max_idle: int
    count: int
    def __init__(self, cid) -> None: ...
    def build_args(self): ...

# Rows plus the cursor/schema returned by an aggregation.
class AggregateResult:
    rows: Any
    cursor: Any
    schema: Any
    def __init__(self, rows, cursor, schema) -> None: ...

View File

@@ -1,111 +0,0 @@
from _typeshed import Incomplete
from collections.abc import Mapping
from typing import Any, Literal
from typing_extensions import TypeAlias
from .aggregation import AggregateRequest, AggregateResult, Cursor
from .query import Query
from .result import Result
# Mapping of PARAMS names to values accepted by parameterized queries.
_QueryParams: TypeAlias = Mapping[str, str | float]

# Raw RediSearch command names and argument keywords.
NUMERIC: Literal["NUMERIC"]
CREATE_CMD: Literal["FT.CREATE"]
ALTER_CMD: Literal["FT.ALTER"]
SEARCH_CMD: Literal["FT.SEARCH"]
ADD_CMD: Literal["FT.ADD"]
ADDHASH_CMD: Literal["FT.ADDHASH"]
DROP_CMD: Literal["FT.DROP"]
EXPLAIN_CMD: Literal["FT.EXPLAIN"]
EXPLAINCLI_CMD: Literal["FT.EXPLAINCLI"]
DEL_CMD: Literal["FT.DEL"]
AGGREGATE_CMD: Literal["FT.AGGREGATE"]
PROFILE_CMD: Literal["FT.PROFILE"]
CURSOR_CMD: Literal["FT.CURSOR"]
SPELLCHECK_CMD: Literal["FT.SPELLCHECK"]
DICT_ADD_CMD: Literal["FT.DICTADD"]
DICT_DEL_CMD: Literal["FT.DICTDEL"]
DICT_DUMP_CMD: Literal["FT.DICTDUMP"]
GET_CMD: Literal["FT.GET"]
MGET_CMD: Literal["FT.MGET"]
CONFIG_CMD: Literal["FT.CONFIG"]
TAGVALS_CMD: Literal["FT.TAGVALS"]
ALIAS_ADD_CMD: Literal["FT.ALIASADD"]
ALIAS_UPDATE_CMD: Literal["FT.ALIASUPDATE"]
ALIAS_DEL_CMD: Literal["FT.ALIASDEL"]
INFO_CMD: Literal["FT.INFO"]
SUGADD_COMMAND: Literal["FT.SUGADD"]
SUGDEL_COMMAND: Literal["FT.SUGDEL"]
SUGLEN_COMMAND: Literal["FT.SUGLEN"]
SUGGET_COMMAND: Literal["FT.SUGGET"]
SYNUPDATE_CMD: Literal["FT.SYNUPDATE"]
SYNDUMP_CMD: Literal["FT.SYNDUMP"]
NOOFFSETS: Literal["NOOFFSETS"]
NOFIELDS: Literal["NOFIELDS"]
STOPWORDS: Literal["STOPWORDS"]
WITHSCORES: Literal["WITHSCORES"]
FUZZY: Literal["FUZZY"]
WITHPAYLOADS: Literal["WITHPAYLOADS"]
# Declares the RediSearch (FT.*) command surface: index management, document
# add/delete, search/aggregate, spellcheck, dictionaries and suggestions.
class SearchCommands:
    def batch_indexer(self, chunk_size: int = 100): ...
    def create_index(
        self,
        fields,
        no_term_offsets: bool = False,
        no_field_flags: bool = False,
        stopwords: Incomplete | None = None,
        definition: Incomplete | None = None,
        max_text_fields: bool = False,  # added in 4.1.1
        temporary: Incomplete | None = None,  # added in 4.1.1
        no_highlight: bool = False,  # added in 4.1.1
        no_term_frequencies: bool = False,  # added in 4.1.1
        skip_initial_scan: bool = False,  # added in 4.1.1
    ): ...
    def alter_schema_add(self, fields): ...
    def dropindex(self, delete_documents: bool = False): ...
    def add_document(
        self,
        doc_id,
        nosave: bool = False,
        score: float = 1.0,
        payload: Incomplete | None = None,
        replace: bool = False,
        partial: bool = False,
        language: Incomplete | None = None,
        no_create: bool = False,
        **fields,
    ): ...
    def add_document_hash(self, doc_id, score: float = 1.0, language: Incomplete | None = None, replace: bool = False): ...
    def delete_document(self, doc_id, conn: Incomplete | None = None, delete_actual_document: bool = False): ...
    def load_document(self, id): ...
    def get(self, *ids): ...
    def info(self): ...
    def get_params_args(self, query_params: _QueryParams) -> list[Any]: ...
    def search(self, query: str | Query, query_params: _QueryParams | None = None) -> Result: ...
    def explain(self, query: str | Query, query_params: _QueryParams | None = None): ...
    def explain_cli(self, query): ...
    def aggregate(self, query: AggregateRequest | Cursor, query_params: _QueryParams | None = None) -> AggregateResult: ...
    def profile(
        self, query: str | Query | AggregateRequest, limited: bool = False, query_params: Mapping[str, str | float] | None = None
    ) -> tuple[Incomplete, Incomplete]: ...
    def spellcheck(
        self, query, distance: Incomplete | None = None, include: Incomplete | None = None, exclude: Incomplete | None = None
    ): ...
    def dict_add(self, name, *terms): ...
    def dict_del(self, name, *terms): ...
    def dict_dump(self, name): ...
    def config_set(self, option: str, value: str) -> bool: ...
    def config_get(self, option: str) -> dict[str, str]: ...
    def tagvals(self, tagfield): ...
    def aliasadd(self, alias): ...
    def aliasupdate(self, alias): ...
    def aliasdel(self, alias): ...
    def sugadd(self, key, *suggestions, **kwargs): ...
    def suglen(self, key): ...
    def sugdel(self, key, string): ...
    def sugget(self, key, prefix, fuzzy: bool = False, num: int = 10, with_scores: bool = False, with_payloads: bool = False): ...
    def synupdate(self, groupid, skipinitial: bool = False, *terms): ...
    def syndump(self): ...

View File

@@ -1,52 +0,0 @@
from _typeshed import Incomplete
from typing import Any
# Builder for FT.SEARCH queries; modifier methods return the query for chaining.
class Query:
    def __init__(self, query_string) -> None: ...
    def query_string(self): ...
    def limit_ids(self, *ids): ...
    def return_fields(self, *fields): ...
    def return_field(self, field, as_field: Incomplete | None = None): ...
    def summarize(
        self,
        fields: Incomplete | None = None,
        context_len: Incomplete | None = None,
        num_frags: Incomplete | None = None,
        sep: Incomplete | None = None,
    ): ...
    def highlight(self, fields: Incomplete | None = None, tags: Incomplete | None = None): ...
    def language(self, language): ...
    def slop(self, slop): ...
    def in_order(self): ...
    def scorer(self, scorer): ...
    def get_args(self): ...
    def paging(self, offset, num): ...
    def verbatim(self): ...
    def no_content(self): ...
    def no_stopwords(self): ...
    def with_payloads(self): ...
    def with_scores(self): ...
    def limit_fields(self, *fields): ...
    def add_filter(self, flt): ...
    def sort_by(self, field, asc: bool = True): ...
    def expander(self, expander): ...

# Base class for query filters (keyword + field + raw args).
class Filter:
    args: Any
    def __init__(self, keyword, field, *args) -> None: ...

# Numeric range filter; INF/NEG_INF are the open-ended bounds.
class NumericFilter(Filter):
    INF: str
    NEG_INF: str
    def __init__(self, field, minval, maxval, minExclusive: bool = False, maxExclusive: bool = False) -> None: ...

# Geographic radius filter; class attributes are the supported units.
class GeoFilter(Filter):
    METERS: str
    KILOMETERS: str
    FEET: str
    MILES: str
    def __init__(self, field, lon, lat, radius, unit="km") -> None: ...

# A single SORTBY field with direction.
class SortbyField:
    args: Any
    def __init__(self, field, asc: bool = True) -> None: ...

View File

@@ -1,7 +0,0 @@
from typing import Any
# Parsed FT.SEARCH reply: total hit count, query duration and document list.
class Result:
    total: Any
    duration: Any
    docs: Any
    def __init__(self, res, hascontent, duration: int = 0, has_payload: bool = False, with_scores: bool = False) -> None: ...

View File

@@ -1,17 +0,0 @@
# Declares the SENTINEL subcommand surface for sentinel connections.
class SentinelCommands:
    def sentinel(self, *args): ...
    def sentinel_get_master_addr_by_name(self, service_name): ...
    def sentinel_master(self, service_name): ...
    def sentinel_masters(self): ...
    def sentinel_monitor(self, name, ip, port, quorum): ...
    def sentinel_remove(self, name): ...
    def sentinel_sentinels(self, service_name): ...
    def sentinel_set(self, name, option, value): ...
    def sentinel_slaves(self, service_name): ...
    def sentinel_reset(self, pattern): ...
    def sentinel_failover(self, new_master_name): ...
    def sentinel_ckquorum(self, new_master_name): ...
    def sentinel_flushconfig(self): ...

# Async variant; only the generic entry point is redeclared as a coroutine.
class AsyncSentinelCommands(SentinelCommands):
    async def sentinel(self, *args) -> None: ...

View File

@@ -1,14 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...client import Pipeline as ClientPipeline
from .commands import TimeSeriesCommands
# RedisTimeSeries module client; pipeline() yields a TS-aware Pipeline subclass.
class TimeSeries(TimeSeriesCommands):
    MODULE_CALLBACKS: dict[str, Any]  # per-command response callbacks
    client: Any
    execute_command: Any
    def __init__(self, client: Incomplete | None = None, **kwargs) -> None: ...
    def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ...

# Pipeline that also exposes the RedisTimeSeries commands.
class Pipeline(TimeSeriesCommands, ClientPipeline[Incomplete]): ...  # type: ignore[misc]

View File

@@ -1,160 +0,0 @@
from typing import Literal
from typing_extensions import TypeAlias
# Acceptable key types for time-series commands.
_Key: TypeAlias = bytes | str | memoryview

# Raw RedisTimeSeries command names.
ADD_CMD: Literal["TS.ADD"]
ALTER_CMD: Literal["TS.ALTER"]
CREATERULE_CMD: Literal["TS.CREATERULE"]
CREATE_CMD: Literal["TS.CREATE"]
DECRBY_CMD: Literal["TS.DECRBY"]
DELETERULE_CMD: Literal["TS.DELETERULE"]
DEL_CMD: Literal["TS.DEL"]
GET_CMD: Literal["TS.GET"]
INCRBY_CMD: Literal["TS.INCRBY"]
INFO_CMD: Literal["TS.INFO"]
MADD_CMD: Literal["TS.MADD"]
MGET_CMD: Literal["TS.MGET"]
MRANGE_CMD: Literal["TS.MRANGE"]
MREVRANGE_CMD: Literal["TS.MREVRANGE"]
QUERYINDEX_CMD: Literal["TS.QUERYINDEX"]
RANGE_CMD: Literal["TS.RANGE"]
REVRANGE_CMD: Literal["TS.REVRANGE"]
# Declares the RedisTimeSeries (TS.*) command surface: series creation and
# mutation, compaction rules, range queries and multi-series queries.
class TimeSeriesCommands:
    def create(
        self,
        key: _Key,
        retention_msecs: int | None = None,
        uncompressed: bool | None = False,
        labels: dict[str, str] | None = None,
        chunk_size: int | None = None,
        duplicate_policy: str | None = None,
    ): ...
    def alter(
        self,
        key: _Key,
        retention_msecs: int | None = None,
        labels: dict[str, str] | None = None,
        chunk_size: int | None = None,
        duplicate_policy: str | None = None,
    ): ...
    def add(
        self,
        key: _Key,
        timestamp: int | str,
        value: float,
        retention_msecs: int | None = None,
        uncompressed: bool | None = False,
        labels: dict[str, str] | None = None,
        chunk_size: int | None = None,
        duplicate_policy: str | None = None,
    ): ...
    def madd(self, ktv_tuples): ...
    def incrby(
        self,
        key: _Key,
        value: float,
        timestamp: int | str | None = None,
        retention_msecs: int | None = None,
        uncompressed: bool | None = False,
        labels: dict[str, str] | None = None,
        chunk_size: int | None = None,
    ): ...
    def decrby(
        self,
        key: _Key,
        value: float,
        timestamp: int | str | None = None,
        retention_msecs: int | None = None,
        uncompressed: bool | None = False,
        labels: dict[str, str] | None = None,
        chunk_size: int | None = None,
    ): ...
    def delete(self, key, from_time, to_time): ...
    def createrule(
        self, source_key: _Key, dest_key: _Key, aggregation_type: str, bucket_size_msec: int, align_timestamp: int | None = None
    ): ...
    def deleterule(self, source_key, dest_key): ...
    def range(
        self,
        key: _Key,
        from_time: int | str,
        to_time: int | str,
        count: int | None = None,
        aggregation_type: str | None = None,
        bucket_size_msec: int | None = 0,
        filter_by_ts: list[int] | None = None,
        filter_by_min_value: int | None = None,
        filter_by_max_value: int | None = None,
        align: int | str | None = None,
        latest: bool | None = False,
        bucket_timestamp: str | None = None,
        empty: bool | None = False,
    ): ...
    def revrange(
        self,
        key: _Key,
        from_time: int | str,
        to_time: int | str,
        count: int | None = None,
        aggregation_type: str | None = None,
        bucket_size_msec: int | None = 0,
        filter_by_ts: list[int] | None = None,
        filter_by_min_value: int | None = None,
        filter_by_max_value: int | None = None,
        align: int | str | None = None,
        latest: bool | None = False,
        bucket_timestamp: str | None = None,
        empty: bool | None = False,
    ): ...
    def mrange(
        self,
        from_time: int | str,
        to_time: int | str,
        filters: list[str],
        count: int | None = None,
        aggregation_type: str | None = None,
        bucket_size_msec: int | None = 0,
        with_labels: bool | None = False,
        filter_by_ts: list[int] | None = None,
        filter_by_min_value: int | None = None,
        filter_by_max_value: int | None = None,
        groupby: str | None = None,
        reduce: str | None = None,
        select_labels: list[str] | None = None,
        align: int | str | None = None,
        latest: bool | None = False,
        bucket_timestamp: str | None = None,
        empty: bool | None = False,
    ): ...
    def mrevrange(
        self,
        from_time: int | str,
        to_time: int | str,
        filters: list[str],
        count: int | None = None,
        aggregation_type: str | None = None,
        bucket_size_msec: int | None = 0,
        with_labels: bool | None = False,
        filter_by_ts: list[int] | None = None,
        filter_by_min_value: int | None = None,
        filter_by_max_value: int | None = None,
        groupby: str | None = None,
        reduce: str | None = None,
        select_labels: list[str] | None = None,
        align: int | str | None = None,
        latest: bool | None = False,
        bucket_timestamp: str | None = None,
        empty: bool | None = False,
    ): ...
    def get(self, key: _Key, latest: bool | None = False): ...
    def mget(
        self,
        filters: list[str],
        with_labels: bool | None = False,
        select_labels: list[str] | None = None,
        latest: bool | None = False,
    ): ...
    def info(self, key): ...
    def queryindex(self, filters): ...

View File

@@ -1,18 +0,0 @@
from _typeshed import Incomplete
from typing import Any
# Parsed TS.INFO reply for a single time series.
class TSInfo:
    rules: list[Any]  # compaction rules
    labels: list[Any]
    sourceKey: Incomplete | None
    chunk_count: Incomplete | None
    memory_usage: Incomplete | None
    total_samples: Incomplete | None
    retention_msecs: Incomplete | None
    last_time_stamp: Incomplete | None
    first_time_stamp: Incomplete | None
    max_samples_per_chunk: Incomplete | None
    chunk_size: Incomplete | None
    duplicate_policy: Incomplete | None
    def __init__(self, args) -> None: ...

View File

@@ -1,5 +0,0 @@
# Response-parsing helpers for RedisTimeSeries replies.
def list_to_dict(aList): ...
def parse_range(response): ...
def parse_m_range(response): ...
def parse_get(response): ...
def parse_m_get(response): ...

View File

@@ -1,289 +0,0 @@
from _typeshed import Incomplete, Unused
from abc import abstractmethod
from collections.abc import Callable, Iterable, Mapping
from queue import Queue
from socket import socket
from typing import Any, ClassVar
from typing_extensions import Self, TypeAlias
from .credentials import CredentialProvider
from .retry import Retry
ssl_available: bool  # whether the ssl module could be imported

# RESP protocol framing bytes.
SYM_STAR: bytes
SYM_DOLLAR: bytes
SYM_CRLF: bytes
SYM_EMPTY: bytes
SERVER_CLOSED_CONNECTION_ERROR: str
NONBLOCKING_EXCEPTIONS: tuple[type[Exception], ...]
NONBLOCKING_EXCEPTION_ERROR_NUMBERS: dict[type[Exception], int]
SENTINEL: object  # unique marker object
# Error strings used to classify MODULE command failures.
MODULE_LOAD_ERROR: str
NO_SUCH_MODULE_ERROR: str
MODULE_UNLOAD_NOT_POSSIBLE_ERROR: str
MODULE_EXPORTS_DATA_TYPES_ERROR: str
FALSE_STRINGS: tuple[str, ...]
URL_QUERY_ARGUMENT_PARSERS: dict[str, Callable[[Any], Any]]
# Options as passed to Pool.get_connection().
_ConnectionPoolOptions: TypeAlias = Any
# Callback invoked with the connection after it is established.
_ConnectFunc: TypeAlias = Callable[[Connection], object]
# Base reply parser; maps server error strings to exception classes.
class BaseParser:
    EXCEPTION_CLASSES: ClassVar[dict[str, type[Exception] | dict[str, type[Exception]]]]
    @classmethod
    def parse_error(cls, response: str) -> Exception: ...
# Buffered reader over a raw socket used by PythonParser.
class SocketBuffer:
    socket_read_size: int
    bytes_written: int
    bytes_read: int
    socket_timeout: float | None
    def __init__(self, socket: socket, socket_read_size: int, socket_timeout: float | None) -> None: ...
    def unread_bytes(self) -> int: ...
    def can_read(self, timeout: float | None) -> bool: ...
    def read(self, length: int) -> bytes: ...
    def readline(self) -> bytes: ...
    def get_pos(self) -> int: ...
    def rewind(self, pos: int) -> None: ...
    def purge(self) -> None: ...
    def close(self) -> None: ...
# Pure-Python RESP reply parser (fallback when hiredis is unavailable).
class PythonParser(BaseParser):
    encoding: str
    socket_read_size: int
    encoder: Encoder | None
    def __init__(self, socket_read_size: int) -> None: ...
    def __del__(self) -> None: ...
    def on_connect(self, connection: Connection) -> None: ...
    def on_disconnect(self) -> None: ...
    def can_read(self, timeout: float | None) -> bool: ...
    def read_response(self, disable_decoding: bool = False) -> Any: ...  # `str | bytes` or `list[str | bytes]`
# RESP reply parser backed by the C hiredis library.
class HiredisParser(BaseParser):
    socket_read_size: int
    def __init__(self, socket_read_size: int) -> None: ...
    def __del__(self) -> None: ...
    def on_connect(self, connection: Connection, **kwargs) -> None: ...
    def on_disconnect(self) -> None: ...
    def can_read(self, timeout: float | None) -> bool: ...
    def read_from_socket(self, timeout: float | None = ..., raise_on_timeout: bool = True) -> bool: ...
    def read_response(self, disable_decoding: bool = False) -> Any: ...  # `str | bytes` or `list[str | bytes]`
DefaultParser: type[BaseParser]  # Hiredis or PythonParser

# Value types that Encoder.encode accepts.
_Encodable: TypeAlias = str | bytes | memoryview | bool | float

# Converts Python values to/from the bytes sent over the wire.
class Encoder:
    encoding: str
    encoding_errors: str
    decode_responses: bool
    def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ...
    def encode(self, value: _Encodable) -> bytes: ...
    def decode(self, value: str | bytes | memoryview, force: bool = False) -> str: ...
# Shared base for TCP/SSL/Unix-socket connections: auth state, retry policy,
# health checks, and RESP command packing/reading. Subclasses supply the
# transport and implement repr_pieces().
class AbstractConnection:
    pid: int  # pid of the creating process, used to detect use after fork
    db: int
    client_name: str | None
    credential_provider: CredentialProvider | None
    password: str | None
    username: str | None
    socket_timeout: float | None
    socket_connect_timeout: float | None
    retry_on_timeout: bool
    retry_on_error: list[type[Exception]]
    retry: Retry
    health_check_interval: int
    next_health_check: int
    redis_connect_func: _ConnectFunc | None
    encoder: Encoder
    def __init__(
        self,
        db: int = 0,
        password: str | None = None,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        retry_on_timeout: bool = False,
        retry_on_error: list[type[Exception]] = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: int = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: _ConnectFunc | None = None,
        credential_provider: CredentialProvider | None = None,
        command_packer: Incomplete | None = None,
    ) -> None: ...
    @abstractmethod
    def repr_pieces(self) -> list[tuple[str, Any]]: ...
    def register_connect_callback(self, callback: _ConnectFunc) -> None: ...
    def clear_connect_callbacks(self) -> None: ...
    def set_parser(self, parser_class: type[BaseParser]) -> None: ...
    def connect(self) -> None: ...
    def on_connect(self) -> None: ...
    def disconnect(self, *args: Unused) -> None: ...  # 'args' added in redis 4.1.2
    def check_health(self) -> None: ...
    def send_packed_command(self, command: str | Iterable[str], check_health: bool = True) -> None: ...
    def send_command(self, *args, **kwargs) -> None: ...
    def can_read(self, timeout: float | None = 0) -> bool: ...
    def read_response(
        self, disable_decoding: bool = False, *, disconnect_on_error: bool = True
    ) -> Any: ...  # `str | bytes` or `list[str | bytes]`
    def pack_command(self, *args) -> list[bytes]: ...
    def pack_commands(self, commands: Iterable[Iterable[Incomplete]]) -> list[bytes]: ...
# Plain TCP connection; adds host/port and TCP keepalive options.
class Connection(AbstractConnection):
    host: str
    port: int
    socket_keepalive: bool
    socket_keepalive_options: Mapping[str, int | str]
    socket_type: int
    def __init__(
        self,
        host: str = "localhost",
        port: int = 6379,
        socket_keepalive: bool = False,
        socket_keepalive_options: Mapping[str, int | str] | None = None,
        socket_type: int = 0,
        *,
        db: int = 0,
        password: str | None = None,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        retry_on_timeout: bool = False,
        retry_on_error: list[type[Exception]] = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: int = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: _ConnectFunc | None = None,
        credential_provider: CredentialProvider | None = None,
        command_packer: Incomplete | None = None,
    ) -> None: ...
    def repr_pieces(self) -> list[tuple[str, Any]]: ...
# TLS connection; wraps Connection with certificate/OCSP configuration.
class SSLConnection(Connection):
    keyfile: Any
    certfile: Any
    cert_reqs: Any
    ca_certs: Any
    ca_path: Incomplete | None
    check_hostname: bool
    certificate_password: Incomplete | None
    ssl_validate_ocsp: bool
    ssl_validate_ocsp_stapled: bool  # added in 4.1.1
    ssl_ocsp_context: Incomplete | None  # added in 4.1.1
    ssl_ocsp_expected_cert: Incomplete | None  # added in 4.1.1
    def __init__(
        self,
        ssl_keyfile=None,
        ssl_certfile=None,
        ssl_cert_reqs="required",
        ssl_ca_certs=None,
        ssl_ca_data: Incomplete | None = None,
        ssl_check_hostname: bool = False,
        ssl_ca_path: Incomplete | None = None,
        ssl_password: Incomplete | None = None,
        ssl_validate_ocsp: bool = False,
        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
        *,
        host: str = "localhost",
        port: int = 6379,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        socket_keepalive: bool = False,
        socket_keepalive_options: Mapping[str, int | str] | None = None,
        socket_type: int = 0,
        db: int = 0,
        password: str | None = None,
        retry_on_timeout: bool = False,
        retry_on_error: list[type[Exception]] = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: int = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: _ConnectFunc | None = None,
        credential_provider: CredentialProvider | None = None,
        command_packer: Incomplete | None = None,
    ) -> None: ...
# Connection over a Unix domain socket; `path` replaces host/port.
class UnixDomainSocketConnection(AbstractConnection):
    path: str
    def __init__(
        self,
        path: str = "",
        *,
        db: int = 0,
        password: str | None = None,
        socket_timeout: float | None = None,
        socket_connect_timeout: float | None = None,
        retry_on_timeout: bool = False,
        retry_on_error: list[type[Exception]] = ...,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: type[BaseParser] = ...,
        socket_read_size: int = 65536,
        health_check_interval: int = 0,
        client_name: str | None = None,
        username: str | None = None,
        retry: Retry | None = None,
        redis_connect_func: _ConnectFunc | None = None,
        credential_provider: CredentialProvider | None = None,
        command_packer: Incomplete | None = None,
    ) -> None: ...
    def repr_pieces(self) -> list[tuple[str, Any]]: ...
# TODO: make generic on `connection_class`
# Pool of reusable connections created from shared connection kwargs.
class ConnectionPool:
    connection_class: type[Connection]
    connection_kwargs: dict[str, Any]
    max_connections: int
    pid: int  # pid of the creating process, used to reset the pool after fork
    @classmethod
    def from_url(cls, url: str, *, db: int = ..., decode_components: bool = ..., **kwargs) -> Self: ...
    def __init__(
        self, connection_class: type[AbstractConnection] = ..., max_connections: int | None = None, **connection_kwargs
    ) -> None: ...
    def reset(self) -> None: ...
    def get_connection(self, command_name: Unused, *keys, **options: _ConnectionPoolOptions) -> Connection: ...
    def make_connection(self) -> Connection: ...
    def release(self, connection: Connection) -> None: ...
    def disconnect(self, inuse_connections: bool = True) -> None: ...
    def get_encoder(self) -> Encoder: ...
    def owns_connection(self, connection: Connection) -> bool: ...
# Pool variant that blocks (up to `timeout`) when all connections are in use.
class BlockingConnectionPool(ConnectionPool):
    queue_class: type[Queue[Any]]
    timeout: float
    pool: Queue[Connection | None]  # might not be defined
    def __init__(
        self,
        max_connections: int = 50,
        timeout: float = 20,
        connection_class: type[Connection] = ...,
        queue_class: type[Queue[Any]] = ...,
        **connection_kwargs,
    ) -> None: ...
    def disconnect(self) -> None: ...  # type: ignore[override]
# Helpers for turning redis:// URLs into connection keyword arguments.
def to_bool(value: object) -> bool: ...
def parse_url(url: str) -> dict[str, Any]: ...

View File

@@ -1,5 +0,0 @@
from redis.typing import EncodedT
# Total number of hash slots in a Redis Cluster (16384).
REDIS_CLUSTER_HASH_SLOTS: int
# Compute the cluster hash slot for a key (honors {hash tag} semantics).
def key_slot(key: EncodedT, bucket: int = 16384) -> int: ...

View File

@@ -1,11 +0,0 @@
from abc import abstractmethod
# Abstract source of AUTH credentials: (password,) or (username, password).
class CredentialProvider:
    @abstractmethod
    def get_credentials(self) -> tuple[str] | tuple[str, str]: ...

# Static username/password credential provider.
class UsernamePasswordCredentialProvider(CredentialProvider):
    username: str
    password: str
    def __init__(self, username: str | None = None, password: str | None = None) -> None: ...
    def get_credentials(self) -> tuple[str] | tuple[str, str]: ...

View File

@@ -1,42 +0,0 @@
# Root of the redis-py exception hierarchy.
class RedisError(Exception): ...
class AuthenticationError(RedisError): ...
class ConnectionError(RedisError): ...
class TimeoutError(RedisError): ...
class AuthorizationError(ConnectionError): ...
class BusyLoadingError(ConnectionError): ...
class InvalidResponse(RedisError): ...
class ResponseError(RedisError): ...
class DataError(RedisError): ...
class PubSubError(RedisError): ...
class WatchError(RedisError): ...
class NoScriptError(ResponseError): ...
class ExecAbortError(ResponseError): ...
class ReadOnlyError(ResponseError): ...
class NoPermissionError(ResponseError): ...
class ModuleError(ResponseError): ...
class LockError(RedisError, ValueError): ...
class LockNotOwnedError(LockError): ...
class ChildDeadlockedError(Exception): ...
class AuthenticationWrongNumberOfArgsError(ResponseError): ...
class RedisClusterException(Exception): ...
class ClusterError(RedisError): ...

# CLUSTERDOWN reply; carries the server's message text.
class ClusterDownError(ClusterError, ResponseError):
    args: tuple[str]
    message: str
    def __init__(self, resp: str) -> None: ...

# ASK redirection reply; parsed into the target slot and node address.
class AskError(ResponseError):
    args: tuple[str]
    message: str
    slot_id: int
    node_addr: tuple[str, int]
    host: str
    port: int
    def __init__(self, resp: str) -> None: ...

class TryAgainError(ResponseError): ...
class ClusterCrossSlotError(ResponseError): ...
class MovedError(AskError): ...
class MasterDownError(ClusterDownError): ...
class SlotNotCoveredError(RedisClusterException): ...

View File

@@ -1,56 +0,0 @@
from _typeshed import Incomplete
from types import TracebackType
from typing import Any, ClassVar, Protocol
from typing_extensions import Self
from redis.client import Redis
# Structural type for Lock.local: any object exposing a `token` attribute
# (e.g. threading.local or a plain namespace).
class _Local(Protocol):
    token: str | bytes | None
# Shared, distributed lock implemented on top of a Redis key.
class Lock:
    # Lua script sources used to atomically extend / reacquire / release.
    LUA_EXTEND_SCRIPT: ClassVar[str]
    LUA_REACQUIRE_SCRIPT: ClassVar[str]
    LUA_RELEASE_SCRIPT: ClassVar[str]
    # Registered script objects; presumably populated by register_scripts() — confirm.
    lua_extend: ClassVar[Incomplete | None]
    lua_reacquire: ClassVar[Incomplete | None]
    lua_release: ClassVar[Incomplete | None]
    redis: Redis[Any]  # client used to execute lock commands
    name: str  # name of the Redis key that holds the lock
    timeout: float | None  # lock lifetime in seconds; None = hold until released
    sleep: float  # seconds slept between acquisition attempts
    blocking: bool  # whether acquire() blocks by default
    blocking_timeout: float | None  # max seconds acquire() may block; None = forever
    thread_local: bool  # if True, the token is kept in thread-local storage
    local: _Local  # storage for this instance's lock token
    def __init__(
        self,
        redis: Redis[Any],
        name: str,
        timeout: float | None = None,
        sleep: float = 0.1,
        blocking: bool = True,
        blocking_timeout: float | None = None,
        thread_local: bool = True,
    ) -> None: ...
    # Register the Lua scripts on the client.
    def register_scripts(self) -> None: ...
    # Context-manager protocol: acquire on enter, release on exit.
    def __enter__(self) -> Self: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
    ) -> bool | None: ...
    # Try to acquire the lock; arguments override the constructor settings per call.
    def acquire(
        self,
        sleep: float | None = None,
        blocking: bool | None = None,
        blocking_timeout: float | None = None,
        token: str | bytes | None = None,
    ) -> bool: ...
    # Single acquisition attempt with an explicit token.
    def do_acquire(self, token: str | bytes) -> bool: ...
    # True if the lock is held by anyone (not necessarily this instance).
    def locked(self) -> bool: ...
    # True if the lock is held by this instance's token.
    def owned(self) -> bool: ...
    def release(self) -> None: ...
    def do_release(self, expected_token: str | bytes) -> None: ...
    # Extend the lock's TTL by additional_time (or replace the TTL if replace_ttl).
    def extend(self, additional_time: float, replace_ttl: bool = False) -> bool: ...
    def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ...
    # Reset the lock's TTL back to the configured timeout.
    def reacquire(self) -> bool: ...
    def do_reacquire(self) -> bool: ...

View File

@@ -1,21 +0,0 @@
from _typeshed import Incomplete
from ssl import SSLObject, SSLSocket
from typing import Literal
from cryptography.x509.base import Certificate
from OpenSSL.SSL import Connection
# pyOpenSSL OCSP-stapling callback; returns True on success (the Literal[True]
# return implies failures raise rather than return False).
def ocsp_staple_verifier(con: Connection, ocsp_bytes: bytes, expected: bytes | None = None) -> Literal[True]: ...
# Checks a peer certificate's revocation status via OCSP.
class OCSPVerifier:
    SOCK: SSLObject | SSLSocket  # TLS object whose peer certificate is checked
    HOST: str
    PORT: int
    CA_CERTS: str | None  # path to a CA bundle, if provided
    def __init__(self, sock: SSLObject | SSLSocket, host: str, port: int, ca_certs: str | None = None) -> None: ...
    # cryptography.x509.general_name.GeneralName.value is typed as Any
    def components_from_socket(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ...
    def components_from_direct_connection(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ...
    # Build the OCSP responder URL for `cert` issued by `issuer_cert`.
    def build_certificate_url(self, server: str, cert: Certificate, issuer_cert: Certificate) -> str: ...
    def check_certificate(self, server: str, cert: Certificate, issuer_url: str | bytes) -> Literal[True]: ...
    # True on success; the Literal[True] return implies failures raise.
    def is_valid(self) -> Literal[True]: ...

View File

@@ -1,11 +0,0 @@
from collections.abc import Callable, Iterable
from typing import TypeVar
from redis.backoff import AbstractBackoff
_T = TypeVar("_T")
# Retry policy: retry up to `retries` times, sleeping per `backoff`, for the
# given error types.
class Retry:
    def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[Exception], ...] = ...) -> None: ...
    # Add extra error types to the set that triggers a retry.
    def update_supported_errors(self, specified_errors: Iterable[type[Exception]]) -> None: ...
    # Call `do`; on a supported error, call `fail(error)` and retry per policy.
    def call_with_retry(self, do: Callable[[], _T], fail: Callable[[Exception], object]) -> _T: ...

View File

@@ -1,62 +0,0 @@
from collections.abc import Iterable, Iterator
from typing import Any, Literal, TypeVar, overload
from typing_extensions import TypeAlias
from redis.client import Redis
from redis.commands.sentinel import SentinelCommands
from redis.connection import Connection, ConnectionPool, SSLConnection
from redis.exceptions import ConnectionError
_RedisT = TypeVar("_RedisT", bound=Redis[Any])  # client type for master_for/slave_for
_AddressAndPort: TypeAlias = tuple[str, int]  # (host, port)
_SentinelState: TypeAlias = dict[str, Any]  # TODO: this can be a TypedDict

# Raised when Sentinel cannot provide a usable master / replica for a service.
class MasterNotFoundError(ConnectionError): ...
class SlaveNotFoundError(ConnectionError): ...
# Connection whose target address is discovered through Sentinel.
class SentinelManagedConnection(Connection):
    connection_pool: SentinelConnectionPool
    def __init__(self, *, connection_pool: SentinelConnectionPool, **kwargs) -> None: ...
    # Connect to an explicit (host, port) address.
    def connect_to(self, address: _AddressAndPort) -> None: ...
    # Connect using an address obtained from the pool's sentinel manager —
    # NOTE(review): master vs. rotating-replica choice inferred; confirm.
    def connect(self) -> None: ...
    # The result can be either `str | bytes` or `list[str | bytes]`
    def read_response(self, disable_decoding: bool = False, *, disconnect_on_error: bool = False) -> Any: ...
# SSL variant of SentinelManagedConnection.
class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...
# Connection pool that resolves addresses for a monitored service via Sentinel.
class SentinelConnectionPool(ConnectionPool):
    is_master: bool  # True when the pool targets the master, False for replicas
    check_connection: bool
    service_name: str  # name of the service as configured in Sentinel
    sentinel_manager: Sentinel
    def __init__(self, service_name: str, sentinel_manager: Sentinel, **kwargs) -> None: ...
    def reset(self) -> None: ...
    # Whether `connection` belongs to this pool (points at the current address).
    def owns_connection(self, connection: Connection) -> bool: ...
    # Current master address for the service, per the sentinels.
    def get_master_address(self) -> _AddressAndPort: ...
    # Iterator cycling over known replica addresses.
    def rotate_slaves(self) -> Iterator[_AddressAndPort]: ...
# Entry point for Redis Sentinel: discovers masters/replicas and builds clients.
class Sentinel(SentinelCommands):
    sentinel_kwargs: dict[str, Any]  # kwargs used to build the sentinel clients
    sentinels: list[Redis[Any]]  # one client per configured sentinel address
    min_other_sentinels: int  # minimum peer sentinels required to trust a state
    connection_kwargs: dict[str, Any]  # default kwargs for master/slave clients
    def __init__(
        self,
        sentinels: Iterable[_AddressAndPort],
        min_other_sentinels: int = 0,
        sentinel_kwargs: dict[str, Any] | None = None,
        **connection_kwargs,
    ) -> None: ...
    # True if `state` describes a usable master for `service_name`.
    def check_master_state(self, state: _SentinelState, service_name: str) -> bool: ...
    # Address of the current master for the service.
    def discover_master(self, service_name: str) -> _AddressAndPort: ...
    # Filter out unusable replica states, returning their addresses.
    def filter_slaves(self, slaves: Iterable[_SentinelState]) -> list[_AddressAndPort]: ...
    def discover_slaves(self, service_name: str) -> list[_AddressAndPort]: ...
    # Build a client bound to the service's master; `redis_class` selects the
    # client type (defaults to Redis).
    @overload
    def master_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Redis[Any]: ...
    @overload
    def master_for(self, service_name: str, redis_class: type[_RedisT], connection_pool_class=..., **kwargs) -> _RedisT: ...
    # Build a client that reads from the service's replicas.
    @overload
    def slave_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Redis[Any]: ...
    @overload
    def slave_for(self, service_name: str, redis_class: type[_RedisT], connection_pool_class=..., **kwargs) -> _RedisT: ...
    # Always returns True — NOTE(review): presumably broadcasts to all sentinels; confirm.
    def execute_command(self, *args, **kwargs) -> Literal[True]: ...

View File

@@ -1,34 +0,0 @@
from collections.abc import Iterable
from datetime import datetime, timedelta
from typing import Any, Protocol, TypeVar
from typing_extensions import TypeAlias
from redis.asyncio.connection import ConnectionPool as AsyncConnectionPool
from redis.connection import ConnectionPool
# The following type aliases exist at runtime.
# Value encodings accepted/produced by the client.
EncodedT: TypeAlias = bytes | memoryview
DecodedT: TypeAlias = str | int | float
EncodableT: TypeAlias = EncodedT | DecodedT
# Expiry arguments: absolute (timestamp/datetime) vs. relative (seconds/timedelta).
AbsExpiryT: TypeAlias = int | datetime
ExpiryT: TypeAlias = int | timedelta
ZScoreBoundT: TypeAlias = float | str  # str allows "(1", "-inf", "+inf" style bounds
BitfieldOffsetT: TypeAlias = int | str
_StringLikeT: TypeAlias = bytes | str | memoryview  # noqa: Y043
# Argument types for keys, patterns, fields, channels, streams, etc.
KeyT: TypeAlias = _StringLikeT
PatternT: TypeAlias = _StringLikeT
FieldT: TypeAlias = EncodableT
KeysT: TypeAlias = KeyT | Iterable[KeyT]
ChannelT: TypeAlias = _StringLikeT
GroupT: TypeAlias = _StringLikeT
ConsumerT: TypeAlias = _StringLikeT
StreamIdT: TypeAlias = int | _StringLikeT
ScriptTextT: TypeAlias = _StringLikeT
TimeoutSecT: TypeAlias = int | float | _StringLikeT
# Constrained TypeVars used where the key/field/channel type must be preserved.
AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview)  # noqa: Y001
AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview)  # noqa: Y001
AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview)  # noqa: Y001
# Structural type for objects that can execute Redis commands (sync or async
# clients), used as the base requirement for the command mixin classes.
class CommandsProtocol(Protocol):
    connection_pool: AsyncConnectionPool[Any] | ConnectionPool
    def execute_command(self, *args, **options): ...

View File

@@ -1,22 +0,0 @@
from _typeshed import Unused
from collections.abc import Iterable, Mapping
from contextlib import AbstractContextManager
from typing import Any, Literal, TypeVar, overload
from .client import Pipeline, Redis, _StrType
_T = TypeVar("_T")

# Whether the optional hiredis / cryptography packages are importable.
HIREDIS_AVAILABLE: bool
CRYPTOGRAPHY_AVAILABLE: bool
# Build a client from a URL; decode_responses selects Redis[str] vs Redis[bytes].
@overload
def from_url(url: str, *, db: int = ..., decode_responses: Literal[True], **kwargs: Any) -> Redis[str]: ...
@overload
def from_url(url: str, *, db: int = ..., decode_responses: Literal[False] = False, **kwargs: Any) -> Redis[bytes]: ...
# Context manager wrapping a pipeline of `redis_obj` — NOTE(review): presumably
# executes the pipeline on exit; confirm against the implementation.
def pipeline(redis_obj: Redis[_StrType]) -> AbstractContextManager[Pipeline[_StrType]]: ...

# Decode bytes to str; str input is passed through.
def str_if_bytes(value: str | bytes) -> str: ...
# str() conversion that is safe for bytes input.
def safe_str(value: object) -> str: ...
# Merge any number of mappings into a single dict.
def dict_merge(*dicts: Mapping[str, _T]) -> dict[str, _T]: ...
def list_keys_to_dict(key_list, callback): ...  # unused, alias for `dict.fromkeys`
# Flatten per-node result mappings into a single list of values.
def merge_result(command: Unused, res: Mapping[Any, Iterable[_T]]) -> list[_T]: ...