From cbf1ae3677ac7f012be5d7f31316f19e91b0703d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Fri, 6 Sep 2024 15:02:41 +0200 Subject: [PATCH 01/39] new: Initial merge types-redis MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Related https://github.com/valkey-io/valkey-py/issues/84 Signed-off-by: Raphaël Vinot --- valkey/__init__.py | 3 + valkey/asyncio/__init__.pyi | 64 + valkey/asyncio/client.pyi | 1091 ++++++++++++++ valkey/asyncio/cluster.pyi | 229 +++ valkey/asyncio/connection.pyi | 363 +++++ valkey/asyncio/lock.pyi | 51 + valkey/asyncio/parser.pyi | 9 + valkey/asyncio/retry.pyi | 12 + valkey/asyncio/sentinel.pyi | 162 +++ valkey/asyncio/utils.pyi | 15 + valkey/backoff.pyi | 31 + valkey/client.pyi | 799 +++++++++++ valkey/cluster.pyi | 265 ++++ valkey/commands/__init__.pyi | 17 + valkey/commands/bf/__init__.pyi | 58 + valkey/commands/bf/commands.pyi | 112 ++ valkey/commands/bf/info.pyi | 43 + valkey/commands/cluster.pyi | 60 + valkey/commands/core.pyi | 1743 +++++++++++++++++++++++ valkey/commands/graph/__init__.pyi | 26 + valkey/commands/graph/commands.pyi | 25 + valkey/commands/graph/edge.pyi | 14 + valkey/commands/graph/exceptions.pyi | 5 + valkey/commands/graph/node.pyi | 18 + valkey/commands/graph/path.pyi | 18 + valkey/commands/graph/query_result.pyi | 74 + valkey/commands/helpers.pyi | 10 + valkey/commands/json/__init__.pyi | 15 + valkey/commands/json/commands.pyi | 32 + valkey/commands/json/decoders.pyi | 4 + valkey/commands/json/path.pyi | 5 + valkey/commands/parser.pyi | 8 + valkey/commands/redismodules.pyi | 14 + valkey/commands/search/__init__.pyi | 22 + valkey/commands/search/aggregation.pyi | 53 + valkey/commands/search/commands.pyi | 111 ++ valkey/commands/search/query.pyi | 52 + valkey/commands/search/result.pyi | 7 + valkey/commands/sentinel.pyi | 17 + valkey/commands/timeseries/__init__.pyi | 14 + valkey/commands/timeseries/commands.pyi | 160 +++ 
valkey/commands/timeseries/info.pyi | 18 + valkey/commands/timeseries/utils.pyi | 5 + valkey/connection.pyi | 289 ++++ valkey/crc.pyi | 5 + valkey/credentials.pyi | 11 + valkey/exceptions.pyi | 42 + valkey/lock.pyi | 56 + valkey/ocsp.pyi | 21 + valkey/retry.pyi | 11 + valkey/sentinel.pyi | 62 + valkey/typing.pyi | 34 + valkey/utils.pyi | 22 + 53 files changed, 6407 insertions(+) create mode 100644 valkey/asyncio/__init__.pyi create mode 100644 valkey/asyncio/client.pyi create mode 100644 valkey/asyncio/cluster.pyi create mode 100644 valkey/asyncio/connection.pyi create mode 100644 valkey/asyncio/lock.pyi create mode 100644 valkey/asyncio/parser.pyi create mode 100644 valkey/asyncio/retry.pyi create mode 100644 valkey/asyncio/sentinel.pyi create mode 100644 valkey/asyncio/utils.pyi create mode 100644 valkey/backoff.pyi create mode 100644 valkey/client.pyi create mode 100644 valkey/cluster.pyi create mode 100644 valkey/commands/__init__.pyi create mode 100644 valkey/commands/bf/__init__.pyi create mode 100644 valkey/commands/bf/commands.pyi create mode 100644 valkey/commands/bf/info.pyi create mode 100644 valkey/commands/cluster.pyi create mode 100644 valkey/commands/core.pyi create mode 100644 valkey/commands/graph/__init__.pyi create mode 100644 valkey/commands/graph/commands.pyi create mode 100644 valkey/commands/graph/edge.pyi create mode 100644 valkey/commands/graph/exceptions.pyi create mode 100644 valkey/commands/graph/node.pyi create mode 100644 valkey/commands/graph/path.pyi create mode 100644 valkey/commands/graph/query_result.pyi create mode 100644 valkey/commands/helpers.pyi create mode 100644 valkey/commands/json/__init__.pyi create mode 100644 valkey/commands/json/commands.pyi create mode 100644 valkey/commands/json/decoders.pyi create mode 100644 valkey/commands/json/path.pyi create mode 100644 valkey/commands/parser.pyi create mode 100644 valkey/commands/redismodules.pyi create mode 100644 valkey/commands/search/__init__.pyi create mode 100644 
valkey/commands/search/aggregation.pyi create mode 100644 valkey/commands/search/commands.pyi create mode 100644 valkey/commands/search/query.pyi create mode 100644 valkey/commands/search/result.pyi create mode 100644 valkey/commands/sentinel.pyi create mode 100644 valkey/commands/timeseries/__init__.pyi create mode 100644 valkey/commands/timeseries/commands.pyi create mode 100644 valkey/commands/timeseries/info.pyi create mode 100644 valkey/commands/timeseries/utils.pyi create mode 100644 valkey/connection.pyi create mode 100644 valkey/crc.pyi create mode 100644 valkey/credentials.pyi create mode 100644 valkey/exceptions.pyi create mode 100644 valkey/lock.pyi create mode 100644 valkey/ocsp.pyi create mode 100644 valkey/retry.pyi create mode 100644 valkey/sentinel.pyi create mode 100644 valkey/typing.pyi create mode 100644 valkey/utils.pyi diff --git a/valkey/__init__.py b/valkey/__init__.py index e4202fbe..0b10bdac 100644 --- a/valkey/__init__.py +++ b/valkey/__init__.py @@ -44,6 +44,9 @@ def int_or_str(value): return value +__version__: str +VERSION: tuple[int | str, ...] 
+ try: __version__ = metadata.version("valkey") except metadata.PackageNotFoundError: diff --git a/valkey/asyncio/__init__.pyi b/valkey/asyncio/__init__.pyi new file mode 100644 index 00000000..7d45bb0f --- /dev/null +++ b/valkey/asyncio/__init__.pyi @@ -0,0 +1,64 @@ +from valkey.asyncio.client import Valkey as Valkey, StrictValkey as StrictValkey +from valkey.asyncio.cluster import ValkeyCluster as ValkeyCluster +from valkey.asyncio.connection import ( + BlockingConnectionPool as BlockingConnectionPool, + Connection as Connection, + ConnectionPool as ConnectionPool, + SSLConnection as SSLConnection, + UnixDomainSocketConnection as UnixDomainSocketConnection, +) +from valkey.asyncio.parser import CommandsParser as CommandsParser +from valkey.asyncio.sentinel import ( + Sentinel as Sentinel, + SentinelConnectionPool as SentinelConnectionPool, + SentinelManagedConnection as SentinelManagedConnection, + SentinelManagedSSLConnection as SentinelManagedSSLConnection, +) +from valkey.asyncio.utils import from_url as from_url +from valkey.backoff import default_backoff as default_backoff +from valkey.exceptions import ( + AuthenticationError as AuthenticationError, + AuthenticationWrongNumberOfArgsError as AuthenticationWrongNumberOfArgsError, + BusyLoadingError as BusyLoadingError, + ChildDeadlockedError as ChildDeadlockedError, + ConnectionError as ConnectionError, + DataError as DataError, + InvalidResponse as InvalidResponse, + PubSubError as PubSubError, + ReadOnlyError as ReadOnlyError, + ValkeyError as ValkeyError, + ResponseError as ResponseError, + TimeoutError as TimeoutError, + WatchError as WatchError, +) + +__all__ = [ + "AuthenticationError", + "AuthenticationWrongNumberOfArgsError", + "BlockingConnectionPool", + "BusyLoadingError", + "ChildDeadlockedError", + "CommandsParser", + "Connection", + "ConnectionError", + "ConnectionPool", + "DataError", + "from_url", + "default_backoff", + "InvalidResponse", + "PubSubError", + "ReadOnlyError", + "Valkey", + 
"ValkeyCluster", + "ValkeyError", + "ResponseError", + "Sentinel", + "SentinelConnectionPool", + "SentinelManagedConnection", + "SentinelManagedSSLConnection", + "SSLConnection", + "StrictValkey", + "TimeoutError", + "UnixDomainSocketConnection", + "WatchError", +] diff --git a/valkey/asyncio/client.pyi b/valkey/asyncio/client.pyi new file mode 100644 index 00000000..a33d7cec --- /dev/null +++ b/valkey/asyncio/client.pyi @@ -0,0 +1,1091 @@ +from _typeshed import Incomplete, Unused +from collections.abc import AsyncIterator, Awaitable, Callable, Generator, Iterable, Mapping, MutableMapping, Sequence +from datetime import datetime, timedelta +from types import TracebackType +from typing import Any, ClassVar, Literal, NoReturn, Protocol, TypedDict, overload +from typing_extensions import Self, TypeAlias + +from valkey import ValkeyError +from valkey.asyncio.connection import ConnectCallbackT, Connection, ConnectionPool +from valkey.asyncio.lock import Lock +from valkey.asyncio.retry import Retry +from valkey.client import AbstractValkey, _CommandOptions, _Key, _StrType, _Value +from valkey.commands import AsyncCoreCommands, AsyncSentinelCommands, ValkeyModuleCommands +from valkey.credentials import CredentialProvider +from valkey.typing import ChannelT, EncodableT, KeyT, PatternT, StreamIdT + +PubSubHandler: TypeAlias = Callable[[dict[str, str]], Awaitable[None]] + +class ResponseCallbackProtocol(Protocol): + def __call__(self, response: Any, **kwargs): ... + +class AsyncResponseCallbackProtocol(Protocol): + async def __call__(self, response: Any, **kwargs): ... 
+ +ResponseCallbackT: TypeAlias = ResponseCallbackProtocol | AsyncResponseCallbackProtocol + +class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], AsyncSentinelCommands): + response_callbacks: MutableMapping[str | bytes, ResponseCallbackT] + auto_close_connection_pool: bool + connection_pool: Any + single_connection_client: Any + connection: Any + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> Valkey[str]: ... 
+ @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[False] = False, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> Valkey[bytes]: ... 
+ @overload + def __init__( + self: Valkey[str], + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Valkey[bytes], + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[False] = False, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def __await__(self) -> Generator[Any, None, Self]: ... + async def initialize(self) -> Self: ... + def set_response_callback(self, command: str, callback: ResponseCallbackT): ... + def load_external_module(self, funcname, func) -> None: ... + def pipeline(self, transaction: bool = True, shard_hint: str | None = None) -> Pipeline[_StrType]: ... + async def transaction( + self, + func: Callable[[Pipeline[_StrType]], Any | Awaitable[Any]], + *watches: KeyT, + shard_hint: str | None = None, + value_from_callable: bool = False, + watch_delay: float | None = None, + ): ... 
+ def lock( + self, + name: KeyT, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + lock_class: type[Lock] | None = None, + thread_local: bool = True, + ) -> Lock: ... + def pubsub(self, **kwargs) -> PubSub: ... + def monitor(self) -> Monitor: ... + def client(self) -> Valkey[_StrType]: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self, _warnings: Any = ...) -> None: ... + async def close(self, close_connection_pool: bool | None = None) -> None: ... + async def execute_command(self, *args, **options): ... + async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ... + +StrictValkey = Valkey + +class MonitorCommandInfo(TypedDict): + time: float + db: int + client_address: str + client_port: str + client_type: str + command: str + +class Monitor: + monitor_re: Any + command_re: Any + connection_pool: Any + connection: Any + def __init__(self, connection_pool: ConnectionPool[Any]) -> None: ... + async def connect(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *args: Unused) -> None: ... + async def next_command(self) -> MonitorCommandInfo: ... + def listen(self) -> AsyncIterator[MonitorCommandInfo]: ... 
+ +class PubSub: + PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, ...]] + UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, ...]] + HEALTH_CHECK_MESSAGE: ClassVar[str] + connection_pool: Any + shard_hint: str | None + ignore_subscribe_messages: bool + connection: Any + encoder: Any + health_check_response: Iterable[str | bytes] + channels: Any + pending_unsubscribe_channels: Any + patterns: Any + pending_unsubscribe_patterns: Any + def __init__( + self, + connection_pool: ConnectionPool[Any], + shard_hint: str | None = None, + ignore_subscribe_messages: bool = False, + encoder: Incomplete | None = None, + ) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + async def reset(self) -> None: ... + def close(self) -> Awaitable[NoReturn]: ... + async def on_connect(self, connection: Connection): ... + @property + def subscribed(self) -> bool: ... + async def execute_command(self, *args: EncodableT): ... + async def parse_response(self, block: bool = True, timeout: float = 0): ... + async def check_health(self) -> None: ... + async def psubscribe(self, *args: ChannelT, **kwargs: PubSubHandler): ... + def punsubscribe(self, *args: ChannelT) -> Awaitable[Any]: ... + async def subscribe(self, *args: ChannelT, **kwargs: Callable[..., Any]): ... + def unsubscribe(self, *args) -> Awaitable[Any]: ... + def listen(self) -> AsyncIterator[Any]: ... + async def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0): ... + def ping(self, message: Incomplete | None = None) -> Awaitable[Any]: ... + async def handle_message(self, response, ignore_subscribe_messages: bool = False): ... + async def run(self, *, exception_handler: PSWorkerThreadExcHandlerT | None = None, poll_timeout: float = 1.0) -> None: ... 
+ +class PubsubWorkerExceptionHandler(Protocol): + def __call__(self, e: BaseException, pubsub: PubSub): ... + +class AsyncPubsubWorkerExceptionHandler(Protocol): + async def __call__(self, e: BaseException, pubsub: PubSub): ... + +PSWorkerThreadExcHandlerT: TypeAlias = PubsubWorkerExceptionHandler | AsyncPubsubWorkerExceptionHandler +CommandT: TypeAlias = tuple[tuple[str | bytes, ...], Mapping[str, Any]] +CommandStackT: TypeAlias = list[CommandT] + +class Pipeline(Valkey[_StrType]): + UNWATCH_COMMANDS: ClassVar[set[str]] + connection_pool: Any + connection: Any + response_callbacks: Any + is_transaction: bool + shard_hint: str | None + watching: bool + command_stack: Any + scripts: Any + explicit_transaction: bool + def __init__( + self, + connection_pool: ConnectionPool[Any], + response_callbacks: MutableMapping[str | bytes, ResponseCallbackT], + transaction: bool, + shard_hint: str | None, + ) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __await__(self) -> Generator[Any, None, Self]: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + async def reset(self) -> None: ... + def multi(self) -> None: ... + def execute_command(self, *args, **kwargs) -> Pipeline[_StrType] | Awaitable[Pipeline[_StrType]]: ... + async def immediate_execute_command(self, *args, **options): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, commands: CommandStackT, response: Iterable[Any]): ... + def annotate_exception(self, exception: Exception, number: int, command: Iterable[object]) -> None: ... + async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ... + async def load_scripts(self) -> None: ... + async def execute(self, raise_on_error: bool = True): ... + async def discard(self) -> None: ... 
+ async def watch(self, *names: KeyT) -> bool: ... + async def unwatch(self) -> bool: ... + # region acl commands + def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> Any: ... + def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> Any: ... + def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> Any: ... + def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any: ... + def acl_help(self, **kwargs: _CommandOptions) -> Any: ... + def acl_list(self, **kwargs: _CommandOptions) -> Any: ... + def acl_log(self, count: int | None = None, **kwargs: _CommandOptions) -> Any: ... + def acl_log_reset(self, **kwargs: _CommandOptions) -> Any: ... + def acl_load(self, **kwargs: _CommandOptions) -> Any: ... + def acl_save(self, **kwargs: _CommandOptions) -> Any: ... + def acl_setuser( # type: ignore[override] + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> Pipeline[_StrType]: ... + def acl_users(self, **kwargs: _CommandOptions) -> Any: ... + def acl_whoami(self, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region cluster commands + def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions) -> Any: ... + def readwrite(self, **kwargs: _CommandOptions) -> Any: ... + def readonly(self, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region BasicKey commands + def append(self, key, value) -> Any: ... 
+ def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ... + def bitfield(self, key, default_overflow: Incomplete | None = None) -> Any: ... + def bitop(self, operation, dest, *keys) -> Any: ... + def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ... + def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False) -> Any: ... + def decr(self, name, amount: int = 1) -> Any: ... + def decrby(self, name, amount: int = 1) -> Any: ... + def delete(self, *names: _Key) -> Any: ... + def dump(self, name: _Key) -> Any: ... + def exists(self, *names: _Key) -> Any: ... + def expire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Any: ... + def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False) -> Any: ... + def get(self, name: _Key) -> Any: ... + def getdel(self, name: _Key) -> Any: ... + def getex( + self, + name, + ex: Incomplete | None = None, + px: Incomplete | None = None, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + persist: bool = False, + ) -> Any: ... + def getbit(self, name: _Key, offset: int) -> Any: ... + def getrange(self, key, start, end) -> Any: ... + def getset(self, name, value) -> Any: ... + def incr(self, name: _Key, amount: int = 1) -> Any: ... + def incrby(self, name: _Key, amount: int = 1) -> Any: ... + def incrbyfloat(self, name: _Key, amount: float = 1.0) -> Any: ... + def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ... + def lmove( + self, + first_list: _Key, + second_list: _Key, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> Any: ... 
+ def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> Any: ... + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def mset(self, mapping: Mapping[_Key, _Value]) -> Any: ... + def msetnx(self, mapping: Mapping[_Key, _Value]) -> Any: ... + def move(self, name: _Key, db: int) -> Any: ... + def persist(self, name: _Key) -> Any: ... + def pexpire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Any: ... + def pexpireat( + self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Any: ... + def psetex(self, name, time_ms, value) -> Any: ... + def pttl(self, name: _Key) -> Any: ... + def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False) -> Any: ... + def randomkey(self, **kwargs: _CommandOptions) -> Any: ... + def rename(self, src, dst) -> Any: ... + def renamenx(self, src, dst) -> Any: ... + def restore( + self, + name, + ttl, + value, + replace: bool = False, + absttl: bool = False, + idletime: Incomplete | None = None, + frequency: Incomplete | None = None, + ) -> Any: ... + def set( # type: ignore[override] + self, + name: _Key, + value: _Value, + ex: None | int | timedelta = None, + px: None | int | timedelta = None, + nx: bool = False, + xx: bool = False, + keepttl: bool = False, + get: bool = False, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + ) -> Any: ... + def setbit(self, name: _Key, offset: int, value: int) -> Any: ... + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Any: ... + def setnx(self, name: _Key, value: _Value) -> Any: ... + def setrange(self, name, offset, value) -> Any: ... 
+ def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Incomplete | None = None, + withmatchlen: bool = False, + **kwargs: _CommandOptions, + ) -> Any: ... + def strlen(self, name) -> Any: ... + def substr(self, name, start, end: int = -1) -> Any: ... + def touch(self, *args) -> Any: ... + def ttl(self, name: _Key) -> Any: ... + def type(self, name) -> Any: ... + def unlink(self, *names: _Key) -> Any: ... + # endregion + # region hyperlog commands + def pfadd(self, name: _Key, *values: _Value) -> Any: ... + def pfcount(self, name: _Key) -> Any: ... + def pfmerge(self, dest: _Key, *sources: _Key) -> Any: ... + # endregion + # region hash commands + def hdel(self, name: _Key, *keys: _Key) -> Any: ... + def hexists(self, name: _Key, key: _Key) -> Any: ... + def hget(self, name: _Key, key: _Key) -> Any: ... + def hgetall(self, name: _Key) -> Any: ... + def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Any: ... + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Any: ... + def hkeys(self, name: _Key) -> Any: ... + def hlen(self, name: _Key) -> Any: ... + @overload + def hset( + self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None + ) -> Any: ... + @overload + def hset( + self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None + ) -> Any: ... + @overload + def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Any: ... + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Any: ... + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Any: ... + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def hvals(self, name: _Key) -> Any: ... + def hstrlen(self, name, key) -> Any: ... 
+ # endregion + # region geo commands + def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False) -> Any: ... + def geodist(self, name, place1, place2, unit: Incomplete | None = None) -> Any: ... + def geohash(self, name, *values) -> Any: ... + def geopos(self, name, *values) -> Any: ... + def georadius( + self, + name, + longitude, + latitude, + radius, + unit: Incomplete | None = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Incomplete | None = None, + sort: Incomplete | None = None, + store: Incomplete | None = None, + store_dist: Incomplete | None = None, + any: bool = False, + ) -> Any: ... + def georadiusbymember( + self, + name, + member, + radius, + unit: Incomplete | None = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Incomplete | None = None, + sort: Incomplete | None = None, + store: Incomplete | None = None, + store_dist: Incomplete | None = None, + any: bool = False, + ) -> Any: ... + def geosearch( + self, + name, + member: Incomplete | None = None, + longitude: Incomplete | None = None, + latitude: Incomplete | None = None, + unit: str = "m", + radius: Incomplete | None = None, + width: Incomplete | None = None, + height: Incomplete | None = None, + sort: Incomplete | None = None, + count: Incomplete | None = None, + any: bool = False, + withcoord: bool = False, + withdist: bool = False, + withhash: bool = False, + ) -> Any: ... + def geosearchstore( + self, + dest, + name, + member: Incomplete | None = None, + longitude: Incomplete | None = None, + latitude: Incomplete | None = None, + unit: str = "m", + radius: Incomplete | None = None, + width: Incomplete | None = None, + height: Incomplete | None = None, + sort: Incomplete | None = None, + count: Incomplete | None = None, + any: bool = False, + storedist: bool = False, + ) -> Any: ... 
+ # endregion + # region list commands + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ... + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ... + def brpoplpush(self, src, dst, timeout: int | None = 0) -> Any: ... + def lindex(self, name: _Key, index: int) -> Any: ... + def linsert( + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> Any: ... + def llen(self, name: _Key) -> Any: ... + def lpop(self, name, count: int | None = None) -> Any: ... + def lpush(self, name: _Value, *values: _Value) -> Any: ... + def lpushx(self, name, value) -> Any: ... + def lrange(self, name: _Key, start: int, end: int) -> Any: ... + def lrem(self, name: _Key, count: int, value: _Value) -> Any: ... + def lset(self, name: _Key, index: int, value: _Value) -> Any: ... + def ltrim(self, name: _Key, start: int, end: int) -> Any: ... + def rpop(self, name, count: int | None = None) -> Any: ... + def rpoplpush(self, src, dst) -> Any: ... + def rpush(self, name: _Value, *values: _Value) -> Any: ... + def rpushx(self, name, value) -> Any: ... + def lpos( + self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None + ) -> Any: ... + @overload # type: ignore[override] + def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + store: None = None, + groups: bool = False, + ) -> list[_StrType]: ... 
+ @overload + def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + *, + store: _Key, + groups: bool = False, + ) -> Any: ... + @overload + def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = False, + ) -> Any: ... + # endregion + # region scan commands + def scan( + self, + cursor: int = 0, + match: _Key | None = None, + count: int | None = None, + _type: str | None = None, + **kwargs: _CommandOptions, + ) -> Any: ... + def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... + def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... + @overload + def zscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... + @overload + def zscan( + self, + name: _Key, + cursor: int = 0, + match: _Key | None = None, + count: int | None = None, + *, + score_cast_func: Callable[[_StrType], Any], + ) -> Any: ... + @overload + def zscan( + self, name: _Key, cursor: int, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], Any] + ) -> Any: ... + # endregion + # region set commands + def sadd(self, name: _Key, *values: _Value) -> Any: ... + def scard(self, name: _Key) -> Any: ... + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sismember(self, name: _Key, value: _Value) -> Any: ... + def smembers(self, name: _Key) -> Any: ... 
+ def smismember(self, name, values, *args) -> Any: ... + def smove(self, src: _Key, dst: _Key, value: _Value) -> Any: ... + @overload + def spop(self, name: _Key, count: None = None) -> Any: ... + @overload + def spop(self, name: _Key, count: int) -> Any: ... + @overload + def srandmember(self, name: _Key, number: None = None) -> Any: ... + @overload + def srandmember(self, name: _Key, number: int) -> Any: ... + def srem(self, name: _Key, *values: _Value) -> Any: ... + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + # endregion + # region stream commands + def xack(self, name, groupname, *ids) -> Any: ... + def xadd( + self, + name, + fields, + id: str | int | bytes | memoryview = "*", + maxlen=None, + approximate: bool = True, + nomkstream: bool = False, + minid: Incomplete | None = None, + limit: Incomplete | None = None, + ) -> Any: ... + def xautoclaim( + self, + name, + groupname, + consumername, + min_idle_time, + start_id: StreamIdT = "0-0", + count: Incomplete | None = None, + justid: bool = False, + ) -> Any: ... + def xclaim( + self, + name, + groupname, + consumername, + min_idle_time, + message_ids, + idle=None, + time=None, + retrycount=None, + force=False, + justid=False, + ) -> Any: ... + def xdel(self, name, *ids) -> Any: ... + def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None) -> Any: ... + def xgroup_delconsumer(self, name, groupname, consumername) -> Any: ... + def xgroup_destroy(self, name, groupname) -> Any: ... + def xgroup_createconsumer(self, name, groupname, consumername) -> Any: ... + def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Any: ... + def xinfo_consumers(self, name, groupname) -> Any: ... + def xinfo_groups(self, name) -> Any: ... + def xinfo_stream(self, name, full: bool = False) -> Any: ... 
+ def xlen(self, name: _Key) -> Any: ... + def xpending(self, name, groupname) -> Any: ... + def xpending_range( + self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None + ) -> Any: ... + def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None) -> Any: ... + def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None) -> Any: ... + def xreadgroup( + self, + groupname, + consumername, + streams, + count: Incomplete | None = None, + block: Incomplete | None = None, + noack: bool = False, + ) -> Any: ... + def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None) -> Any: ... + def xtrim( + self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None + ) -> Any: ... + # endregion + # region sorted set commands + def zadd( + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = False, + xx: bool = False, + ch: bool = False, + incr: bool = False, + gt: Incomplete | None = False, + lt: Incomplete | None = False, + ) -> Any: ... + def zcard(self, name: _Key) -> Any: ... + def zcount(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zdiff(self, keys, withscores: bool = False) -> Any: ... + def zdiffstore(self, dest, keys) -> Any: ... + def zincrby(self, name: _Key, amount: float, value: _Value) -> Any: ... + def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ... + def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ... + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zpopmax(self, name: _Key, count: int | None = None) -> Any: ... + def zpopmin(self, name: _Key, count: int | None = None) -> Any: ... + def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False) -> Any: ... 
+ @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ... + @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ... + @overload # type: ignore[override] + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], Any], + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], None], + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = False, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... 
+ @overload # type: ignore[override] + def zrevrange( + self, name: _Key, start: int, end: int, withscores: Literal[True], score_cast_func: Callable[[_StrType], None] + ) -> Any: ... + @overload + def zrevrange(self, name: _Key, start: int, end: int, withscores: Literal[True]) -> Any: ... + @overload + def zrevrange( + self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ... + ) -> Any: ... + def zrangestore( + self, + dest, + name, + start, + end, + byscore: bool = False, + bylex: bool = False, + desc: bool = False, + offset: Incomplete | None = None, + num: Incomplete | None = None, + ) -> Any: ... + def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None) -> Any: ... + def zrevrangebylex(self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None) -> Any: ... + @overload # type: ignore[override] + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = None, + num: int | None = None, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], None], + ) -> Any: ... + @overload + def zrangebyscore( + self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True] + ) -> Any: ... + @overload + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = None, + num: int | None = None, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Any: ... + @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = None, + num: int | None = None, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], Any], + ) -> Any: ... + @overload + def zrevrangebyscore( + self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True] + ) -> Any: ... 
+ @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = None, + num: int | None = None, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Any: ... + def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ... + def zrem(self, name: _Key, *values: _Value) -> Any: ... + def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zremrangebyrank(self, name: _Key, min: int, max: int) -> Any: ... + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ... + def zscore(self, name: _Key, value: _Value) -> Any: ... + def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ... + def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ... + def zmscore(self, key, members) -> Any: ... + # endregion + # region management commands + def bgrewriteaof(self, **kwargs: _CommandOptions) -> Any: ... + def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions) -> Any: ... + def role(self) -> Any: ... + def client_kill(self, address: str, **kwargs: _CommandOptions) -> Any: ... + def client_kill_filter( + self, + _id: Incomplete | None = None, + _type: Incomplete | None = None, + addr: Incomplete | None = None, + skipme: Incomplete | None = None, + laddr: Incomplete | None = None, + user: Incomplete | None = None, + **kwargs: _CommandOptions, + ) -> Any: ... + def client_info(self, **kwargs: _CommandOptions) -> Any: ... + def client_list(self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions) -> Any: ... + def client_getname(self, **kwargs: _CommandOptions) -> Any: ... + def client_getredir(self, **kwargs: _CommandOptions) -> Any: ... + def client_reply(self, reply, **kwargs: _CommandOptions) -> Any: ... 
+ def client_id(self, **kwargs: _CommandOptions) -> Any: ... + def client_tracking_on( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ) -> Any: ... + def client_tracking_off( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ) -> Any: ... + def client_tracking( + self, + on: bool = True, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + **kwargs: _CommandOptions, + ) -> Any: ... + def client_trackinginfo(self, **kwargs: _CommandOptions) -> Any: ... + def client_setname(self, name: str, **kwargs: _CommandOptions) -> Any: ... + def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions) -> Any: ... + def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions) -> Any: ... + def client_unpause(self, **kwargs: _CommandOptions) -> Any: ... + def command(self, **kwargs: _CommandOptions) -> Any: ... + def command_info(self, **kwargs: _CommandOptions) -> Any: ... + def command_count(self, **kwargs: _CommandOptions) -> Any: ... + def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Any: ... + def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions) -> Any: ... + def config_resetstat(self, **kwargs: _CommandOptions) -> Any: ... + def config_rewrite(self, **kwargs: _CommandOptions) -> Any: ... + def dbsize(self, **kwargs: _CommandOptions) -> Any: ... + def debug_object(self, key, **kwargs: _CommandOptions) -> Any: ... + def debug_segfault(self, **kwargs: _CommandOptions) -> Any: ... + def echo(self, value: _Value, **kwargs: _CommandOptions) -> Any: ... + def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ... 
+ def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ... + def sync(self) -> Any: ... + def psync(self, replicationid, offset) -> Any: ... + def swapdb(self, first, second, **kwargs: _CommandOptions) -> Any: ... + def select(self, index, **kwargs: _CommandOptions) -> Any: ... + def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Any: ... + def lastsave(self, **kwargs: _CommandOptions) -> Any: ... + def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> Any: ... + def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = False, + replace: bool = False, + auth: Incomplete | None = None, + **kwargs: _CommandOptions, + ) -> Any: ... + def object(self, infotype, key, **kwargs: _CommandOptions) -> Any: ... + def memory_doctor(self, **kwargs: _CommandOptions) -> Any: ... + def memory_help(self, **kwargs: _CommandOptions) -> Any: ... + def memory_stats(self, **kwargs: _CommandOptions) -> Any: ... + def memory_malloc_stats(self, **kwargs: _CommandOptions) -> Any: ... + def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ... + def memory_purge(self, **kwargs: _CommandOptions) -> Any: ... + def ping(self, **kwargs: _CommandOptions) -> Any: ... + def quit(self, **kwargs: _CommandOptions) -> Any: ... + def replicaof(self, *args, **kwargs: _CommandOptions) -> Any: ... + def save(self, **kwargs: _CommandOptions) -> Any: ... + def shutdown( + self, + save: bool = False, + nosave: bool = False, + now: bool = False, + force: bool = False, + abort: bool = False, + **kwargs: _CommandOptions, + ) -> Any: ... + def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ... + def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ... + def slowlog_len(self, **kwargs: _CommandOptions) -> Any: ... 
+ def slowlog_reset(self, **kwargs: _CommandOptions) -> Any: ... + def time(self, **kwargs: _CommandOptions) -> Any: ... + def wait(self, num_replicas, timeout, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region module commands + def module_load(self, path, *args) -> Any: ... + def module_unload(self, name) -> Any: ... + def module_list(self) -> Any: ... + def command_getkeys(self, *args) -> Any: ... + # endregion + # region pubsub commands + def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> Any: ... + def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ... + def pubsub_numpat(self, **kwargs: _CommandOptions) -> Any: ... + def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region script commands + def eval(self, script, numkeys, *keys_and_args) -> Any: ... + def evalsha(self, sha, numkeys, *keys_and_args) -> Any: ... + def script_exists(self, *args) -> Any: ... + def script_debug(self, *args) -> Any: ... + def script_flush(self, sync_type: Incomplete | None = None) -> Any: ... + def script_kill(self) -> Any: ... + def script_load(self, script) -> Any: ... + def register_script(self, script: str | _StrType) -> Any: ... 
# type: ignore[override] + # endregion diff --git a/valkey/asyncio/cluster.pyi b/valkey/asyncio/cluster.pyi new file mode 100644 index 00000000..257769d6 --- /dev/null +++ b/valkey/asyncio/cluster.pyi @@ -0,0 +1,229 @@ +from _typeshed import Incomplete +from collections.abc import Awaitable, Callable, Mapping +from types import TracebackType +from typing import Any, Generic, TypeVar +from typing_extensions import Self + +from valkey.asyncio.client import ResponseCallbackT +from valkey.asyncio.connection import AbstractConnection, BaseParser, Connection, Encoder +from valkey.asyncio.parser import CommandsParser +from valkey.client import AbstractValkey +from valkey.cluster import AbstractValkeyCluster, LoadBalancer + +# TODO: add AsyncValkeyClusterCommands stubs +# from valkey.commands import AsyncValkeyClusterCommands +from valkey.commands.core import _StrType +from valkey.credentials import CredentialProvider +from valkey.exceptions import ResponseError +from valkey.retry import Retry +from valkey.typing import AnyKeyT, EncodableT, KeyT + +TargetNodesT = TypeVar("TargetNodesT", str, ClusterNode, list[ClusterNode], dict[Any, ClusterNode]) # noqa: Y001 + +# It uses `DefaultParser` in real life, but it is a dynamic base class. +class ClusterParser(BaseParser): + def on_disconnect(self) -> None: ... + def on_connect(self, connection: AbstractConnection) -> None: ... + async def can_read_destructive(self) -> bool: ... + async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ... 
+ +class ValkeyCluster(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]): # TODO: AsyncValkeyClusterCommands + @classmethod + def from_url( + cls, + url: str, + *, + host: str | None = None, + port: str | int = 6379, + # Cluster related kwargs + startup_nodes: list[ClusterNode] | None = None, + require_full_coverage: bool = True, + read_from_replicas: bool = False, + reinitialize_steps: int = 5, + cluster_error_retry_attempts: int = 3, + connection_error_retry_attempts: int = 3, + max_connections: int = 2147483648, + # Client related kwargs + db: str | int = 0, + path: str | None = None, + credential_provider: CredentialProvider | None = None, + username: str | None = None, + password: str | None = None, + client_name: str | None = None, + # Encoding related kwargs + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + # Connection related kwargs + health_check_interval: float = 0, + socket_connect_timeout: float | None = None, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_timeout: float | None = None, + retry: Retry | None = None, + retry_on_error: list[type[Exception]] | None = None, + # SSL related kwargs + ssl: bool = False, + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_cert_reqs: str = "required", + ssl_certfile: str | None = None, + ssl_check_hostname: bool = False, + ssl_keyfile: str | None = None, + address_remap: Callable[[str, int], tuple[str, int]] | None = None, + ) -> Self: ...
+ + retry: Retry | None + connection_kwargs: dict[str, Any] + nodes_manager: NodesManager + encoder: Encoder + read_from_replicas: bool + reinitialize_steps: int + cluster_error_retry_attempts: int + reinitialize_counter: int + commands_parser: CommandsParser + node_flags: set[str] + command_flags: dict[str, str] + response_callbacks: Incomplete + result_callbacks: dict[str, Callable[[Incomplete, Incomplete], Incomplete]] + + def __init__( + self, + host: str | None = None, + port: str | int = 6379, + # Cluster related kwargs + startup_nodes: list[ClusterNode] | None = None, + require_full_coverage: bool = True, + read_from_replicas: bool = False, + reinitialize_steps: int = 5, + cluster_error_retry_attempts: int = 3, + connection_error_retry_attempts: int = 3, + max_connections: int = 2147483648, + # Client related kwargs + db: str | int = 0, + path: str | None = None, + credential_provider: CredentialProvider | None = None, + username: str | None = None, + password: str | None = None, + client_name: str | None = None, + # Encoding related kwargs + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + # Connection related kwargs + health_check_interval: float = 0, + socket_connect_timeout: float | None = None, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_timeout: float | None = None, + retry: Retry | None = None, + retry_on_error: list[type[Exception]] | None = None, + # SSL related kwargs + ssl: bool = False, + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_cert_reqs: str = "required", + ssl_certfile: str | None = None, + ssl_check_hostname: bool = False, + ssl_keyfile: str | None = None, + address_remap: Callable[[str, int], tuple[str, int]] | None = None, + ) -> None: ... + async def initialize(self) -> Self: ... + async def close(self) -> None: ... + async def __aenter__(self) -> Self: ...
+ async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __await__(self) -> Awaitable[Self]: ... + def __del__(self) -> None: ... + async def on_connect(self, connection: Connection) -> None: ... + def get_nodes(self) -> list[ClusterNode]: ... + def get_primaries(self) -> list[ClusterNode]: ... + def get_replicas(self) -> list[ClusterNode]: ... + def get_random_node(self) -> ClusterNode: ... + def get_default_node(self) -> ClusterNode: ... + def set_default_node(self, node: ClusterNode) -> None: ... + def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ... + def get_node_from_key(self, key: str, replica: bool = False) -> ClusterNode | None: ... + def keyslot(self, key: EncodableT) -> int: ... + def get_encoder(self) -> Encoder: ... + def get_connection_kwargs(self) -> dict[str, Any | None]: ... + def set_response_callback(self, command: str, callback: ResponseCallbackT) -> None: ... + async def execute_command(self, *args: EncodableT, **kwargs: Any) -> Any: ... + def pipeline(self, transaction: Any | None = None, shard_hint: Any | None = None) -> ClusterPipeline[_StrType]: ... + +class ClusterNode: + host: str + port: str | int + name: str + server_type: str | None + max_connections: int + connection_class: type[Connection] + connection_kwargs: dict[str, Any] + response_callbacks: dict[Incomplete, Incomplete] + def __init__( + self, + host: str, + port: str | int, + server_type: str | None = None, + *, + max_connections: int = 2147483648, + connection_class: type[Connection] = ..., + **connection_kwargs: Any, + ) -> None: ... + def __eq__(self, obj: object) -> bool: ... + def __del__(self) -> None: ... + async def disconnect(self) -> None: ... + def acquire_connection(self) -> Connection: ... + async def parse_response(self, connection: Connection, command: str, **kwargs: Any) -> Any: ... 
+ async def execute_command(self, *args: Any, **kwargs: Any) -> Any: ... + async def execute_pipeline(self, commands: list[PipelineCommand]) -> bool: ... + +class NodesManager: + startup_nodes: dict[str, ClusterNode] + require_full_coverage: bool + connection_kwargs: dict[str, Any] + default_node: ClusterNode | None + nodes_cache: dict[str, ClusterNode] + slots_cache: dict[int, list[ClusterNode]] + read_load_balancer: LoadBalancer + address_remap: Callable[[str, int], tuple[str, int]] | None + def __init__( + self, + startup_nodes: list[ClusterNode], + require_full_coverage: bool, + connection_kwargs: dict[str, Any], + address_remap: Callable[[str, int], tuple[str, int]] | None = None, + ) -> None: ... + def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ... + def set_nodes(self, old: dict[str, ClusterNode], new: dict[str, ClusterNode], remove_old: bool = False) -> None: ... + def get_node_from_slot(self, slot: int, read_from_replicas: bool = False) -> ClusterNode: ... + def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ... + async def initialize(self) -> None: ... + async def close(self, attr: str = "nodes_cache") -> None: ... + def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ... + +class ClusterPipeline(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]): # TODO: AsyncValkeyClusterCommands + def __init__(self, client: ValkeyCluster[_StrType]) -> None: ... + async def initialize(self) -> Self: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __await__(self) -> Awaitable[Self]: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __bool__(self) -> bool: ... 
+ def __len__(self) -> int: ... + def execute_command(self, *args: KeyT | EncodableT, **kwargs: Any) -> Self: ... + async def execute(self, raise_on_error: bool = True, allow_redirections: bool = True) -> list[Any]: ... + def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> Self: ... + +class PipelineCommand: + args: Any + kwargs: Any + position: int + result: Exception | None | Any + def __init__(self, position: int, *args: Any, **kwargs: Any) -> None: ... diff --git a/valkey/asyncio/connection.pyi b/valkey/asyncio/connection.pyi new file mode 100644 index 00000000..b0525ffd --- /dev/null +++ b/valkey/asyncio/connection.pyi @@ -0,0 +1,363 @@ +import asyncio +import enum +import ssl +from _typeshed import Unused +from abc import abstractmethod +from collections.abc import Callable, Iterable, Mapping +from types import MappingProxyType +from typing import Any, Final, Generic, Literal, Protocol, TypedDict, TypeVar, overload +from typing_extensions import Self, TypeAlias + +from valkey.asyncio.retry import Retry +from valkey.credentials import CredentialProvider +from valkey.exceptions import AuthenticationError, ValkeyError, ResponseError +from valkey.typing import EncodableT, EncodedT + +_SSLVerifyMode: TypeAlias = Literal["none", "optional", "required"] + +SYM_STAR: Final[bytes] +SYM_DOLLAR: Final[bytes] +SYM_CRLF: Final[bytes] +SYM_LF: Final[bytes] +SYM_EMPTY: Final[bytes] + +SERVER_CLOSED_CONNECTION_ERROR: Final[str] + +class _Sentinel(enum.Enum): + sentinel = object() + +SENTINEL: Final[object] +MODULE_LOAD_ERROR: Final[str] +NO_SUCH_MODULE_ERROR: Final[str] +MODULE_UNLOAD_NOT_POSSIBLE_ERROR: Final[str] +MODULE_EXPORTS_DATA_TYPES_ERROR: Final[str] +NO_AUTH_SET_ERROR: Final[dict[str, type[AuthenticationError]]] + +class Encoder: + encoding: str + encoding_errors: str + decode_responses: bool + def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ... + def encode(self, value: EncodableT) -> EncodedT: ... 
+ def decode(self, value: EncodableT, force: bool = False) -> EncodableT: ... + +ExceptionMappingT: TypeAlias = Mapping[str, type[Exception] | Mapping[str, type[Exception]]] + +class BaseParser: + EXCEPTION_CLASSES: ExceptionMappingT + def __init__(self, socket_read_size: int) -> None: ... + @classmethod + def parse_error(cls, response: str) -> ResponseError: ... + @abstractmethod + def on_disconnect(self) -> None: ... + @abstractmethod + def on_connect(self, connection: AbstractConnection) -> None: ... + @abstractmethod + async def can_read_destructive(self) -> bool: ... + @abstractmethod + async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ... + +class PythonParser(BaseParser): + encoder: Encoder | None + def __init__(self, socket_read_size: int) -> None: ... + def on_connect(self, connection: AbstractConnection) -> None: ... + def on_disconnect(self) -> None: ... + async def can_read_destructive(self) -> bool: ... + async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | None: ... + +class LibvalkeyParser(BaseParser): + def __init__(self, socket_read_size: int) -> None: ... + def on_connect(self, connection: AbstractConnection) -> None: ... + def on_disconnect(self) -> None: ... + async def can_read_destructive(self) -> bool: ... + async def read_from_socket(self) -> Literal[True]: ... + async def read_response(self, disable_decoding: bool = False) -> EncodableT | list[EncodableT]: ... + +DefaultParser: type[PythonParser | LibvalkeyParser] + +class ConnectCallbackProtocol(Protocol): + def __call__(self, connection: Connection): ... + +class AsyncConnectCallbackProtocol(Protocol): + async def __call__(self, connection: Connection): ... 
+ +ConnectCallbackT: TypeAlias = ConnectCallbackProtocol | AsyncConnectCallbackProtocol + +class AbstractConnection: + pid: int + db: str | int + client_name: str | None + credential_provider: CredentialProvider | None + password: str | None + username: str | None + socket_timeout: float | None + socket_connect_timeout: float | None + retry_on_timeout: bool + retry_on_error: list[type[Exception]] + retry: Retry + health_check_interval: float + next_health_check: float + encoder: Encoder + valkey_connect_func: ConnectCallbackT | None + + def __init__( + self, + *, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + @abstractmethod + def repr_pieces(self) -> list[tuple[str, Any]]: ... + @property + def is_connected(self) -> bool: ... + def register_connect_callback(self, callback: ConnectCallbackT) -> None: ... + def clear_connect_callbacks(self) -> None: ... + def set_parser(self, parser_class: type[BaseParser]) -> None: ... + async def connect(self) -> None: ... + async def on_connect(self) -> None: ... + async def disconnect(self, nowait: bool = False) -> None: ... + async def check_health(self) -> None: ... + async def send_packed_command(self, command: bytes | str | Iterable[bytes], check_health: bool = True) -> None: ... + async def send_command(self, *args: Any, **kwargs: Any) -> None: ... 
+ async def can_read_destructive(self) -> bool: ... + async def read_response( + self, disable_decoding: bool = False, timeout: float | None = None, *, disconnect_on_error: bool = True + ) -> EncodableT | list[EncodableT] | None: ... + def pack_command(self, *args: EncodableT) -> list[bytes]: ... + def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> list[bytes]: ... + +class Connection(AbstractConnection): + host: str + port: int + socket_keepalive: bool + socket_keepalive_options: Mapping[int, int | bytes] | None + socket_type: int + + def __init__( + self, + *, + host: str = "localhost", + port: str | int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_type: int = 0, + # **kwargs forwarded to AbstractConnection. + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... + +class SSLConnection(Connection): + ssl_context: ValkeySSLContext + def __init__( + self, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: _SSLVerifyMode = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + *, + # **kwargs forwarded to Connection. 
+ host: str = "localhost", + port: str | int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_type: int = 0, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + @property + def keyfile(self) -> str | None: ... + @property + def certfile(self) -> str | None: ... + @property + def cert_reqs(self) -> ssl.VerifyMode: ... + @property + def ca_certs(self) -> str | None: ... + @property + def ca_data(self) -> str | None: ... + @property + def check_hostname(self) -> bool: ... + +class ValkeySSLContext: + keyfile: str | None + certfile: str | None + cert_reqs: ssl.VerifyMode + ca_certs: str | None + ca_data: str | None + check_hostname: bool + context: ssl.SSLContext | None + def __init__( + self, + keyfile: str | None = None, + certfile: str | None = None, + cert_reqs: _SSLVerifyMode | None = None, + ca_certs: str | None = None, + ca_data: str | None = None, + check_hostname: bool = False, + ) -> None: ... + def get(self) -> ssl.SSLContext: ... + +class UnixDomainSocketConnection(Connection): + path: str + def __init__( + self, + *, + path: str = "", + # **kwargs forwarded to AbstractConnection. 
+ db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... + +FALSE_STRINGS: Final[tuple[str, ...]] + +def to_bool(value: object) -> bool | None: ... + +URL_QUERY_ARGUMENT_PARSERS: MappingProxyType[str, Callable[[str], Any]] + +class ConnectKwargs(TypedDict): + username: str + password: str + connection_class: type[AbstractConnection] + host: str + port: int + db: int + path: str + +def parse_url(url: str) -> ConnectKwargs: ... + +_ConnectionT = TypeVar("_ConnectionT", bound=AbstractConnection) + +class ConnectionPool(Generic[_ConnectionT]): + # kwargs accepts all arguments from the connection class chosen for + # the given URL, except those encoded in the URL itself. + @classmethod + def from_url(cls, url: str, **kwargs: Any) -> Self: ... + + connection_class: type[_ConnectionT] + connection_kwargs: Mapping[str, Any] + max_connections: int + encoder_class: type[Encoder] + pid: int + + @overload + def __init__( + self: ConnectionPool[_ConnectionT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + connection_class: type[_ConnectionT], + max_connections: int | None = None, + # **kwargs are passed to the constructed connection instances. + **connection_kwargs: Any, + ) -> None: ... 
+    @overload
+    def __init__(self: ConnectionPool[Connection], *, max_connections: int | None = None, **connection_kwargs: Any) -> None: ...
+    def reset(self) -> None: ...
+    async def get_connection(self, command_name: Unused, *keys: Unused, **options: Unused) -> _ConnectionT: ...
+    def get_encoder(self) -> Encoder: ...
+    def make_connection(self) -> _ConnectionT: ...
+    async def release(self, connection: AbstractConnection) -> None: ...
+    def owns_connection(self, connection: AbstractConnection) -> bool: ...
+    async def disconnect(self, inuse_connections: bool = True) -> None: ...
+    def set_retry(self, retry: Retry) -> None: ...
+
+class BlockingConnectionPool(ConnectionPool[_ConnectionT]):
+    queue_class: type[asyncio.Queue[_ConnectionT | None]]
+    timeout: int | None
+    pool: asyncio.Queue[_ConnectionT | None]
+
+    @overload
+    def __init__(
+        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
+        max_connections: int,
+        timeout: int | None,
+        connection_class: type[_ConnectionT],
+        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
+        # **kwargs are passed to the constructed connection instances.
+        **connection_kwargs: Any,
+    ) -> None: ...
+    @overload
+    def __init__(
+        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
+        max_connections: int = 50,
+        timeout: int | None = 20,
+        *,
+        connection_class: type[_ConnectionT],
+        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
+        # **kwargs are passed to the constructed connection instances.
+        **connection_kwargs: Any,
+    ) -> None: ...
+    @overload
+    def __init__(
+        self: BlockingConnectionPool[Connection],
+        max_connections: int = 50,
+        timeout: int | None = 20,
+        *,
+        queue_class: type[asyncio.Queue[Connection | None]] = ...,
+        # **kwargs are passed to the constructed connection instances.
+        **connection_kwargs: Any,
+    ) -> None: ...
diff --git a/valkey/asyncio/lock.pyi b/valkey/asyncio/lock.pyi new file mode 100644 index 00000000..018591c7 --- /dev/null +++ b/valkey/asyncio/lock.pyi @@ -0,0 +1,51 @@ +import threading +from collections.abc import Awaitable +from types import SimpleNamespace, TracebackType +from typing import Any, ClassVar +from typing_extensions import Self + +from valkey.asyncio import Valkey +from valkey.commands.core import AsyncScript + +class Lock: + lua_release: ClassVar[AsyncScript | None] + lua_extend: ClassVar[AsyncScript | None] + lua_reacquire: ClassVar[AsyncScript | None] + LUA_RELEASE_SCRIPT: ClassVar[str] + LUA_EXTEND_SCRIPT: ClassVar[str] + LUA_REACQUIRE_SCRIPT: ClassVar[str] + valkey: Valkey[Any] + name: str | bytes | memoryview + timeout: float | None + sleep: float + blocking: bool + blocking_timeout: float | None + thread_local: bool + local: threading.local | SimpleNamespace + def __init__( + self, + valkey: Valkey[Any], + name: str | bytes | memoryview, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + thread_local: bool = True, + ) -> None: ... + def register_scripts(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + async def acquire( + self, blocking: bool | None = None, blocking_timeout: float | None = None, token: str | bytes | None = None + ) -> bool: ... + async def do_acquire(self, token: str | bytes) -> bool: ... + async def locked(self) -> bool: ... + async def owned(self) -> bool: ... + def release(self) -> Awaitable[None]: ... + async def do_release(self, expected_token: bytes) -> None: ... + def extend(self, additional_time: float, replace_ttl: bool = False) -> Awaitable[bool]: ... + async def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ... + def reacquire(self) -> Awaitable[bool]: ... 
+ async def do_reacquire(self) -> bool: ... diff --git a/valkey/asyncio/parser.pyi b/valkey/asyncio/parser.pyi new file mode 100644 index 00000000..fe5139a8 --- /dev/null +++ b/valkey/asyncio/parser.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete +from typing import Any + +# TODO: define and use: +# from valkey.asyncio.cluster import ClusterNode + +class CommandsParser: + async def initialize(self, node: Incomplete | None = None) -> None: ... # TODO: ClusterNode + async def get_keys(self, *args: Any) -> tuple[str, ...] | None: ... diff --git a/valkey/asyncio/retry.pyi b/valkey/asyncio/retry.pyi new file mode 100644 index 00000000..0970df7b --- /dev/null +++ b/valkey/asyncio/retry.pyi @@ -0,0 +1,12 @@ +from collections.abc import Awaitable, Callable, Iterable +from typing import TypeVar + +from valkey.backoff import AbstractBackoff +from valkey.exceptions import ValkeyError + +_T = TypeVar("_T") + +class Retry: + def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[ValkeyError], ...] = ...) -> None: ... + def update_supported_errors(self, specified_errors: Iterable[type[ValkeyError]]) -> None: ... + async def call_with_retry(self, do: Callable[[], Awaitable[_T]], fail: Callable[[ValkeyError], Awaitable[object]]) -> _T: ... 
diff --git a/valkey/asyncio/sentinel.pyi b/valkey/asyncio/sentinel.pyi new file mode 100644 index 00000000..1fa9e5fa --- /dev/null +++ b/valkey/asyncio/sentinel.pyi @@ -0,0 +1,162 @@ +from collections.abc import AsyncIterator, Iterable, Mapping +from typing import Any, Literal, TypedDict, TypeVar, overload + +from valkey.asyncio.client import Valkey +from valkey.asyncio.connection import ( + BaseParser, + ConnectCallbackT, + Connection, + ConnectionPool, + Encoder, + SSLConnection, + _ConnectionT, + _Sentinel, +) +from valkey.asyncio.retry import Retry +from valkey.commands import AsyncSentinelCommands +from valkey.credentials import CredentialProvider +from valkey.exceptions import ConnectionError, ValkeyError + +_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any]) + +class MasterNotFoundError(ConnectionError): ... +class SlaveNotFoundError(ConnectionError): ... + +class SentinelManagedConnection(Connection): + connection_pool: ConnectionPool[Any] | None + def __init__( + self, + *, + connection_pool: ConnectionPool[Any] | None, + # **kwargs forwarded to Connection. + host: str = "localhost", + port: str | int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_type: int = 0, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+    async def connect_to(self, address: tuple[str, int]) -> None: ...
+    async def connect(self) -> None: ...
+
+class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...
+
+class SentinelConnectionPool(ConnectionPool[_ConnectionT]):
+    is_master: bool
+    check_connection: bool
+    service_name: str
+    sentinel_manager: Sentinel
+    master_address: tuple[str, int] | None
+    slave_rr_counter: int | None
+
+    def __init__(
+        self,
+        service_name: str,
+        sentinel_manager: Sentinel,
+        *,
+        ssl: bool = False,
+        connection_class: type[SentinelManagedConnection] = ...,
+        is_master: bool = True,
+        check_connection: bool = False,
+        # **kwargs ultimately forwarded to the constructed Connection instances.
+        host: str = "localhost",
+        port: str | int = 6379,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_type: int = 0,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    async def get_master_address(self) -> tuple[str, int]: ...
+    async def rotate_slaves(self) -> AsyncIterator[tuple[str, int]]: ...
+ +_State = TypedDict( + "_State", {"ip": str, "port": int, "is_master": bool, "is_sdown": bool, "is_odown": bool, "num-other-sentinels": int} +) + +class Sentinel(AsyncSentinelCommands): + sentinel_kwargs: Mapping[str, Any] + sentinels: list[Valkey[Any]] + min_other_sentinels: int + connection_kwargs: Mapping[str, Any] + def __init__( + self, + sentinels: Iterable[tuple[str, int]], + min_other_sentinels: int = 0, + sentinel_kwargs: Mapping[str, Any] | None = None, + **connection_kwargs: Any, + ) -> None: ... + async def execute_command(self, *args: Any, once: bool = False, **kwargs: Any) -> Literal[True]: ... + def check_master_state(self, state: _State, service_name: str) -> bool: ... + async def discover_master(self, service_name: str) -> tuple[str, int]: ... + def filter_slaves(self, slaves: Iterable[_State]) -> list[tuple[str, int]]: ... + async def discover_slaves(self, service_name: str) -> list[tuple[str, int]]: ... + @overload + def master_for( + self, + service_name: str, + valkey_class: type[_ValkeyT], + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> _ValkeyT: ... + @overload + def master_for( + self, + service_name: str, + *, + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> Valkey[Any]: ... + @overload + def slave_for( + self, + service_name: str, + valkey_class: type[_ValkeyT], + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> _ValkeyT: ... + @overload + def slave_for( + self, + service_name: str, + *, + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> Valkey[Any]: ... 
diff --git a/valkey/asyncio/utils.pyi b/valkey/asyncio/utils.pyi
new file mode 100644
index 00000000..cd3b14df
--- /dev/null
+++ b/valkey/asyncio/utils.pyi
@@ -0,0 +1,15 @@
+from types import TracebackType
+from typing import Any, Generic
+
+from valkey.asyncio.client import Pipeline, Valkey
+from valkey.client import _StrType
+
+def from_url(url: str, **kwargs: Any) -> Valkey[Any]: ...
+
+class pipeline(Generic[_StrType]):
+    p: Pipeline[_StrType]
+    def __init__(self, valkey_obj: Valkey[_StrType]) -> None: ...
+    async def __aenter__(self) -> Pipeline[_StrType]: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
diff --git a/valkey/backoff.pyi b/valkey/backoff.pyi
new file mode 100644
index 00000000..40230a13
--- /dev/null
+++ b/valkey/backoff.pyi
@@ -0,0 +1,31 @@
+from abc import ABC, abstractmethod
+
+class AbstractBackoff(ABC):
+    def reset(self) -> None: ...
+    @abstractmethod
+    def compute(self, failures: int) -> float: ...
+
+class ConstantBackoff(AbstractBackoff):
+    def __init__(self, backoff: int) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class NoBackoff(ConstantBackoff):
+    def __init__(self) -> None: ...
+
+class ExponentialBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class FullJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class EqualJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class DecorrelatedJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+def default_backoff() -> EqualJitterBackoff: ...
diff --git a/valkey/client.pyi b/valkey/client.pyi new file mode 100644 index 00000000..b9ad6a83 --- /dev/null +++ b/valkey/client.pyi @@ -0,0 +1,799 @@ +import threading +from _typeshed import Incomplete, SupportsItems, Unused +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from datetime import datetime, timedelta +from re import Pattern +from types import TracebackType +from typing import Any, ClassVar, Literal, TypeVar, overload +from typing_extensions import Self, TypeAlias + +from valkey import ValkeyError + +from .commands import CoreCommands, ValkeyModuleCommands, SentinelCommands +from .connection import ConnectionPool, _ConnectFunc, _ConnectionPoolOptions +from .credentials import CredentialProvider +from .lock import Lock +from .retry import Retry +from .typing import ChannelT, EncodableT, KeyT, PatternT + +_Value: TypeAlias = bytes | float | int | str +_Key: TypeAlias = str | bytes + +# Lib returns str or bytes depending on value of decode_responses +_StrType = TypeVar("_StrType", bound=str | bytes) + +_VT = TypeVar("_VT") +_T = TypeVar("_T") + +# Keyword arguments that are passed to Valkey.parse_response(). +_ParseResponseOptions: TypeAlias = Any +# Keyword arguments that are passed to Valkey.execute_command(). +_CommandOptions: TypeAlias = _ConnectionPoolOptions | _ParseResponseOptions + +SYM_EMPTY: bytes +EMPTY_RESPONSE: str +NEVER_DECODE: str + +class CaseInsensitiveDict(dict[_StrType, _VT]): + def __init__(self, data: SupportsItems[_StrType, _VT]) -> None: ... + def update(self, data: SupportsItems[_StrType, _VT]) -> None: ... # type: ignore[override] + @overload + def get(self, k: _StrType, default: None = None) -> _VT | None: ... + @overload + def get(self, k: _StrType, default: _VT | _T) -> _VT | _T: ... + # Overrides many other methods too, but without changing signature + +def list_or_args(keys, args): ... +def timestamp_to_datetime(response): ... +def string_keys_to_dict(key_string, callback): ... 
+def parse_debug_object(response): ... +def parse_object(response, infotype): ... +def parse_info(response): ... + +SENTINEL_STATE_TYPES: dict[str, type[int]] + +def parse_sentinel_state(item): ... +def parse_sentinel_master(response): ... +def parse_sentinel_masters(response): ... +def parse_sentinel_slaves_and_sentinels(response): ... +def parse_sentinel_get_master(response): ... +def pairs_to_dict(response, decode_keys: bool = False, decode_string_values: bool = False): ... +def pairs_to_dict_typed(response, type_info): ... +def zset_score_pairs(response, **options): ... +def sort_return_tuples(response, **options): ... +def int_or_none(response): ... +def float_or_none(response): ... +def bool_ok(response): ... +def parse_client_list(response, **options): ... +def parse_config_get(response, **options): ... +def parse_scan(response, **options): ... +def parse_hscan(response, **options): ... +def parse_zscan(response, **options): ... +def parse_slowlog_get(response, **options): ... + +_LockType = TypeVar("_LockType") + +class AbstractValkey: + RESPONSE_CALLBACKS: dict[str, Any] + +class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], SentinelCommands): + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str | None = ..., + port: int | None = ..., + db: int | None = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + decode_responses: Literal[True], + retry_on_timeout: bool = ..., + retry_on_error: list[type[ValkeyError]] | None = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = 
..., + ssl_ca_certs: str | None = ..., + ssl_check_hostname: bool = ..., + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + ) -> Valkey[str]: ... + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str | None = ..., + port: int | None = ..., + db: int | None = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + decode_responses: Literal[False] = False, + retry_on_timeout: bool = ..., + retry_on_error: list[type[ValkeyError]] | None = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = ..., + ssl_ca_certs: str | None = ..., + ssl_check_hostname: bool = ..., + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + ) -> Valkey[bytes]: ... 
+ connection_pool: Any + response_callbacks: Any + @overload + def __init__( + self: Valkey[str], + host: str, + port: int, + db: int, + password: str | None, + socket_timeout: float | None, + socket_connect_timeout: float | None, + socket_keepalive: bool | None, + socket_keepalive_options: Mapping[str, int | str] | None, + connection_pool: ConnectionPool | None, + unix_socket_path: str | None, + encoding: str, + encoding_errors: str, + charset: str | None, + errors: str | None, + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str | int | None = "required", + ssl_ca_certs: str | None = None, + ssl_ca_path: Incomplete | None = None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Valkey[str], + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[str, int | str] | None = None, + connection_pool: ConnectionPool | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + charset: str | None = None, + errors: str | None = None, + *, + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str | int | None = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Valkey[bytes], + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[str, int | str] | None = None, + connection_pool: ConnectionPool | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + charset: str | None = None, + errors: str | None = None, + decode_responses: Literal[False] = False, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str | int | None = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def get_encoder(self): ... + def get_connection_kwargs(self): ... + def set_response_callback(self, command, callback): ... + def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ... + def transaction(self, func, *watches, **kwargs): ... 
+ @overload + def lock( + self, + name: _Key, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + lock_class: None = None, + thread_local: bool = True, + ) -> Lock: ... + @overload + def lock( + self, + name: _Key, + timeout: float | None, + sleep: float, + blocking: bool, + blocking_timeout: float | None, + lock_class: type[_LockType], + thread_local: bool = True, + ) -> _LockType: ... + @overload + def lock( + self, + name: _Key, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + *, + lock_class: type[_LockType], + thread_local: bool = True, + ) -> _LockType: ... + def pubsub(self, *, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ... + def execute_command(self, *args, **options: _CommandOptions): ... + def parse_response(self, connection, command_name, **options: _ParseResponseOptions): ... + def monitor(self) -> Monitor: ... + def __enter__(self) -> Valkey[_StrType]: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def client(self) -> Valkey[_StrType]: ... + +StrictValkey = Valkey + +class PubSub: + PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, str]] + UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, str]] + HEALTH_CHECK_MESSAGE: ClassVar[str] + connection_pool: Any + shard_hint: Any + ignore_subscribe_messages: Any + connection: Any + subscribed_event: threading.Event + encoder: Any + health_check_response_b: bytes + health_check_response: list[str] | list[bytes] + def __init__( + self, + connection_pool, + shard_hint: Incomplete | None = None, + ignore_subscribe_messages: bool = False, + encoder: Incomplete | None = None, + ) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self): ... + channels: Any + patterns: Any + def reset(self): ... + def close(self) -> None: ... + def on_connect(self, connection): ... + @property + def subscribed(self): ... + def execute_command(self, *args): ... + def clean_health_check_responses(self) -> None: ... + def parse_response(self, block: bool = True, timeout: float = 0): ... + def is_health_check_response(self, response) -> bool: ... + def check_health(self) -> None: ... + def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ... + def punsubscribe(self, *args: _Key) -> None: ... + def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ... + def unsubscribe(self, *args: _Key) -> None: ... + def listen(self): ... + def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0) -> dict[str, Any] | None: ... + def handle_message(self, response, ignore_subscribe_messages: bool = False) -> dict[str, Any] | None: ... + def run_in_thread(self, sleep_time: float = 0, daemon: bool = False, exception_handler: Incomplete | None = None): ... + def ping(self, message: _Value | None = None) -> None: ... + +class PubSubWorkerThread(threading.Thread): + daemon: Any + pubsub: Any + sleep_time: Any + exception_handler: Any + def __init__(self, pubsub, sleep_time, daemon: bool = False, exception_handler: Incomplete | None = None) -> None: ... + def run(self) -> None: ... + def stop(self) -> None: ... + +class Pipeline(Valkey[_StrType]): + UNWATCH_COMMANDS: Any + connection_pool: Any + connection: Any + response_callbacks: Any + transaction: bool + shard_hint: Any + watching: bool + + command_stack: Any + scripts: Any + explicit_transaction: Any + def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ... + def __enter__(self) -> Pipeline[_StrType]: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def discard(self) -> None: ... + def reset(self) -> None: ... + def multi(self) -> None: ... + def execute_command(self, *args, **options): ... + def immediate_execute_command(self, *args, **options): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, commands, response): ... + def annotate_exception(self, exception, number, command): ... + def parse_response(self, connection, command_name, **options): ... + def load_scripts(self): ... + def execute(self, raise_on_error: bool = True) -> list[Any]: ... + def watch(self, *names: _Key) -> bool: ... + def unwatch(self) -> bool: ... + # in the Valkey implementation, the following methods are inherited from client. + def set_response_callback(self, command, callback): ... + def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ... + def acl_cat(self, category: str | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_deluser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_genpass(self, bits: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_getuser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_list(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_load(self) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def acl_setuser( # type: ignore[override] + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> Pipeline[_StrType]: ... + def acl_users(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_whoami(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgrewriteaof(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgsave(self, schedule: bool = True) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_id(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_kill(self, address: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_list(self, _type: str | None = None, client_id: list[str] = []) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_getname(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_setname(self, name: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def readwrite(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def readonly(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... + def config_set( + self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions + ) -> Pipeline[_StrType]: ... + def config_resetstat(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_rewrite(self) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def dbsize(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def debug_object(self, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def echo(self, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushall(self, asynchronous: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushdb(self, asynchronous: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override] + def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... # type: ignore[override] + def lastsave(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def object(self, infotype, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def ping(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def save(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ... + def sentinel_master(self, service_name) -> Pipeline[_StrType]: ... + def sentinel_masters(self) -> Pipeline[_StrType]: ... + def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ... + def sentinel_remove(self, name) -> Pipeline[_StrType]: ... + def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ... + def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ... + def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ... + def slaveof(self, host=None, port=None) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_get(self, num=None) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_len(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_reset(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def time(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def append(self, key, value) -> Pipeline[_StrType]: ... 
+ def bitcount( # type: ignore[override] + self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None + ) -> Pipeline[_StrType]: ... + def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ... + def bitpos(self, key, bit, start=None, end=None, mode: str | None = None) -> Pipeline[_StrType]: ... + def decr(self, name, amount=1) -> Pipeline[_StrType]: ... # type: ignore[override] + def delete(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __delitem__(self, _Key) -> None: ... + def dump(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def exists(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def expire( # type: ignore[override] + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Pipeline[_StrType]: ... + def expireat( + self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Pipeline[_StrType]: ... + def get(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __getitem__(self, name) -> Pipeline[_StrType]: ... + def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def getrange(self, key, start, end) -> Pipeline[_StrType]: ... + def getset(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def incr(self, name, amount=1) -> Pipeline[_StrType]: ... # type: ignore[override] + def incrby(self, name, amount=1) -> Pipeline[_StrType]: ... # type: ignore[override] + def incrbyfloat(self, name, amount=1.0) -> Pipeline[_StrType]: ... # type: ignore[override] + def keys(self, pattern: _Key = "*") -> Pipeline[_StrType]: ... # type: ignore[override] + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def persist(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pexpire( # type: ignore[override] + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Pipeline[_StrType]: ... + def pexpireat( # type: ignore[override] + self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Pipeline[_StrType]: ... + def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ... + def pttl(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def randomkey(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def rename(self, src, dst) -> Pipeline[_StrType]: ... + def renamenx(self, src, dst) -> Pipeline[_StrType]: ... + def restore( + self, + name, + ttl, + value, + replace: bool = False, + absttl: bool = False, + idletime: Incomplete | None = None, + frequency: Incomplete | None = None, + ) -> Pipeline[_StrType]: ... + def set( # type: ignore[override] + self, + name: _Key, + value: _Value, + ex: None | int | timedelta = None, + px: None | int | timedelta = None, + nx: bool = False, + xx: bool = False, + keepttl: bool = False, + get: bool = False, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + ) -> Pipeline[_StrType]: ... + def __setitem__(self, name, value) -> None: ... + def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def setnx(self, name, value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def setrange(self, name, offset, value) -> Pipeline[_StrType]: ... + def strlen(self, name) -> Pipeline[_StrType]: ... + def substr(self, name, start, end=-1) -> Pipeline[_StrType]: ... + def ttl(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def type(self, name) -> Pipeline[_StrType]: ... + def unlink(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def blmove( # type: ignore[override] + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> Pipeline[_StrType]: ... + def blpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override] + def brpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override] + def brpoplpush(self, src, dst, timeout=0) -> Pipeline[_StrType]: ... + def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def linsert( # type: ignore[override] + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> Pipeline[_StrType]: ... + def llen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def lmove( # type: ignore[override] + self, + first_list: _Key, + second_list: _Key, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> Pipeline[_StrType]: ... + def lpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ... + def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def lpushx(self, name, value) -> Pipeline[_StrType]: ... + def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ... + def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ... + def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpushx(self, name, value) -> Pipeline[_StrType]: ... + def sort( # type: ignore[override] + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + store: _Key | None = None, + groups: bool = False, + ) -> Pipeline[_StrType]: ... + def scan( # type: ignore[override] + self, cursor: int = 0, match: _Key | None = None, count: int | None = None, _type: str | None = None + ) -> Pipeline[_StrType]: ... + def scan_iter(self, match: _Key | None = None, count: int | None = None, _type: str | None = None) -> Iterator[Any]: ... # type: ignore[override] + def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ... + def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def hscan_iter(self, name, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ... + def zscan_iter( + self, name: _Key, match: _Key | None = None, count: int | None = None, score_cast_func: Callable[[_StrType], Any] = ... + ) -> Iterator[Any]: ... + def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def scard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def smembers(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def spop(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def srandmember(self, name: _Key, number: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ... + def xadd( + self, + name, + fields, + id="*", + maxlen=None, + approximate: bool = True, + nomkstream: bool = False, + minid: Incomplete | None = None, + limit: int | None = None, + ) -> Pipeline[_StrType]: ... + def xclaim( + self, + name, + groupname, + consumername, + min_idle_time, + message_ids, + idle=None, + time=None, + retrycount=None, + force=False, + justid=False, + ) -> Pipeline[_StrType]: ... + def xdel(self, name, *ids) -> Pipeline[_StrType]: ... 
+ def xgroup_create(self, name, groupname, id="$", mkstream=False, entries_read: int | None = None) -> Pipeline[_StrType]: ... + def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ... + def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ... + def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Pipeline[_StrType]: ... + def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ... + def xinfo_groups(self, name) -> Pipeline[_StrType]: ... + def xinfo_stream(self, name, full: bool = False) -> Pipeline[_StrType]: ... + def xlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def xpending(self, name, groupname) -> Pipeline[_StrType]: ... + def xpending_range( + self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None + ) -> Pipeline[_StrType]: ... + def xrange(self, name, min="-", max="+", count=None) -> Pipeline[_StrType]: ... + def xread(self, streams, count=None, block=None) -> Pipeline[_StrType]: ... + def xreadgroup(self, groupname, consumername, streams, count=None, block=None, noack=False) -> Pipeline[_StrType]: ... + def xrevrange(self, name, max="+", min="-", count=None) -> Pipeline[_StrType]: ... + def xtrim( + self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None + ) -> Pipeline[_StrType]: ... + def zadd( # type: ignore[override] + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = False, + xx: bool = False, + ch: bool = False, + incr: bool = False, + gt: Incomplete | None = False, + lt: Incomplete | None = False, + ) -> Pipeline[_StrType]: ... + def zcard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zinterstore( # type: ignore[override] + self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None + ) -> Pipeline[_StrType]: ... + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zpopmax(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def zpopmin(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override] + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrange( # type: ignore[override] + self, + name: _Key, + start: int, + end: int, + desc: bool = False, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Pipeline[_StrType]: ... + def zrangebylex( # type: ignore[override] + self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None + ) -> Pipeline[_StrType]: ... + def zrangebyscore( # type: ignore[override] + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = None, + num: int | None = None, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Pipeline[_StrType]: ... + def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrevrange( # type: ignore[override] + self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[_StrType], Any] = ... + ) -> Pipeline[_StrType]: ... + def zrevrangebyscore( # type: ignore[override] + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = None, + num: int | None = None, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Pipeline[_StrType]: ... + def zrevrangebylex( # type: ignore[override] + self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None + ) -> Pipeline[_StrType]: ... + def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override] + def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zunionstore( # type: ignore[override] + self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None + ) -> Pipeline[_StrType]: ... + def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfcount(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hgetall(self, name: _Key) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Pipeline[_StrType]: ... # type: ignore[override] + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Pipeline[_StrType]: ... # type: ignore[override] + def hkeys(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + @overload # type: ignore[override] + def hset( + self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None + ) -> Pipeline[_StrType]: ... + @overload + def hset( + self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None + ) -> Pipeline[_StrType]: ... + @overload + def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Pipeline[_StrType]: ... + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hvals(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... + def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... + def script_exists(self, *args) -> Pipeline[_StrType]: ... + def script_flush(self, sync_type: Incomplete | None = None) -> Pipeline[_StrType]: ... + def script_kill(self) -> Pipeline[_StrType]: ... + def script_load(self, script) -> Pipeline[_StrType]: ... + def pubsub_channels(self, pattern: _Key = "*") -> Pipeline[_StrType]: ... 
 # type: ignore[override]
+    def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def pubsub_numpat(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def monitor(self) -> Monitor: ...
+    def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def client(self) -> Any: ...
+
+class Monitor:
+    command_re: Pattern[str]
+    monitor_re: Pattern[str]
+    def __init__(self, connection_pool) -> None: ...
+    def __enter__(self) -> Self: ...
+    def __exit__(self, *args: Unused) -> None: ...
+    def next_command(self) -> dict[str, Any]: ...
+    def listen(self) -> Iterable[dict[str, Any]]: ...
diff --git a/valkey/cluster.pyi b/valkey/cluster.pyi
new file mode 100644
index 00000000..6758c05e
--- /dev/null
+++ b/valkey/cluster.pyi
@@ -0,0 +1,265 @@
+from _typeshed import Incomplete, Unused
+from collections.abc import Callable, Iterable, Sequence
+from threading import Lock
+from types import TracebackType
+from typing import Any, ClassVar, Literal, NoReturn, Protocol
+from typing_extensions import Self
+
+from valkey.client import CaseInsensitiveDict, PubSub, Valkey, _ParseResponseOptions
+from valkey.commands import CommandsParser, ValkeyClusterCommands
+from valkey.commands.core import _StrType
+from valkey.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable
+from valkey.exceptions import MovedError, ValkeyError
+from valkey.retry import Retry
+from valkey.typing import EncodableT
+
+def get_node_name(host: str, port: str | int) -> str: ...
+def get_connection(redis_node: Valkey[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ...
+def parse_scan_result(command: Unused, res, **options): ...
+def parse_pubsub_numsub(command: Unused, res, **options: Unused): ...
+def parse_cluster_slots(resp, **options) -> dict[tuple[int, int], dict[str, Any]]: ...
+def parse_cluster_myshardid(resp: bytes, **options: Unused) -> str: ...
+ +PRIMARY: str +REPLICA: str +SLOT_ID: str +VALKEY_ALLOWED_KEYS: tuple[str, ...] +KWARGS_DISABLED_KEYS: tuple[str, ...] +PIPELINE_BLOCKED_COMMANDS: tuple[str, ...] + +def cleanup_kwargs(**kwargs: Any) -> dict[str, Any]: ... + +# It uses `DefaultParser` in real life, but it is a dynamic base class. +class ClusterParser(BaseParser): ... + +class AbstractValkeyCluster: + ValkeyClusterRequestTTL: ClassVar[int] + PRIMARIES: ClassVar[str] + REPLICAS: ClassVar[str] + ALL_NODES: ClassVar[str] + RANDOM: ClassVar[str] + DEFAULT_NODE: ClassVar[str] + NODE_FLAGS: ClassVar[set[str]] + COMMAND_FLAGS: ClassVar[dict[str, str]] + CLUSTER_COMMANDS_RESPONSE_CALLBACKS: ClassVar[dict[str, Any]] + RESULT_CALLBACKS: ClassVar[dict[str, Callable[[Incomplete, Incomplete], Incomplete]]] + ERRORS_ALLOW_RETRY: ClassVar[tuple[type[ValkeyError], ...]] + +class ValkeyCluster(AbstractValkeyCluster, ValkeyClusterCommands[_StrType]): + user_on_connect_func: Callable[[Connection], object] | None + encoder: Encoder + cluster_error_retry_attempts: int + command_flags: dict[str, str] + node_flags: set[str] + read_from_replicas: bool + reinitialize_counter: int + reinitialize_steps: int + nodes_manager: NodesManager + cluster_response_callbacks: CaseInsensitiveDict[str, Callable[..., Incomplete]] + result_callbacks: CaseInsensitiveDict[str, Callable[[Incomplete, Incomplete], Incomplete]] + commands_parser: CommandsParser + def __init__( # TODO: make @overloads, either `url` or `host:port` can be passed + self, + host: str | None = None, + port: int | None = 6379, + startup_nodes: list[ClusterNode] | None = None, + cluster_error_retry_attempts: int = 3, + retry: Retry | None = None, + require_full_coverage: bool = False, + reinitialize_steps: int = 5, + read_from_replicas: bool = False, + dynamic_startup_nodes: bool = True, + url: str | None = None, + address_remap: Callable[[str, int], tuple[str, int]] | None = None, + **kwargs, + ) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def disconnect_connection_pools(self) -> None: ... + @classmethod + def from_url(cls, url: str, **kwargs) -> Self: ... + def on_connect(self, connection: Connection) -> None: ... + def get_redis_connection(self, node: ClusterNode) -> Valkey[Any]: ... + def get_node( + self, host: str | None = None, port: str | int | None = None, node_name: str | None = None + ) -> ClusterNode | None: ... + def get_primaries(self) -> list[ClusterNode]: ... + def get_replicas(self) -> list[ClusterNode]: ... + def get_random_node(self) -> ClusterNode: ... + def get_nodes(self) -> list[ClusterNode]: ... + def get_node_from_key(self, key: _Encodable, replica: bool = False) -> ClusterNode | None: ... + def get_default_node(self) -> ClusterNode | None: ... + def set_default_node(self, node: ClusterNode | None) -> bool: ... + def monitor(self, target_node: Incomplete | None = None): ... + def pubsub( + self, node: Incomplete | None = None, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs + ): ... + def pipeline(self, transaction: Incomplete | None = None, shard_hint: Incomplete | None = None): ... + def lock( + self, + name: str, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + lock_class: type[Incomplete] | None = None, + thread_local: bool = True, + ): ... + def keyslot(self, key: _Encodable) -> int: ... + def determine_slot(self, *args): ... + def get_encoder(self) -> Encoder: ... + def get_connection_kwargs(self) -> dict[str, Any]: ... + def execute_command(self, *args, **kwargs): ... + def close(self) -> None: ... 
+ +class ClusterNode: + host: str + port: int + name: str + server_type: str | None + redis_connection: Valkey[Incomplete] | None + def __init__( + self, host: str, port: int, server_type: str | None = None, redis_connection: Valkey[Incomplete] | None = None + ) -> None: ... + def __eq__(self, obj: object) -> bool: ... + def __del__(self) -> None: ... + +class LoadBalancer: + primary_to_idx: dict[str, int] + start_index: int + def __init__(self, start_index: int = 0) -> None: ... + def get_server_index(self, primary: str, list_size: int) -> int: ... + def reset(self) -> None: ... + +class NodesManager: + nodes_cache: dict[str, ClusterNode] + slots_cache: dict[str, list[ClusterNode]] + startup_nodes: dict[str, ClusterNode] + default_node: ClusterNode | None + from_url: bool + connection_pool_class: type[ConnectionPool] + connection_kwargs: dict[str, Incomplete] # TODO: could be a TypedDict + read_load_balancer: LoadBalancer + address_remap: Callable[[str, int], tuple[str, int]] | None + def __init__( + self, + startup_nodes: Iterable[ClusterNode], + from_url: bool = False, + require_full_coverage: bool = False, + lock: Lock | None = None, + dynamic_startup_nodes: bool = True, + connection_pool_class: type[ConnectionPool] = ..., + address_remap: Callable[[str, int], tuple[str, int]] | None = None, + **kwargs, # TODO: same type as connection_kwargs + ) -> None: ... + def get_node( + self, host: str | None = None, port: int | str | None = None, node_name: str | None = None + ) -> ClusterNode | None: ... + def update_moved_exception(self, exception: MovedError) -> None: ... + def get_node_from_slot(self, slot: str, read_from_replicas: bool = False, server_type: str | None = None) -> ClusterNode: ... + def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ... + def populate_startup_nodes(self, nodes: Iterable[ClusterNode]) -> None: ... + def check_slots_coverage(self, slots_cache: dict[str, list[ClusterNode]]) -> bool: ... 
+ def create_redis_connections(self, nodes: Iterable[ClusterNode]) -> None: ... + def create_redis_node(self, host: str, port: int | str, **kwargs: Any) -> Valkey[Incomplete]: ... + def initialize(self) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... + def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ... + +class ClusterPubSub(PubSub): + node: ClusterNode | None + cluster: ValkeyCluster[Any] + def __init__( + self, + redis_cluster: ValkeyCluster[Any], + node: ClusterNode | None = None, + host: str | None = None, + port: int | None = None, + **kwargs, + ) -> None: ... + def set_pubsub_node( + self, cluster: ValkeyCluster[Any], node: ClusterNode | None = None, host: str | None = None, port: int | None = None + ) -> None: ... + def get_pubsub_node(self) -> ClusterNode | None: ... + def execute_command(self, *args, **kwargs) -> None: ... + def get_redis_connection(self) -> Valkey[Any] | None: ... + +class ClusterPipeline(ValkeyCluster[_StrType]): + command_stack: list[Incomplete] + nodes_manager: Incomplete + refresh_table_asap: bool + result_callbacks: Incomplete + startup_nodes: Incomplete + read_from_replicas: bool + command_flags: Incomplete + cluster_response_callbacks: Incomplete + cluster_error_retry_attempts: int + reinitialize_counter: int + reinitialize_steps: int + encoder: Encoder + commands_parser: Incomplete + def __init__( + self, + nodes_manager, + commands_parser, + result_callbacks: Incomplete | None = None, + cluster_response_callbacks: Incomplete | None = None, + startup_nodes: Incomplete | None = None, + read_from_replicas: bool = False, + cluster_error_retry_attempts: int = 3, + reinitialize_steps: int = 5, + lock: Lock | None = None, + **kwargs, + ) -> None: ... + def __len__(self) -> int: ... + def __bool__(self) -> Literal[True]: ... + def execute_command(self, *args, **kwargs): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, stack) -> None: ... 
+ def annotate_exception(self, exception, number, command) -> None: ... + def execute(self, raise_on_error: bool = True): ... + scripts: set[Any] # is only set in `reset()` + watching: bool # is only set in `reset()` + explicit_transaction: bool # is only set in `reset()` + def reset(self) -> None: ... + def send_cluster_commands(self, stack, raise_on_error: bool = True, allow_redirections: bool = True): ... + def eval(self) -> None: ... + def multi(self) -> None: ... + def immediate_execute_command(self, *args, **options) -> None: ... + def load_scripts(self) -> None: ... + def watch(self, *names) -> None: ... + def unwatch(self) -> None: ... + def script_load_for_pipeline(self, *args, **kwargs) -> None: ... + def delete(self, *names): ... + +def block_pipeline_command(name: str) -> Callable[..., NoReturn]: ... + +class PipelineCommand: + args: Sequence[EncodableT] + options: _ParseResponseOptions + position: int | None + result: Any | Exception | None + node: Incomplete | None + asking: bool + def __init__( + self, args: Sequence[EncodableT], options: _ParseResponseOptions | None = None, position: int | None = None + ) -> None: ... + +class _ParseResponseCallback(Protocol): + def __call__(self, connection: Connection, command: EncodableT, /, **kwargs) -> Any: ... + +class NodeCommands: + parse_response: _ParseResponseCallback + connection_pool: ConnectionPool + connection: Connection + commands: list[PipelineCommand] + def __init__( + self, parse_response: _ParseResponseCallback, connection_pool: ConnectionPool, connection: Connection + ) -> None: ... + def append(self, c: PipelineCommand) -> None: ... + def write(self) -> None: ... + def read(self) -> None: ... 
diff --git a/valkey/commands/__init__.pyi b/valkey/commands/__init__.pyi
new file mode 100644
index 00000000..1abccc40
--- /dev/null
+++ b/valkey/commands/__init__.pyi
@@ -0,0 +1,17 @@
+from .cluster import ValkeyClusterCommands as ValkeyClusterCommands
+from .core import AsyncCoreCommands as AsyncCoreCommands, CoreCommands as CoreCommands
+from .helpers import list_or_args as list_or_args
+from .parser import CommandsParser as CommandsParser
+from .redismodules import ValkeyModuleCommands as ValkeyModuleCommands
+from .sentinel import AsyncSentinelCommands as AsyncSentinelCommands, SentinelCommands as SentinelCommands
+
+__all__ = [
+    "ValkeyClusterCommands",
+    "CommandsParser",
+    "AsyncCoreCommands",
+    "CoreCommands",
+    "list_or_args",
+    "ValkeyModuleCommands",
+    "AsyncSentinelCommands",
+    "SentinelCommands",
+]
diff --git a/valkey/commands/bf/__init__.pyi b/valkey/commands/bf/__init__.pyi
new file mode 100644
index 00000000..d5ef70ee
--- /dev/null
+++ b/valkey/commands/bf/__init__.pyi
@@ -0,0 +1,58 @@
+from typing import Any
+
+from .commands import *
+from .info import BFInfo as BFInfo, CFInfo as CFInfo, CMSInfo as CMSInfo, TDigestInfo as TDigestInfo, TopKInfo as TopKInfo
+
+class AbstractBloom:
+    @staticmethod
+    def append_items(params, items) -> None: ...
+    @staticmethod
+    def append_error(params, error) -> None: ...
+    @staticmethod
+    def append_capacity(params, capacity) -> None: ...
+    @staticmethod
+    def append_expansion(params, expansion) -> None: ...
+    @staticmethod
+    def append_no_scale(params, noScale) -> None: ...
+    @staticmethod
+    def append_weights(params, weights) -> None: ...
+    @staticmethod
+    def append_no_create(params, noCreate) -> None: ...
+    @staticmethod
+    def append_items_and_increments(params, items, increments) -> None: ...
+    @staticmethod
+    def append_values_and_weights(params, items, weights) -> None: ...
+    @staticmethod
+    def append_max_iterations(params, max_iterations) -> None: ...
+ @staticmethod + def append_bucket_size(params, bucket_size) -> None: ... + +class CMSBloom(CMSCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class TOPKBloom(TOPKCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class CFBloom(CFCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class TDigestBloom(TDigestCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class BFBloom(BFCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... diff --git a/valkey/commands/bf/commands.pyi b/valkey/commands/bf/commands.pyi new file mode 100644 index 00000000..99a296fd --- /dev/null +++ b/valkey/commands/bf/commands.pyi @@ -0,0 +1,112 @@ +from _typeshed import Incomplete + +BF_RESERVE: str +BF_ADD: str +BF_MADD: str +BF_INSERT: str +BF_EXISTS: str +BF_MEXISTS: str +BF_SCANDUMP: str +BF_LOADCHUNK: str +BF_INFO: str +CF_RESERVE: str +CF_ADD: str +CF_ADDNX: str +CF_INSERT: str +CF_INSERTNX: str +CF_EXISTS: str +CF_DEL: str +CF_COUNT: str +CF_SCANDUMP: str +CF_LOADCHUNK: str +CF_INFO: str +CMS_INITBYDIM: str +CMS_INITBYPROB: str +CMS_INCRBY: str +CMS_QUERY: str +CMS_MERGE: str +CMS_INFO: str +TOPK_RESERVE: str +TOPK_ADD: str +TOPK_INCRBY: str +TOPK_QUERY: str +TOPK_COUNT: str +TOPK_LIST: str +TOPK_INFO: str +TDIGEST_CREATE: str +TDIGEST_RESET: str +TDIGEST_ADD: str +TDIGEST_MERGE: str +TDIGEST_CDF: str +TDIGEST_QUANTILE: str +TDIGEST_MIN: str +TDIGEST_MAX: str +TDIGEST_INFO: str + +class BFCommands: + def create(self, key, errorRate, capacity, expansion: Incomplete | None = None, noScale: Incomplete | None = None): ... + def add(self, key, item): ... 
+ def madd(self, key, *items): ... + def insert( + self, + key, + items, + capacity: Incomplete | None = None, + error: Incomplete | None = None, + noCreate: Incomplete | None = None, + expansion: Incomplete | None = None, + noScale: Incomplete | None = None, + ): ... + def exists(self, key, item): ... + def mexists(self, key, *items): ... + def scandump(self, key, iter): ... + def loadchunk(self, key, iter, data): ... + def info(self, key): ... + +class CFCommands: + def create( + self, + key, + capacity, + expansion: Incomplete | None = None, + bucket_size: Incomplete | None = None, + max_iterations: Incomplete | None = None, + ): ... + def add(self, key, item): ... + def addnx(self, key, item): ... + def insert(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ... + def insertnx(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ... + def exists(self, key, item): ... + def delete(self, key, item): ... + def count(self, key, item): ... + def scandump(self, key, iter): ... + def loadchunk(self, key, iter, data): ... + def info(self, key): ... + +class TOPKCommands: + def reserve(self, key, k, width, depth, decay): ... + def add(self, key, *items): ... + def incrby(self, key, items, increments): ... + def query(self, key, *items): ... + def count(self, key, *items): ... + def list(self, key, withcount: bool = False): ... + def info(self, key): ... + +class TDigestCommands: + def create(self, key, compression: int = 100): ... + def reset(self, key): ... + def add(self, key, values): ... + def merge(self, destination_key, num_keys, *keys, compression: int | None = None, override: bool = False): ... + def min(self, key): ... + def max(self, key): ... + def quantile(self, key, quantile, *quantiles): ... + def cdf(self, key, value, *values): ... + def info(self, key): ... + +class CMSCommands: + def initbydim(self, key, width, depth): ... + def initbyprob(self, key, error, probability): ... 
+ def incrby(self, key, items, increments): ... + def query(self, key, *items): ... + def merge(self, destKey, numKeys, srcKeys, weights=[]): ... + def info(self, key): ... diff --git a/valkey/commands/bf/info.pyi b/valkey/commands/bf/info.pyi new file mode 100644 index 00000000..54d1cf04 --- /dev/null +++ b/valkey/commands/bf/info.pyi @@ -0,0 +1,43 @@ +from typing import Any + +class BFInfo: + capacity: Any + size: Any + filterNum: Any + insertedNum: Any + expansionRate: Any + def __init__(self, args) -> None: ... + +class CFInfo: + size: Any + bucketNum: Any + filterNum: Any + insertedNum: Any + deletedNum: Any + bucketSize: Any + expansionRate: Any + maxIteration: Any + def __init__(self, args) -> None: ... + +class CMSInfo: + width: Any + depth: Any + count: Any + def __init__(self, args) -> None: ... + +class TopKInfo: + k: Any + width: Any + depth: Any + decay: Any + def __init__(self, args) -> None: ... + +class TDigestInfo: + compression: Any + capacity: Any + mergedNodes: Any + unmergedNodes: Any + mergedWeight: Any + unmergedWeight: Any + totalCompressions: Any + def __init__(self, args) -> None: ... diff --git a/valkey/commands/cluster.pyi b/valkey/commands/cluster.pyi new file mode 100644 index 00000000..2654a73f --- /dev/null +++ b/valkey/commands/cluster.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from .core import ACLCommands, DataAccessCommands, ManagementCommands, PubSubCommands, _StrType + +class ClusterMultiKeyCommands: + def mget_nonatomic(self, keys, *args): ... + def mset_nonatomic(self, mapping): ... + def exists(self, *keys): ... + def delete(self, *keys): ... + def touch(self, *keys): ... + def unlink(self, *keys): ... + +class ClusterManagementCommands(ManagementCommands): + def slaveof(self, *args, **kwargs) -> None: ... + def replicaof(self, *args, **kwargs) -> None: ... + def swapdb(self, *args, **kwargs) -> None: ... 
+ +class ClusterDataAccessCommands(DataAccessCommands[_StrType]): + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Incomplete | None = None, + withmatchlen: bool = False, + **kwargs, + ): ... + +class ValkeyClusterCommands( + ClusterMultiKeyCommands, ClusterManagementCommands, ACLCommands[_StrType], PubSubCommands, ClusterDataAccessCommands[_StrType] +): + def cluster_addslots(self, target_node, *slots): ... + def cluster_countkeysinslot(self, slot_id): ... + def cluster_count_failure_report(self, node_id): ... + def cluster_delslots(self, *slots): ... + def cluster_failover(self, target_node, option: Incomplete | None = None): ... + def cluster_info(self, target_nodes: Incomplete | None = None): ... + def cluster_keyslot(self, key): ... + def cluster_meet(self, host, port, target_nodes: Incomplete | None = None): ... + def cluster_nodes(self): ... + def cluster_replicate(self, target_nodes, node_id): ... + def cluster_reset(self, soft: bool = True, target_nodes: Incomplete | None = None): ... + def cluster_save_config(self, target_nodes: Incomplete | None = None): ... + def cluster_get_keys_in_slot(self, slot, num_keys): ... + def cluster_set_config_epoch(self, epoch, target_nodes: Incomplete | None = None): ... + def cluster_setslot(self, target_node, node_id, slot_id, state): ... + def cluster_setslot_stable(self, slot_id): ... + def cluster_replicas(self, node_id, target_nodes: Incomplete | None = None): ... + def cluster_slots(self, target_nodes: Incomplete | None = None): ... + def cluster_myshardid(self, target_nodes: Incomplete | None = None): ... + def cluster_links(self, target_node): ... + def cluster_flushslots(self, target_nodes: Incomplete | None = None) -> NoReturn: ... + def cluster_bumpepoch(self, target_nodes: Incomplete | None = None) -> NoReturn: ... + read_from_replicas: bool + def readonly(self, target_nodes: Incomplete | None = None): ... 
+ def readwrite(self, target_nodes: Incomplete | None = None): ... diff --git a/valkey/commands/core.pyi b/valkey/commands/core.pyi new file mode 100644 index 00000000..770bf4ce --- /dev/null +++ b/valkey/commands/core.pyi @@ -0,0 +1,1743 @@ +import builtins +from _typeshed import Incomplete, SupportsItems +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator, Mapping, Sequence +from datetime import datetime, timedelta +from typing import Any, Generic, Literal, TypeVar, overload + +from ..asyncio.client import Valkey as AsyncValkey +from ..client import _CommandOptions, _Key, _Value +from ..typing import ChannelT, EncodableT, KeyT, PatternT, ScriptTextT, StreamIdT + +_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn") +_StrType = TypeVar("_StrType", bound=str | bytes) + +class ACLCommands(Generic[_StrType]): + def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ... + def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ... + def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ... + def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ... + def acl_help(self, **kwargs: _CommandOptions): ... + def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ... + def acl_log_reset(self, **kwargs: _CommandOptions): ... + def acl_load(self, **kwargs: _CommandOptions) -> bool: ... + def acl_save(self, **kwargs: _CommandOptions): ... 
+ def acl_setuser( + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> bool: ... + def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_whoami(self, **kwargs: _CommandOptions) -> str: ... + +class AsyncACLCommands(Generic[_StrType]): + async def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ... + async def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ... + async def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ... + async def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ... + async def acl_help(self, **kwargs: _CommandOptions): ... + async def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ... + async def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ... + async def acl_log_reset(self, **kwargs: _CommandOptions): ... + async def acl_load(self, **kwargs: _CommandOptions) -> bool: ... + async def acl_save(self, **kwargs: _CommandOptions): ... 
+ async def acl_setuser( + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> bool: ... + async def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ... + async def acl_whoami(self, **kwargs: _CommandOptions) -> str: ... + +class ManagementCommands: + def bgrewriteaof(self, **kwargs: _CommandOptions): ... + def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ... + def role(self): ... + def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ... + def client_kill_filter( + self, + _id: Incomplete | None = None, + _type: Incomplete | None = None, + addr: Incomplete | None = None, + skipme: Incomplete | None = None, + laddr: Incomplete | None = None, + user: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + def client_info(self, **kwargs: _CommandOptions): ... + def client_list( + self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions + ) -> list[dict[str, str]]: ... + def client_getname(self, **kwargs: _CommandOptions) -> str | None: ... + def client_getredir(self, **kwargs: _CommandOptions): ... + def client_reply(self, reply, **kwargs: _CommandOptions): ... + def client_id(self, **kwargs: _CommandOptions) -> int: ... + def client_tracking_on( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... 
+ def client_tracking_off( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... + def client_tracking( + self, + on: bool = True, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + **kwargs: _CommandOptions, + ): ... + def client_trackinginfo(self, **kwargs: _CommandOptions): ... + def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ... + def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ... + def client_unpause(self, **kwargs: _CommandOptions): ... + def client_no_evict(self, mode: str): ... + def client_no_touch(self, mode: str): ... + def command(self, **kwargs: _CommandOptions): ... + def command_info(self, **kwargs: _CommandOptions): ... + def command_count(self, **kwargs: _CommandOptions): ... + def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ... + def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ... + def config_resetstat(self, **kwargs: _CommandOptions): ... + def config_rewrite(self, **kwargs: _CommandOptions): ... + def dbsize(self, **kwargs: _CommandOptions) -> int: ... + def debug_object(self, key, **kwargs: _CommandOptions): ... + def debug_segfault(self, **kwargs: _CommandOptions): ... + def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ... + def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + def sync(self): ... + def psync(self, replicationid, offset): ... + def swapdb(self, first, second, **kwargs: _CommandOptions): ... 
+ def select(self, index, **kwargs: _CommandOptions): ... + def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... + def lastsave(self, **kwargs: _CommandOptions): ... + def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... + def reset(self) -> None: ... + def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = False, + replace: bool = False, + auth: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + def object(self, infotype, key, **kwargs: _CommandOptions): ... + def memory_doctor(self, **kwargs: _CommandOptions): ... + def memory_help(self, **kwargs: _CommandOptions): ... + def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ... + def memory_malloc_stats(self, **kwargs: _CommandOptions): ... + def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ... + def memory_purge(self, **kwargs: _CommandOptions): ... + def ping(self, **kwargs: _CommandOptions) -> bool: ... + def quit(self, **kwargs: _CommandOptions): ... + def replicaof(self, *args, **kwargs: _CommandOptions): ... + def save(self, **kwargs: _CommandOptions) -> bool: ... + def shutdown( + self, + save: bool = False, + nosave: bool = False, + now: bool = False, + force: bool = False, + abort: bool = False, + **kwargs: _CommandOptions, + ) -> None: ... + def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ... + def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ... + def slowlog_len(self, **kwargs: _CommandOptions): ... + def slowlog_reset(self, **kwargs: _CommandOptions): ... + def time(self, **kwargs: _CommandOptions): ... + def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ... + +class AsyncManagementCommands: + async def bgrewriteaof(self, **kwargs: _CommandOptions): ... 
+ async def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ... + async def role(self): ... + async def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ... + async def client_kill_filter( + self, + _id: Incomplete | None = None, + _type: Incomplete | None = None, + addr: Incomplete | None = None, + skipme: Incomplete | None = None, + laddr: Incomplete | None = None, + user: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + async def client_info(self, **kwargs: _CommandOptions): ... + async def client_list( + self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions + ) -> list[dict[str, str]]: ... + async def client_getname(self, **kwargs: _CommandOptions) -> str | None: ... + async def client_getredir(self, **kwargs: _CommandOptions): ... + async def client_reply(self, reply, **kwargs: _CommandOptions): ... + async def client_id(self, **kwargs: _CommandOptions) -> int: ... + async def client_tracking_on( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... + async def client_tracking_off( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... + async def client_tracking( + self, + on: bool = True, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + **kwargs: _CommandOptions, + ): ... + async def client_trackinginfo(self, **kwargs: _CommandOptions): ... + async def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + async def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ... + async def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ... + async def client_unpause(self, **kwargs: _CommandOptions): ... 
+ async def command(self, **kwargs: _CommandOptions): ... + async def command_info(self, **kwargs: _CommandOptions): ... + async def command_count(self, **kwargs: _CommandOptions): ... + async def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ... + async def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ... + async def config_resetstat(self, **kwargs: _CommandOptions): ... + async def config_rewrite(self, **kwargs: _CommandOptions): ... + async def dbsize(self, **kwargs: _CommandOptions) -> int: ... + async def debug_object(self, key, **kwargs: _CommandOptions): ... + async def debug_segfault(self, **kwargs: _CommandOptions): ... + async def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ... + async def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + async def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + async def sync(self): ... + async def psync(self, replicationid, offset): ... + async def swapdb(self, first, second, **kwargs: _CommandOptions): ... + async def select(self, index, **kwargs: _CommandOptions): ... + async def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... + async def lastsave(self, **kwargs: _CommandOptions): ... + async def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... + async def reset(self) -> None: ... + async def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = False, + replace: bool = False, + auth: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + async def object(self, infotype, key, **kwargs: _CommandOptions): ... + async def memory_doctor(self, **kwargs: _CommandOptions): ... + async def memory_help(self, **kwargs: _CommandOptions): ... 
+ async def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ... + async def memory_malloc_stats(self, **kwargs: _CommandOptions): ... + async def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ... + async def memory_purge(self, **kwargs: _CommandOptions): ... + async def ping(self, **kwargs: _CommandOptions) -> bool: ... + async def quit(self, **kwargs: _CommandOptions): ... + async def replicaof(self, *args, **kwargs: _CommandOptions): ... + async def save(self, **kwargs: _CommandOptions) -> bool: ... + async def shutdown( + self, + save: bool = False, + nosave: bool = False, + now: bool = False, + force: bool = False, + abort: bool = False, + **kwargs: _CommandOptions, + ) -> None: ... + async def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ... + async def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ... + async def slowlog_len(self, **kwargs: _CommandOptions): ... + async def slowlog_reset(self, **kwargs: _CommandOptions): ... + async def time(self, **kwargs: _CommandOptions): ... + async def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ... + +class BasicKeyCommands(Generic[_StrType]): + def append(self, key, value): ... + def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ... + def bitfield(self, key, default_overflow: Incomplete | None = None): ... + def bitop(self, operation, dest, *keys): ... + def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ... + def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ... + def decr(self, name, amount: int = 1) -> int: ... + def decrby(self, name, amount: int = 1) -> int: ... + def delete(self, *names: _Key) -> int: ... + def __delitem__(self, name: _Key) -> None: ... 
+ def dump(self, name: _Key) -> _StrType | None: ... + def exists(self, *names: _Key) -> int: ... + __contains__ = exists + def expire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> bool: ... + def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ... + def get(self, name: _Key) -> _StrType | None: ... + def getdel(self, name: _Key) -> _StrType | None: ... + def getex( + self, + name, + ex: Incomplete | None = None, + px: Incomplete | None = None, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + persist: bool = False, + ): ... + def __getitem__(self, name: str): ... + def getbit(self, name: _Key, offset: int) -> int: ... + def getrange(self, key, start, end): ... + def getset(self, name, value) -> _StrType | None: ... + def incr(self, name: _Key, amount: int = 1) -> int: ... + def incrby(self, name: _Key, amount: int = 1) -> int: ... + def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ... + def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ... + def lmove( + self, + first_list: _Key, + second_list: _Key, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> _Value: ... + def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> _Value | None: ... + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ... + def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ... + def move(self, name: _Key, db: int) -> bool: ... + def persist(self, name: _Key) -> bool: ... + def pexpire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Literal[1, 0]: ... 
+ def pexpireat( + self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Literal[1, 0]: ... + def psetex(self, name, time_ms, value): ... + def pttl(self, name: _Key) -> int: ... + def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ... + def randomkey(self, **kwargs: _CommandOptions): ... + def rename(self, src, dst): ... + def renamenx(self, src, dst): ... + def restore( + self, + name, + ttl, + value, + replace: bool = False, + absttl: bool = False, + idletime: Incomplete | None = None, + frequency: Incomplete | None = None, + ): ... + def set( + self, + name: _Key, + value: _Value, + ex: None | float | timedelta = None, + px: None | float | timedelta = None, + nx: bool = False, + xx: bool = False, + keepttl: bool = False, + get: bool = False, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + ) -> bool | None: ... + def __setitem__(self, name, value) -> None: ... + def setbit(self, name: _Key, offset: int, value: int) -> int: ... + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ... + def setnx(self, name: _Key, value: _Value) -> bool: ... + def setrange(self, name, offset, value): ... + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Incomplete | None = None, + withmatchlen: bool = False, + **kwargs: _CommandOptions, + ): ... + def strlen(self, name): ... + def substr(self, name, start, end: int = -1): ... + def touch(self, *args): ... + def ttl(self, name: _Key) -> int: ... + def type(self, name): ... + def watch(self, *names): ... + def unwatch(self): ... + def unlink(self, *names: _Key) -> int: ... + +class AsyncBasicKeyCommands(Generic[_StrType]): + async def append(self, key, value): ... + async def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ... 
+ async def bitfield(self, key, default_overflow: Incomplete | None = None): ... + async def bitop(self, operation, dest, *keys): ... + async def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ... + async def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ... + async def decr(self, name, amount: int = 1) -> int: ... + async def decrby(self, name, amount: int = 1) -> int: ... + async def delete(self, *names: _Key) -> int: ... + async def dump(self, name: _Key) -> _StrType | None: ... + async def exists(self, *names: _Key) -> int: ... + async def expire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> bool: ... + async def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ... + async def get(self, name: _Key) -> _StrType | None: ... + async def getdel(self, name: _Key) -> _StrType | None: ... + async def getex( + self, + name, + ex: Incomplete | None = None, + px: Incomplete | None = None, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + persist: bool = False, + ): ... + async def getbit(self, name: _Key, offset: int) -> int: ... + async def getrange(self, key, start, end): ... + async def getset(self, name, value) -> _StrType | None: ... + async def incr(self, name: _Key, amount: int = 1) -> int: ... + async def incrby(self, name: _Key, amount: int = 1) -> int: ... + async def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ... + async def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ... + async def lmove( + self, + first_list: _Key, + second_list: _Key, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> _Value: ... 
+    # --- Tail of the async key/string command stubs; the class header is in an
+    # earlier hunk. Signatures mirror the synchronous variants. ---
+    async def blmove(
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        timeout: float,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> _Value | None: ...
+    async def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
+    async def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ...
+    async def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ...
+    async def move(self, name: _Key, db: int) -> bool: ...
+    async def persist(self, name: _Key) -> bool: ...
+    # PEXPIRE/PEXPIREAT return the server's 1/0 reply, hence Literal[1, 0].
+    async def pexpire(
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Literal[1, 0]: ...
+    async def pexpireat(
+        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Literal[1, 0]: ...
+    async def psetex(self, name, time_ms, value): ...
+    async def pttl(self, name: _Key) -> int: ...
+    async def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ...
+    async def randomkey(self, **kwargs: _CommandOptions): ...
+    async def rename(self, src, dst): ...
+    async def renamenx(self, src, dst): ...
+    async def restore(
+        self,
+        name,
+        ttl,
+        value,
+        replace: bool = False,
+        absttl: bool = False,
+        idletime: Incomplete | None = None,
+        frequency: Incomplete | None = None,
+    ): ...
+    async def set(
+        self,
+        name: _Key,
+        value: _Value,
+        ex: None | float | timedelta = None,
+        px: None | float | timedelta = None,
+        nx: bool = False,
+        xx: bool = False,
+        keepttl: bool = False,
+        get: bool = False,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+    ) -> bool | None: ...
+    async def setbit(self, name: _Key, offset: int, value: int) -> int: ...
+    async def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ...
+    async def setnx(self, name: _Key, value: _Value) -> bool: ...
+    async def setrange(self, name, offset, value): ...
+    async def stralgo(
+        self,
+        algo,
+        value1,
+        value2,
+        specific_argument: str = "strings",
+        len: bool = False,
+        idx: bool = False,
+        minmatchlen: Incomplete | None = None,
+        withmatchlen: bool = False,
+        **kwargs: _CommandOptions,
+    ): ...
+    async def strlen(self, name): ...
+    async def substr(self, name, start, end: int = -1): ...
+    async def touch(self, *args): ...
+    async def ttl(self, name: _Key) -> int: ...
+    async def type(self, name): ...
+    async def watch(self, *names): ...
+    async def unwatch(self): ...
+    async def unlink(self, *names: _Key) -> int: ...
+    def __getitem__(self, name: str): ...
+    def __setitem__(self, name, value) -> None: ...
+    def __delitem__(self, name: _Key) -> None: ...
+    # NOTE(review): annotated `-> None` as merged from types-redis, although
+    # `__contains__` conventionally returns bool -- confirm against valkey-py.
+    def __contains__(self, name: _Key) -> None: ...
+
+# Synchronous list command stubs; results are decoded to the client's _StrType.
+class ListCommands(Generic[_StrType]):
+    # Blocking pops: timeout 0/None blocks forever (never returns None), a
+    # positive timeout may time out and yield None -- hence the overload pairs.
+    @overload
+    def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    @overload
+    def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    def brpoplpush(self, src, dst, timeout: int | None = 0): ...
+    def lindex(self, name: _Key, index: int) -> _StrType | None: ...
+    def linsert(
+        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
+    ) -> int: ...
+    def llen(self, name: _Key) -> int: ...
+    def lpop(self, name, count: int | None = None): ...
+    def lpush(self, name: _Value, *values: _Value) -> int: ...
+    def lpushx(self, name, value): ...
+    def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ...
+    def lrem(self, name: _Key, count: int, value: _Value) -> int: ...
+    def lset(self, name: _Key, index: int, value: _Value) -> bool: ...
+    def ltrim(self, name: _Key, start: int, end: int) -> bool: ...
+    def rpop(self, name, count: int | None = None): ...
+    def rpoplpush(self, src, dst): ...
+    def rpush(self, name: _Value, *values: _Value) -> int: ...
+    def rpushx(self, name, value): ...
+    def lpos(
+        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
+    ): ...
+    # SORT returns the sorted elements unless `store` is given, in which case
+    # the stored list's length (int) is returned -- hence three overloads.
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        store: None = None,
+        groups: bool = False,
+    ) -> list[_StrType]: ...
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        *,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None,
+        num: int | None,
+        by: _Key | None,
+        get: _Key | Sequence[_Key] | None,
+        desc: bool,
+        alpha: bool,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+
+# Async mirror of ListCommands; keep signatures in lockstep with the sync class.
+class AsyncListCommands(Generic[_StrType]):
+    @overload
+    async def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    async def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    @overload
+    async def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    async def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    async def brpoplpush(self, src, dst, timeout: int | None = 0): ...
+    async def lindex(self, name: _Key, index: int) -> _StrType | None: ...
+    async def linsert(
+        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
+    ) -> int: ...
+    async def llen(self, name: _Key) -> int: ...
+    async def lpop(self, name, count: int | None = None): ...
+    async def lpush(self, name: _Value, *values: _Value) -> int: ...
+    async def lpushx(self, name, value): ...
+    async def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ...
+    async def lrem(self, name: _Key, count: int, value: _Value) -> int: ...
+    async def lset(self, name: _Key, index: int, value: _Value) -> bool: ...
+    async def ltrim(self, name: _Key, start: int, end: int) -> bool: ...
+    async def rpop(self, name, count: int | None = None): ...
+    async def rpoplpush(self, src, dst): ...
+    async def rpush(self, name: _Value, *values: _Value) -> int: ...
+    async def rpushx(self, name, value): ...
+    async def lpos(
+        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
+    ): ...
+    # Same store/no-store overload split as the sync SORT above.
+    @overload
+    async def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        store: None = None,
+        groups: bool = False,
+    ) -> list[_StrType]: ...
+    @overload
+    async def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        *,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+    @overload
+    async def sort(
+        self,
+        name: _Key,
+        start: int | None,
+        num: int | None,
+        by: _Key | None,
+        get: _Key | Sequence[_Key] | None,
+        desc: bool,
+        alpha: bool,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+
+# SCAN family stubs: the cursor methods return a (next_cursor, page) pair,
+# while the *_iter variants return an iterator over all matching elements.
+class ScanCommands(Generic[_StrType]):
+    def scan(
+        self,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        _type: str | None = None,
+        **kwargs: _CommandOptions,
+    ) -> tuple[int, list[_StrType]]: ...
+    def scan_iter(
+        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
+    ) -> Iterator[_StrType]: ...
+    def sscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[_StrType]]: ...
+    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[_StrType]: ...
+    def hscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, dict[_StrType, _StrType]]: ...
+    def hscan_iter(
+        self, name: _Key, match: _Key | None = None, count: int | None = None
+    ) -> Iterator[tuple[_StrType, _StrType]]: ...
+    # ZSCAN: the score element's type follows score_cast_func (float by default).
+    @overload
+    def zscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
+    @overload
+    def zscan(
+        self,
+        name: _Key,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    def zscan(
+        self,
+        name: _Key,
+        cursor: int,
+        match: _Key | None,
+        count: int | None,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    def zscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[tuple[_StrType, float]]: ...
+    @overload
+    def zscan_iter(
+        self,
+        name: _Key,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zscan_iter(
+        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
+    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+
+# Async mirror of ScanCommands. The *_iter methods are deliberately plain
+# `def`s: they return an AsyncIterator directly and are not awaited.
+class AsyncScanCommands(Generic[_StrType]):
+    async def scan(
+        self,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        _type: str | None = None,
+        **kwargs: _CommandOptions,
+    ) -> tuple[int, list[_StrType]]: ...
+    def scan_iter(
+        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
+    ) -> AsyncIterator[_StrType]: ...
+    async def sscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[_StrType]]: ...
+    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> AsyncIterator[_StrType]: ...
+    async def hscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, dict[_StrType, _StrType]]: ...
+    def hscan_iter(
+        self, name: _Key, match: _Key | None = None, count: int | None = None
+    ) -> AsyncIterator[tuple[_StrType, _StrType]]: ...
+    @overload
+    async def zscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
+    @overload
+    async def zscan(
+        self,
+        name: _Key,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    async def zscan(
+        self,
+        name: _Key,
+        cursor: int,
+        match: _Key | None,
+        count: int | None,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    def zscan_iter(
+        self, name: _Key, match: _Key | None = None, count: int | None = None
+    ) -> AsyncIterator[tuple[_StrType, float]]: ...
+    @overload
+    def zscan_iter(
+        self,
+        name: _Key,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zscan_iter(
+        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
+    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+
+# Set command stubs. `builtins.set` is spelled explicitly because several
+# methods in this module shadow the name `set`.
+class SetCommands(Generic[_StrType]):
+    def sadd(self, name: _Key, *values: _Value) -> int: ...
+    def scard(self, name: _Key) -> int: ...
+    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    def sismember(self, name: _Key, value: _Value) -> bool: ...
+    def smembers(self, name: _Key) -> builtins.set[_StrType]: ...
+    def smismember(self, name, values, *args): ...
+    def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
+    # SPOP/SRANDMEMBER return one element (or None) without a count, and a
+    # list when a count is supplied -- hence the overload pairs.
+    @overload
+    def spop(self, name: _Key, count: None = None) -> _Value | None: ...
+    @overload
+    def spop(self, name: _Key, count: int) -> list[_Value]: ...
+    @overload
+    def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
+    @overload
+    def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
+    def srem(self, name: _Key, *values: _Value) -> int: ...
+    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+
+# Async mirror of SetCommands; keep signatures in lockstep with the sync class.
+class AsyncSetCommands(Generic[_StrType]):
+    async def sadd(self, name: _Key, *values: _Value) -> int: ...
+    async def scard(self, name: _Key) -> int: ...
+    async def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    async def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    async def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    async def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    async def sismember(self, name: _Key, value: _Value) -> bool: ...
+    async def smembers(self, name: _Key) -> builtins.set[_StrType]: ...
+    async def smismember(self, name, values, *args): ...
+    async def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
+    @overload
+    async def spop(self, name: _Key, count: None = None) -> _Value | None: ...
+    @overload
+    async def spop(self, name: _Key, count: int) -> list[_Value]: ...
+    @overload
+    async def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
+    @overload
+    async def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
+    async def srem(self, name: _Key, *values: _Value) -> int: ...
+    async def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    async def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+
+# Stream command stubs; most parameters are still untyped (Incomplete) pending
+# a closer pass over the runtime implementation.
+class StreamCommands:
+    def xack(self, name, groupname, *ids): ...
+    def xadd(
+        self,
+        name: KeyT,
+        # Only accepts dict objects, but for variance reasons we use a looser annotation
+        fields: SupportsItems[bytes | memoryview | str | float, Any],
+        id: str | int | bytes | memoryview = "*",
+        maxlen=None,
+        approximate: bool = True,
+        nomkstream: bool = False,
+        minid: Incomplete | None = None,
+        limit: Incomplete | None = None,
+    ): ...
+    def xautoclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        start_id: StreamIdT = "0-0",
+        count: Incomplete | None = None,
+        justid: bool = False,
+    ): ...
+    def xclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        message_ids,
+        idle=None,
+        time=None,
+        retrycount=None,
+        force=False,
+        justid=False,
+    ): ...
+    def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
+    def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None): ...
+    def xgroup_delconsumer(self, name, groupname, consumername): ...
+    def xgroup_destroy(self, name, groupname): ...
+    def xgroup_createconsumer(self, name, groupname, consumername): ...
+    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
+    def xinfo_consumers(self, name, groupname): ...
+    def xinfo_groups(self, name): ...
+    def xinfo_stream(self, name, full: bool = False): ...
+    def xlen(self, name: _Key) -> int: ...
+    def xpending(self, name, groupname): ...
+    def xpending_range(
+        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
+    ): ...
+    def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
+    def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
+    def xreadgroup(
+        self,
+        groupname,
+        consumername,
+        streams,
+        count: Incomplete | None = None,
+        block: Incomplete | None = None,
+        noack: bool = False,
+    ): ...
+    def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
+    def xtrim(
+        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
+    ): ...
+
+# Async mirror of StreamCommands; keep signatures in lockstep.
+class AsyncStreamCommands:
+    async def xack(self, name, groupname, *ids): ...
+    async def xadd(
+        self,
+        name: KeyT,
+        # Only accepts dict objects, but for variance reasons we use a looser annotation
+        fields: SupportsItems[bytes | memoryview | str | float, Any],
+        id: str | int | bytes | memoryview = "*",
+        maxlen=None,
+        approximate: bool = True,
+        nomkstream: bool = False,
+        minid: Incomplete | None = None,
+        limit: Incomplete | None = None,
+    ): ...
+    async def xautoclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        start_id: StreamIdT = "0-0",
+        count: Incomplete | None = None,
+        justid: bool = False,
+    ): ...
+    async def xclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        message_ids,
+        idle=None,
+        time=None,
+        retrycount=None,
+        force=False,
+        justid=False,
+    ): ...
+    async def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
+    async def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None): ...
+    async def xgroup_delconsumer(self, name, groupname, consumername): ...
+    async def xgroup_destroy(self, name, groupname): ...
+    async def xgroup_createconsumer(self, name, groupname, consumername): ...
+    async def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
+    async def xinfo_consumers(self, name, groupname): ...
+    async def xinfo_groups(self, name): ...
+    async def xinfo_stream(self, name, full: bool = False): ...
+    async def xlen(self, name: _Key) -> int: ...
+    async def xpending(self, name, groupname): ...
+    async def xpending_range(
+        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
+    ): ...
+    async def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
+    async def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
+    async def xreadgroup(
+        self,
+        groupname,
+        consumername,
+        streams,
+        count: Incomplete | None = None,
+        block: Incomplete | None = None,
+        noack: bool = False,
+    ): ...
+    async def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
+    async def xtrim(
+        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
+    ): ...
+
+# Sorted-set command stubs. The Z*RANGE* overload fan-out encodes two facts:
+# without `withscores` a plain member list is returned; with it, (member,
+# score) pairs whose score type follows score_cast_func (float by default).
+class SortedSetCommands(Generic[_StrType]):
+    def zadd(
+        self,
+        name: _Key,
+        mapping: Mapping[_Key, _Value],
+        nx: bool = False,
+        xx: bool = False,
+        ch: bool = False,
+        incr: bool = False,
+        gt: Incomplete | None = False,
+        lt: Incomplete | None = False,
+    ) -> int: ...
+    def zcard(self, name: _Key) -> int: ...
+    def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zdiff(self, keys, withscores: bool = False): ...
+    def zdiffstore(self, dest, keys): ...
+    def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
+    def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
+    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
+    @overload
+    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[_StrType]: ...
+    @overload
+    def zrevrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrevrange(  # type: ignore[overload-overlap]
+        self, name: _Key, start: int, end: int, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrevrange(
+        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
+    ) -> list[_StrType]: ...
+    def zrangestore(
+        self,
+        dest,
+        name,
+        start,
+        end,
+        byscore: bool = False,
+        bylex: bool = False,
+        desc: bool = False,
+        offset: Incomplete | None = None,
+        num: Incomplete | None = None,
+    ): ...
+    def zrangebylex(
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    def zrevrangebylex(
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    @overload
+    def zrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrangebyscore(
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    @overload
+    def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrevrangebyscore(
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    def zrem(self, name: _Key, *values: _Value) -> int: ...
+    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
+    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    def zscore(self, name: _Key, value: _Value) -> float | None: ...
+    def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
+    def zmscore(self, key, members): ...
+
+# Async mirror of SortedSetCommands; keep the overload fan-out in lockstep.
+class AsyncSortedSetCommands(Generic[_StrType]):
+    async def zadd(
+        self,
+        name: _Key,
+        mapping: Mapping[_Key, _Value],
+        nx: bool = False,
+        xx: bool = False,
+        ch: bool = False,
+        incr: bool = False,
+        gt: Incomplete | None = False,
+        lt: Incomplete | None = False,
+    ) -> int: ...
+    async def zcard(self, name: _Key) -> int: ...
+    async def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zdiff(self, keys, withscores: bool = False): ...
+    async def zdiffstore(self, dest, keys): ...
+    async def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
+    async def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    async def zinterstore(
+        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
+    ) -> int: ...
+    async def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    async def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    async def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
+    @overload
+    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[_StrType]: ...
+    @overload
+    async def zrevrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrevrange(  # type: ignore[overload-overlap]
+        self, name: _Key, start: int, end: int, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrevrange(
+        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
+    ) -> list[_StrType]: ...
+    async def zrangestore(
+        self,
+        dest,
+        name,
+        start,
+        end,
+        byscore: bool = False,
+        bylex: bool = False,
+        desc: bool = False,
+        offset: Incomplete | None = None,
+        num: Incomplete | None = None,
+    ): ...
+    async def zrangebylex(
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    async def zrevrangebylex(
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    @overload
+    async def zrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrangebyscore(
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    @overload
+    async def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrevrangebyscore(
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    async def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    async def zrem(self, name: _Key, *values: _Value) -> int: ...
+    async def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
+    async def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    async def zscore(self, name: _Key, value: _Value) -> float | None: ...
+    async def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    async def zunionstore(
+        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
+    ) -> int: ...
+    async def zmscore(self, key, members): ...
+
+# HyperLogLog command stubs.
+class HyperlogCommands:
+    def pfadd(self, name: _Key, *values: _Value) -> int: ...
+    def pfcount(self, name: _Key) -> int: ...
+    def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...
+
+class AsyncHyperlogCommands:
+    async def pfadd(self, name: _Key, *values: _Value) -> int: ...
+    async def pfcount(self, name: _Key) -> int: ...
+    async def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...
+
+# Hash command stubs.
+class HashCommands(Generic[_StrType]):
+    def hdel(self, name: _Key, *keys: _Key) -> int: ...
+    def hexists(self, name: _Key, key: _Key) -> bool: ...
+    def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
+    def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
+    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
+    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
+    def hkeys(self, name: _Key) -> list[_StrType]: ...
+    def hlen(self, name: _Key) -> int: ...
+    # HSET accepts a key/value pair, a mapping, or both -- hence the overloads.
+    @overload
+    def hset(
+        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    def hset(
+        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
+    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
+    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
+    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
+    def hvals(self, name: _Key) -> list[_StrType]: ...
+    def hstrlen(self, name, key): ...
+
+# Async mirror of HashCommands; keep signatures in lockstep.
+class AsyncHashCommands(Generic[_StrType]):
+    async def hdel(self, name: _Key, *keys: _Key) -> int: ...
+    async def hexists(self, name: _Key, key: _Key) -> bool: ...
+    async def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
+    async def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
+    async def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
+    async def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
+    async def hkeys(self, name: _Key) -> list[_StrType]: ...
+    async def hlen(self, name: _Key) -> int: ...
+    @overload
+    async def hset(
+        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    async def hset(
+        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    async def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
+    async def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
+    async def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
+    async def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
+    async def hvals(self, name: _Key) -> list[_StrType]: ...
+    async def hstrlen(self, name, key): ...
+
+# Awaitable handle for a script registered via register_script() on an async client.
+class AsyncScript:
+    def __init__(self, registered_client: AsyncValkey[Any], script: ScriptTextT) -> None: ...
+    async def __call__(
+        self, keys: Sequence[KeyT] | None = None, args: Iterable[EncodableT] | None = None, client: AsyncValkey[Any] | None = None
+    ): ...
+
+# Pub/Sub command stubs.
+class PubSubCommands:
+    def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
+    def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
+    def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
+    def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...
+
+class AsyncPubSubCommands:
+    async def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
+    async def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
+    async def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
+    async def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...
+
+# Server-side scripting stubs; register_script returns a reusable Script handle.
+class ScriptCommands(Generic[_StrType]):
+    def eval(self, script, numkeys, *keys_and_args): ...
+    def evalsha(self, sha, numkeys, *keys_and_args): ...
+    def script_exists(self, *args): ...
+    def script_debug(self, *args): ...
+    def script_flush(self, sync_type: Incomplete | None = None): ...
+    def script_kill(self): ...
+    def script_load(self, script): ...
+    def register_script(self, script: str | _StrType) -> Script: ...
+
+class AsyncScriptCommands(Generic[_StrType]):
+    async def eval(self, script, numkeys, *keys_and_args): ...
+    async def evalsha(self, sha, numkeys, *keys_and_args): ...
+    async def script_exists(self, *args): ...
+    async def script_debug(self, *args): ...
+    async def script_flush(self, sync_type: Incomplete | None = None): ...
+    async def script_kill(self): ...
+    async def script_load(self, script): ...
+    # Plain def: returns the AsyncScript handle synchronously.
+    def register_script(self, script: ScriptTextT) -> AsyncScript: ...
+
+# Geospatial command stubs; NOTE(review): the class may continue in a later hunk.
+class GeoCommands:
+    def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ...
+    def geodist(self, name, place1, place2, unit: Incomplete | None = None): ...
+    def geohash(self, name, *values): ...
+    def geopos(self, name, *values): ...
+    def georadius(
+        self,
+        name,
+        longitude,
+        latitude,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ): ...
+    def georadiusbymember(
+        self,
+        name,
+        member,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ): ...
+    def geosearch(
+        self,
+        name,
+        member: Incomplete | None = None,
+        longitude: Incomplete | None = None,
+        latitude: Incomplete | None = None,
+        unit: str = "m",
+        radius: Incomplete | None = None,
+        width: Incomplete | None = None,
+        height: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        count: Incomplete | None = None,
+        any: bool = False,
+        withcoord: bool = False,
+        withdist: bool = False,
+        withhash: bool = False,
+    ): ...
+ def geosearchstore( + self, + dest, + name, + member: Incomplete | None = None, + longitude: Incomplete | None = None, + latitude: Incomplete | None = None, + unit: str = "m", + radius: Incomplete | None = None, + width: Incomplete | None = None, + height: Incomplete | None = None, + sort: Incomplete | None = None, + count: Incomplete | None = None, + any: bool = False, + storedist: bool = False, + ): ... + +class AsyncGeoCommands: + async def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ... + async def geodist(self, name, place1, place2, unit: Incomplete | None = None): ... + async def geohash(self, name, *values): ... + async def geopos(self, name, *values): ... + async def georadius( + self, + name, + longitude, + latitude, + radius, + unit: Incomplete | None = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Incomplete | None = None, + sort: Incomplete | None = None, + store: Incomplete | None = None, + store_dist: Incomplete | None = None, + any: bool = False, + ): ... + async def georadiusbymember( + self, + name, + member, + radius, + unit: Incomplete | None = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Incomplete | None = None, + sort: Incomplete | None = None, + store: Incomplete | None = None, + store_dist: Incomplete | None = None, + any: bool = False, + ): ... + async def geosearch( + self, + name, + member: Incomplete | None = None, + longitude: Incomplete | None = None, + latitude: Incomplete | None = None, + unit: str = "m", + radius: Incomplete | None = None, + width: Incomplete | None = None, + height: Incomplete | None = None, + sort: Incomplete | None = None, + count: Incomplete | None = None, + any: bool = False, + withcoord: bool = False, + withdist: bool = False, + withhash: bool = False, + ): ... 
+ async def geosearchstore( + self, + dest, + name, + member: Incomplete | None = None, + longitude: Incomplete | None = None, + latitude: Incomplete | None = None, + unit: str = "m", + radius: Incomplete | None = None, + width: Incomplete | None = None, + height: Incomplete | None = None, + sort: Incomplete | None = None, + count: Incomplete | None = None, + any: bool = False, + storedist: bool = False, + ): ... + +class ModuleCommands: + def module_load(self, path, *args): ... + def module_unload(self, name): ... + def module_list(self): ... + def command_info(self): ... + def command_count(self): ... + def command_getkeys(self, *args): ... + def command(self): ... + +class Script: + def __init__(self, registered_client, script) -> None: ... + def __call__(self, keys=[], args=[], client: Incomplete | None = None): ... + +class BitFieldOperation: + def __init__(self, client, key, default_overflow: Incomplete | None = None): ... + def reset(self) -> None: ... + def overflow(self, overflow): ... + def incrby(self, fmt, offset, increment, overflow: Incomplete | None = None): ... + def get(self, fmt, offset): ... + def set(self, fmt, offset, value): ... + @property + def command(self): ... + def execute(self): ... + +class AsyncModuleCommands(ModuleCommands): + async def command_info(self) -> None: ... + +class ClusterCommands: + def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ... + def readwrite(self, **kwargs: _CommandOptions) -> bool: ... + def readonly(self, **kwargs: _CommandOptions) -> bool: ... + +class AsyncClusterCommands: + async def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ... + async def readwrite(self, **kwargs: _CommandOptions) -> bool: ... + async def readonly(self, **kwargs: _CommandOptions) -> bool: ... + +class FunctionCommands: + def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ... + def function_delete(self, library: str) -> Awaitable[str] | str: ... 
+ def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ... + def function_list(self, library: str | None = "*", withcode: bool | None = False) -> Awaitable[list[Any]] | list[Any]: ... + def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + def function_dump(self) -> Awaitable[str] | str: ... + def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ... + def function_kill(self) -> Awaitable[str] | str: ... + def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ... + +class AsyncFunctionCommands: + async def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ... + async def function_delete(self, library: str) -> Awaitable[str] | str: ... + async def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ... + async def function_list( + self, library: str | None = "*", withcode: bool | None = False + ) -> Awaitable[list[Any]] | list[Any]: ... + async def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + async def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + async def function_dump(self) -> Awaitable[str] | str: ... + async def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ... + async def function_kill(self) -> Awaitable[str] | str: ... + async def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ... + +class DataAccessCommands( + BasicKeyCommands[_StrType], + HyperlogCommands, + HashCommands[_StrType], + GeoCommands, + ListCommands[_StrType], + ScanCommands[_StrType], + SetCommands[_StrType], + StreamCommands, + SortedSetCommands[_StrType], +): ... 
+class AsyncDataAccessCommands( + AsyncBasicKeyCommands[_StrType], + AsyncHyperlogCommands, + AsyncHashCommands[_StrType], + AsyncGeoCommands, + AsyncListCommands[_StrType], + AsyncScanCommands[_StrType], + AsyncSetCommands[_StrType], + AsyncStreamCommands, + AsyncSortedSetCommands[_StrType], +): ... +class CoreCommands( + ACLCommands[_StrType], + ClusterCommands, + DataAccessCommands[_StrType], + ManagementCommands, + ModuleCommands, + PubSubCommands, + ScriptCommands[_StrType], +): ... +class AsyncCoreCommands( + AsyncACLCommands[_StrType], + AsyncClusterCommands, + AsyncDataAccessCommands[_StrType], + AsyncManagementCommands, + AsyncModuleCommands, + AsyncPubSubCommands, + AsyncScriptCommands[_StrType], + AsyncFunctionCommands, +): ... diff --git a/valkey/commands/graph/__init__.pyi b/valkey/commands/graph/__init__.pyi new file mode 100644 index 00000000..a8209b8d --- /dev/null +++ b/valkey/commands/graph/__init__.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from .commands import GraphCommands as GraphCommands +from .edge import Edge as Edge +from .node import Node as Node +from .path import Path as Path + +class Graph(GraphCommands): + NAME: Any + client: Any + execute_command: Any + nodes: Any + edges: Any + version: int + def __init__(self, client, name=...) -> None: ... + @property + def name(self): ... + def get_label(self, idx): ... + def get_relation(self, idx): ... + def get_property(self, idx): ... + def add_node(self, node) -> None: ... + def add_edge(self, edge) -> None: ... + def call_procedure(self, procedure, *args, read_only: bool = False, **kwagrs): ... + def labels(self): ... + def relationship_types(self): ... + def property_keys(self): ... 
diff --git a/valkey/commands/graph/commands.pyi b/valkey/commands/graph/commands.pyi new file mode 100644 index 00000000..b57418dd --- /dev/null +++ b/valkey/commands/graph/commands.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete +from typing import Any + +class GraphCommands: + def commit(self): ... + version: Any + def query( + self, + q, + params: Incomplete | None = None, + timeout: Incomplete | None = None, + read_only: bool = False, + profile: bool = False, + ): ... + def merge(self, pattern): ... + def delete(self): ... + nodes: Any + edges: Any + def flush(self) -> None: ... + def explain(self, query, params: Incomplete | None = None): ... + def bulk(self, **kwargs) -> None: ... + def profile(self, query): ... + def slowlog(self): ... + def config(self, name, value: Incomplete | None = None, set: bool = False): ... + def list_keys(self): ... diff --git a/valkey/commands/graph/edge.pyi b/valkey/commands/graph/edge.pyi new file mode 100644 index 00000000..3bd36b65 --- /dev/null +++ b/valkey/commands/graph/edge.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +class Edge: + id: Any + relation: Any + properties: Any + src_node: Any + dest_node: Any + def __init__( + self, src_node, relation, dest_node, edge_id: Incomplete | None = None, properties: Incomplete | None = None + ) -> None: ... + def to_string(self): ... + def __eq__(self, rhs): ... diff --git a/valkey/commands/graph/exceptions.pyi b/valkey/commands/graph/exceptions.pyi new file mode 100644 index 00000000..6069e055 --- /dev/null +++ b/valkey/commands/graph/exceptions.pyi @@ -0,0 +1,5 @@ +from typing import Any + +class VersionMismatchException(Exception): + version: Any + def __init__(self, version) -> None: ... 
diff --git a/valkey/commands/graph/node.pyi b/valkey/commands/graph/node.pyi new file mode 100644 index 00000000..e7a65537 --- /dev/null +++ b/valkey/commands/graph/node.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +class Node: + id: Any + alias: Any + label: Any + labels: Any + properties: Any + def __init__( + self, + node_id: Incomplete | None = None, + alias: Incomplete | None = None, + label: str | list[str] | None = None, + properties: Incomplete | None = None, + ) -> None: ... + def to_string(self): ... + def __eq__(self, rhs): ... diff --git a/valkey/commands/graph/path.pyi b/valkey/commands/graph/path.pyi new file mode 100644 index 00000000..69106f89 --- /dev/null +++ b/valkey/commands/graph/path.pyi @@ -0,0 +1,18 @@ +from typing import Any + +class Path: + append_type: Any + def __init__(self, nodes, edges) -> None: ... + @classmethod + def new_empty_path(cls): ... + def nodes(self): ... + def edges(self): ... + def get_node(self, index): ... + def get_relationship(self, index): ... + def first_node(self): ... + def last_node(self): ... + def edge_count(self): ... + def nodes_count(self): ... + def add_node(self, node): ... + def add_edge(self, edge): ... + def __eq__(self, other): ... 
diff --git a/valkey/commands/graph/query_result.pyi b/valkey/commands/graph/query_result.pyi new file mode 100644 index 00000000..d9f8b514 --- /dev/null +++ b/valkey/commands/graph/query_result.pyi @@ -0,0 +1,74 @@ +from typing import Any, ClassVar, Literal + +LABELS_ADDED: str +NODES_CREATED: str +NODES_DELETED: str +RELATIONSHIPS_DELETED: str +PROPERTIES_SET: str +RELATIONSHIPS_CREATED: str +INDICES_CREATED: str +INDICES_DELETED: str +CACHED_EXECUTION: str +INTERNAL_EXECUTION_TIME: str +STATS: Any + +class ResultSetColumnTypes: + COLUMN_UNKNOWN: ClassVar[Literal[0]] + COLUMN_SCALAR: ClassVar[Literal[1]] + COLUMN_NODE: ClassVar[Literal[2]] + COLUMN_RELATION: ClassVar[Literal[3]] + +class ResultSetScalarTypes: + VALUE_UNKNOWN: ClassVar[Literal[0]] + VALUE_NULL: ClassVar[Literal[1]] + VALUE_STRING: ClassVar[Literal[2]] + VALUE_INTEGER: ClassVar[Literal[3]] + VALUE_BOOLEAN: ClassVar[Literal[4]] + VALUE_DOUBLE: ClassVar[Literal[5]] + VALUE_ARRAY: ClassVar[Literal[6]] + VALUE_EDGE: ClassVar[Literal[7]] + VALUE_NODE: ClassVar[Literal[8]] + VALUE_PATH: ClassVar[Literal[9]] + VALUE_MAP: ClassVar[Literal[10]] + VALUE_POINT: ClassVar[Literal[11]] + +class QueryResult: + graph: Any + header: Any + result_set: Any + def __init__(self, graph, response, profile: bool = False) -> None: ... + def parse_results(self, raw_result_set) -> None: ... + statistics: Any + def parse_statistics(self, raw_statistics) -> None: ... + def parse_header(self, raw_result_set): ... + def parse_records(self, raw_result_set): ... + def parse_entity_properties(self, props): ... + def parse_string(self, cell): ... + def parse_node(self, cell): ... + def parse_edge(self, cell): ... + def parse_path(self, cell): ... + def parse_map(self, cell): ... + def parse_point(self, cell): ... + def parse_scalar(self, cell): ... + def parse_profile(self, response) -> None: ... + def is_empty(self): ... + @property + def labels_added(self): ... + @property + def nodes_created(self): ... 
+ @property + def nodes_deleted(self): ... + @property + def properties_set(self): ... + @property + def relationships_created(self): ... + @property + def relationships_deleted(self): ... + @property + def indices_created(self): ... + @property + def indices_deleted(self): ... + @property + def cached_execution(self): ... + @property + def run_time_ms(self): ... diff --git a/valkey/commands/helpers.pyi b/valkey/commands/helpers.pyi new file mode 100644 index 00000000..b4e5ac7f --- /dev/null +++ b/valkey/commands/helpers.pyi @@ -0,0 +1,10 @@ +def list_or_args(keys, args): ... +def nativestr(x): ... +def delist(x): ... +def parse_to_list(response): ... +def parse_list_to_dict(response): ... +def parse_to_dict(response): ... +def random_string(length: int = 10) -> str: ... +def quote_string(v): ... +def decode_dict_keys(obj): ... +def stringify_param_value(value): ... diff --git a/valkey/commands/json/__init__.pyi b/valkey/commands/json/__init__.pyi new file mode 100644 index 00000000..f9e8825b --- /dev/null +++ b/valkey/commands/json/__init__.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import JSONCommands + +class JSON(JSONCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + MODULE_VERSION: Incomplete | None + def __init__(self, client, version: Incomplete | None = None, decoder=..., encoder=...) -> None: ... + def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ... + +class Pipeline(JSONCommands, ClientPipeline[Incomplete]): ... # type: ignore[misc] diff --git a/valkey/commands/json/commands.pyi b/valkey/commands/json/commands.pyi new file mode 100644 index 00000000..38d4d4c6 --- /dev/null +++ b/valkey/commands/json/commands.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +class JSONCommands: + def arrappend(self, name: str, path: str | None = ".", *args) -> list[int | None]: ... 
+ def arrindex( + self, name: str, path: str, scalar: int, start: int | None = None, stop: int | None = None + ) -> list[int | None]: ... + def arrinsert(self, name: str, path: str, index: int, *args) -> list[int | None]: ... + def arrlen(self, name: str, path: str | None = ".") -> list[int | None]: ... + def arrpop(self, name: str, path: str | None = ".", index: int | None = -1) -> list[str | None]: ... + def arrtrim(self, name: str, path: str, start: int, stop: int) -> list[int | None]: ... + def type(self, name: str, path: str | None = ".") -> list[str]: ... + def resp(self, name: str, path: str | None = ".") -> list[Incomplete]: ... + def objkeys(self, name, path="."): ... + def objlen(self, name, path="."): ... + def numincrby(self, name, path, number): ... + def nummultby(self, name, path, number): ... + def clear(self, name, path="."): ... + def delete(self, key, path="."): ... + forget = delete + def get(self, name, *args, no_escape: bool = False): ... + def mget(self, keys, path): ... + def set(self, name, path, obj, nx: bool = False, xx: bool = False, decode_keys: bool = False): ... + def set_file(self, name, path, file_name, nx: bool = False, xx: bool = False, decode_keys: bool = False): ... + def set_path(self, json_path, root_folder, nx: bool = False, xx: bool = False, decode_keys: bool = False): ... + def strlen(self, name, path: Incomplete | None = None): ... + def toggle(self, name, path="."): ... + def strappend(self, name, value, path="."): ... + def debug(self, subcommand, key: Incomplete | None = None, path="."): ... + def jsonget(self, *args, **kwargs): ... + def jsonmget(self, *args, **kwargs): ... + def jsonset(self, *args, **kwargs): ... diff --git a/valkey/commands/json/decoders.pyi b/valkey/commands/json/decoders.pyi new file mode 100644 index 00000000..ccea2438 --- /dev/null +++ b/valkey/commands/json/decoders.pyi @@ -0,0 +1,4 @@ +def bulk_of_jsons(d): ... +def decode_dict_keys(obj): ... +def unstring(obj): ... +def decode_list(b): ... 
diff --git a/valkey/commands/json/path.pyi b/valkey/commands/json/path.pyi new file mode 100644 index 00000000..bbc35c4f --- /dev/null +++ b/valkey/commands/json/path.pyi @@ -0,0 +1,5 @@ +class Path: + strPath: str + @staticmethod + def root_path() -> str: ... + def __init__(self, path: str) -> None: ... diff --git a/valkey/commands/parser.pyi b/valkey/commands/parser.pyi new file mode 100644 index 00000000..f17afa28 --- /dev/null +++ b/valkey/commands/parser.pyi @@ -0,0 +1,8 @@ +from valkey.client import AbstractValkey +from valkey.typing import EncodableT + +class CommandsParser: + commands: dict[str, str] + def __init__(self, valkey_connection: AbstractValkey) -> None: ... + def initialize(self, r: AbstractValkey) -> None: ... + def get_keys(self, valkey_conn: AbstractValkey, *args: EncodableT) -> list[EncodableT] | None: ... diff --git a/valkey/commands/redismodules.pyi b/valkey/commands/redismodules.pyi new file mode 100644 index 00000000..129b2a17 --- /dev/null +++ b/valkey/commands/redismodules.pyi @@ -0,0 +1,14 @@ +from .json import JSON +from .search import Search +from .timeseries import TimeSeries + +class ValkeyModuleCommands: + def json(self, encoder=..., decoder=...) -> JSON: ... + def ft(self, index_name: str = "idx") -> Search: ... + def ts(self) -> TimeSeries: ... + def bf(self): ... + def cf(self): ... + def cms(self): ... + def topk(self): ... + def tdigest(self): ... + def graph(self, index_name: str = "idx"): ... diff --git a/valkey/commands/search/__init__.pyi b/valkey/commands/search/__init__.pyi new file mode 100644 index 00000000..3366d451 --- /dev/null +++ b/valkey/commands/search/__init__.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from .commands import SearchCommands + +class Search(SearchCommands): + class BatchIndexer: + def __init__(self, client, chunk_size: int = 1000) -> None: ... 
+ def add_document( + self, + doc_id, + nosave: bool = False, + score: float = 1.0, + payload: Incomplete | None = None, + replace: bool = False, + partial: bool = False, + no_create: bool = False, + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = 1.0, replace: bool = False): ... + def commit(self): ... + + def __init__(self, client, index_name: str = "idx") -> None: ... diff --git a/valkey/commands/search/aggregation.pyi b/valkey/commands/search/aggregation.pyi new file mode 100644 index 00000000..48bac218 --- /dev/null +++ b/valkey/commands/search/aggregation.pyi @@ -0,0 +1,53 @@ +from typing import Any, ClassVar, Literal + +FIELDNAME: Any + +class Limit: + offset: Any + count: Any + def __init__(self, offset: int = 0, count: int = 0) -> None: ... + def build_args(self): ... + +class Reducer: + NAME: ClassVar[None] + def __init__(self, *args) -> None: ... + def alias(self, alias): ... + @property + def args(self): ... + +class SortDirection: + DIRSTRING: ClassVar[str | None] + field: Any + def __init__(self, field) -> None: ... + +class Asc(SortDirection): + DIRSTRING: ClassVar[Literal["ASC"]] + +class Desc(SortDirection): + DIRSTRING: ClassVar[Literal["DESC"]] + +class AggregateRequest: + def __init__(self, query: str = "*") -> None: ... + def load(self, *fields): ... + def group_by(self, fields, *reducers): ... + def apply(self, **kwexpr): ... + def limit(self, offset, num): ... + def sort_by(self, *fields, **kwargs): ... + def filter(self, expressions): ... + def with_schema(self): ... + def verbatim(self): ... + def cursor(self, count: int = 0, max_idle: float = 0.0): ... + def build_args(self): ... + +class Cursor: + cid: Any + max_idle: int + count: int + def __init__(self, cid) -> None: ... + def build_args(self): ... + +class AggregateResult: + rows: Any + cursor: Any + schema: Any + def __init__(self, rows, cursor, schema) -> None: ... 
diff --git a/valkey/commands/search/commands.pyi b/valkey/commands/search/commands.pyi new file mode 100644 index 00000000..f8a2baf3 --- /dev/null +++ b/valkey/commands/search/commands.pyi @@ -0,0 +1,111 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any, Literal +from typing_extensions import TypeAlias + +from .aggregation import AggregateRequest, AggregateResult, Cursor +from .query import Query +from .result import Result + +_QueryParams: TypeAlias = Mapping[str, str | float] + +NUMERIC: Literal["NUMERIC"] + +CREATE_CMD: Literal["FT.CREATE"] +ALTER_CMD: Literal["FT.ALTER"] +SEARCH_CMD: Literal["FT.SEARCH"] +ADD_CMD: Literal["FT.ADD"] +ADDHASH_CMD: Literal["FT.ADDHASH"] +DROP_CMD: Literal["FT.DROP"] +EXPLAIN_CMD: Literal["FT.EXPLAIN"] +EXPLAINCLI_CMD: Literal["FT.EXPLAINCLI"] +DEL_CMD: Literal["FT.DEL"] +AGGREGATE_CMD: Literal["FT.AGGREGATE"] +PROFILE_CMD: Literal["FT.PROFILE"] +CURSOR_CMD: Literal["FT.CURSOR"] +SPELLCHECK_CMD: Literal["FT.SPELLCHECK"] +DICT_ADD_CMD: Literal["FT.DICTADD"] +DICT_DEL_CMD: Literal["FT.DICTDEL"] +DICT_DUMP_CMD: Literal["FT.DICTDUMP"] +GET_CMD: Literal["FT.GET"] +MGET_CMD: Literal["FT.MGET"] +CONFIG_CMD: Literal["FT.CONFIG"] +TAGVALS_CMD: Literal["FT.TAGVALS"] +ALIAS_ADD_CMD: Literal["FT.ALIASADD"] +ALIAS_UPDATE_CMD: Literal["FT.ALIASUPDATE"] +ALIAS_DEL_CMD: Literal["FT.ALIASDEL"] +INFO_CMD: Literal["FT.INFO"] +SUGADD_COMMAND: Literal["FT.SUGADD"] +SUGDEL_COMMAND: Literal["FT.SUGDEL"] +SUGLEN_COMMAND: Literal["FT.SUGLEN"] +SUGGET_COMMAND: Literal["FT.SUGGET"] +SYNUPDATE_CMD: Literal["FT.SYNUPDATE"] +SYNDUMP_CMD: Literal["FT.SYNDUMP"] + +NOOFFSETS: Literal["NOOFFSETS"] +NOFIELDS: Literal["NOFIELDS"] +STOPWORDS: Literal["STOPWORDS"] +WITHSCORES: Literal["WITHSCORES"] +FUZZY: Literal["FUZZY"] +WITHPAYLOADS: Literal["WITHPAYLOADS"] + +class SearchCommands: + def batch_indexer(self, chunk_size: int = 100): ... 
+ def create_index( + self, + fields, + no_term_offsets: bool = False, + no_field_flags: bool = False, + stopwords: Incomplete | None = None, + definition: Incomplete | None = None, + max_text_fields: bool = False, # added in 4.1.1 + temporary: Incomplete | None = None, # added in 4.1.1 + no_highlight: bool = False, # added in 4.1.1 + no_term_frequencies: bool = False, # added in 4.1.1 + skip_initial_scan: bool = False, # added in 4.1.1 + ): ... + def alter_schema_add(self, fields): ... + def dropindex(self, delete_documents: bool = False): ... + def add_document( + self, + doc_id, + nosave: bool = False, + score: float = 1.0, + payload: Incomplete | None = None, + replace: bool = False, + partial: bool = False, + language: Incomplete | None = None, + no_create: bool = False, + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = 1.0, language: Incomplete | None = None, replace: bool = False): ... + def delete_document(self, doc_id, conn: Incomplete | None = None, delete_actual_document: bool = False): ... + def load_document(self, id): ... + def get(self, *ids): ... + def info(self): ... + def get_params_args(self, query_params: _QueryParams) -> list[Any]: ... + def search(self, query: str | Query, query_params: _QueryParams | None = None) -> Result: ... + def explain(self, query: str | Query, query_params: _QueryParams | None = None): ... + def explain_cli(self, query): ... + def aggregate(self, query: AggregateRequest | Cursor, query_params: _QueryParams | None = None) -> AggregateResult: ... + def profile( + self, query: str | Query | AggregateRequest, limited: bool = False, query_params: Mapping[str, str | float] | None = None + ) -> tuple[Incomplete, Incomplete]: ... + def spellcheck( + self, query, distance: Incomplete | None = None, include: Incomplete | None = None, exclude: Incomplete | None = None + ): ... + def dict_add(self, name, *terms): ... + def dict_del(self, name, *terms): ... + def dict_dump(self, name): ... 
+ def config_set(self, option: str, value: str) -> bool: ... + def config_get(self, option: str) -> dict[str, str]: ... + def tagvals(self, tagfield): ... + def aliasadd(self, alias): ... + def aliasupdate(self, alias): ... + def aliasdel(self, alias): ... + def sugadd(self, key, *suggestions, **kwargs): ... + def suglen(self, key): ... + def sugdel(self, key, string): ... + def sugget(self, key, prefix, fuzzy: bool = False, num: int = 10, with_scores: bool = False, with_payloads: bool = False): ... + def synupdate(self, groupid, skipinitial: bool = False, *terms): ... + def syndump(self): ... diff --git a/valkey/commands/search/query.pyi b/valkey/commands/search/query.pyi new file mode 100644 index 00000000..eb1846ba --- /dev/null +++ b/valkey/commands/search/query.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from typing import Any + +class Query: + def __init__(self, query_string) -> None: ... + def query_string(self): ... + def limit_ids(self, *ids): ... + def return_fields(self, *fields): ... + def return_field(self, field, as_field: Incomplete | None = None): ... + def summarize( + self, + fields: Incomplete | None = None, + context_len: Incomplete | None = None, + num_frags: Incomplete | None = None, + sep: Incomplete | None = None, + ): ... + def highlight(self, fields: Incomplete | None = None, tags: Incomplete | None = None): ... + def language(self, language): ... + def slop(self, slop): ... + def in_order(self): ... + def scorer(self, scorer): ... + def get_args(self): ... + def paging(self, offset, num): ... + def verbatim(self): ... + def no_content(self): ... + def no_stopwords(self): ... + def with_payloads(self): ... + def with_scores(self): ... + def limit_fields(self, *fields): ... + def add_filter(self, flt): ... + def sort_by(self, field, asc: bool = True): ... + def expander(self, expander): ... + +class Filter: + args: Any + def __init__(self, keyword, field, *args) -> None: ... 
+ +class NumericFilter(Filter): + INF: str + NEG_INF: str + def __init__(self, field, minval, maxval, minExclusive: bool = False, maxExclusive: bool = False) -> None: ... + +class GeoFilter(Filter): + METERS: str + KILOMETERS: str + FEET: str + MILES: str + def __init__(self, field, lon, lat, radius, unit="km") -> None: ... + +class SortbyField: + args: Any + def __init__(self, field, asc: bool = True) -> None: ... diff --git a/valkey/commands/search/result.pyi b/valkey/commands/search/result.pyi new file mode 100644 index 00000000..046c3170 --- /dev/null +++ b/valkey/commands/search/result.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class Result: + total: Any + duration: Any + docs: Any + def __init__(self, res, hascontent, duration: int = 0, has_payload: bool = False, with_scores: bool = False) -> None: ... diff --git a/valkey/commands/sentinel.pyi b/valkey/commands/sentinel.pyi new file mode 100644 index 00000000..b526a45f --- /dev/null +++ b/valkey/commands/sentinel.pyi @@ -0,0 +1,17 @@ +class SentinelCommands: + def sentinel(self, *args): ... + def sentinel_get_master_addr_by_name(self, service_name): ... + def sentinel_master(self, service_name): ... + def sentinel_masters(self): ... + def sentinel_monitor(self, name, ip, port, quorum): ... + def sentinel_remove(self, name): ... + def sentinel_sentinels(self, service_name): ... + def sentinel_set(self, name, option, value): ... + def sentinel_slaves(self, service_name): ... + def sentinel_reset(self, pattern): ... + def sentinel_failover(self, new_master_name): ... + def sentinel_ckquorum(self, new_master_name): ... + def sentinel_flushconfig(self): ... + +class AsyncSentinelCommands(SentinelCommands): + async def sentinel(self, *args) -> None: ... 
diff --git a/valkey/commands/timeseries/__init__.pyi b/valkey/commands/timeseries/__init__.pyi new file mode 100644 index 00000000..95457d6f --- /dev/null +++ b/valkey/commands/timeseries/__init__.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import TimeSeriesCommands + +class TimeSeries(TimeSeriesCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + def __init__(self, client: Incomplete | None = None, **kwargs) -> None: ... + def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ... + +class Pipeline(TimeSeriesCommands, ClientPipeline[Incomplete]): ... # type: ignore[misc] diff --git a/valkey/commands/timeseries/commands.pyi b/valkey/commands/timeseries/commands.pyi new file mode 100644 index 00000000..ed70e575 --- /dev/null +++ b/valkey/commands/timeseries/commands.pyi @@ -0,0 +1,160 @@ +from typing import Literal +from typing_extensions import TypeAlias + +_Key: TypeAlias = bytes | str | memoryview + +ADD_CMD: Literal["TS.ADD"] +ALTER_CMD: Literal["TS.ALTER"] +CREATERULE_CMD: Literal["TS.CREATERULE"] +CREATE_CMD: Literal["TS.CREATE"] +DECRBY_CMD: Literal["TS.DECRBY"] +DELETERULE_CMD: Literal["TS.DELETERULE"] +DEL_CMD: Literal["TS.DEL"] +GET_CMD: Literal["TS.GET"] +INCRBY_CMD: Literal["TS.INCRBY"] +INFO_CMD: Literal["TS.INFO"] +MADD_CMD: Literal["TS.MADD"] +MGET_CMD: Literal["TS.MGET"] +MRANGE_CMD: Literal["TS.MRANGE"] +MREVRANGE_CMD: Literal["TS.MREVRANGE"] +QUERYINDEX_CMD: Literal["TS.QUERYINDEX"] +RANGE_CMD: Literal["TS.RANGE"] +REVRANGE_CMD: Literal["TS.REVRANGE"] + +class TimeSeriesCommands: + def create( + self, + key: _Key, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + duplicate_policy: str | None = None, + ): ... 
+ def alter( + self, + key: _Key, + retention_msecs: int | None = None, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + duplicate_policy: str | None = None, + ): ... + def add( + self, + key: _Key, + timestamp: int | str, + value: float, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + duplicate_policy: str | None = None, + ): ... + def madd(self, ktv_tuples): ... + def incrby( + self, + key: _Key, + value: float, + timestamp: int | str | None = None, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + ): ... + def decrby( + self, + key: _Key, + value: float, + timestamp: int | str | None = None, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + ): ... + def delete(self, key, from_time, to_time): ... + def createrule( + self, source_key: _Key, dest_key: _Key, aggregation_type: str, bucket_size_msec: int, align_timestamp: int | None = None + ): ... + def deleterule(self, source_key, dest_key): ... + def range( + self, + key: _Key, + from_time: int | str, + to_time: int | str, + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... 
+ def revrange( + self, + key: _Key, + from_time: int | str, + to_time: int | str, + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... + def mrange( + self, + from_time: int | str, + to_time: int | str, + filters: list[str], + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + with_labels: bool | None = False, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + groupby: str | None = None, + reduce: str | None = None, + select_labels: list[str] | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... + def mrevrange( + self, + from_time: int | str, + to_time: int | str, + filters: list[str], + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + with_labels: bool | None = False, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + groupby: str | None = None, + reduce: str | None = None, + select_labels: list[str] | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... + def get(self, key: _Key, latest: bool | None = False): ... + def mget( + self, + filters: list[str], + with_labels: bool | None = False, + select_labels: list[str] | None = None, + latest: bool | None = False, + ): ... + def info(self, key): ... + def queryindex(self, filters): ... 
diff --git a/valkey/commands/timeseries/info.pyi b/valkey/commands/timeseries/info.pyi new file mode 100644 index 00000000..8b082c7d --- /dev/null +++ b/valkey/commands/timeseries/info.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +class TSInfo: + rules: list[Any] + labels: list[Any] + sourceKey: Incomplete | None + chunk_count: Incomplete | None + memory_usage: Incomplete | None + total_samples: Incomplete | None + retention_msecs: Incomplete | None + last_time_stamp: Incomplete | None + first_time_stamp: Incomplete | None + + max_samples_per_chunk: Incomplete | None + chunk_size: Incomplete | None + duplicate_policy: Incomplete | None + def __init__(self, args) -> None: ... diff --git a/valkey/commands/timeseries/utils.pyi b/valkey/commands/timeseries/utils.pyi new file mode 100644 index 00000000..4a0d52c4 --- /dev/null +++ b/valkey/commands/timeseries/utils.pyi @@ -0,0 +1,5 @@ +def list_to_dict(aList): ... +def parse_range(response): ... +def parse_m_range(response): ... +def parse_get(response): ... +def parse_m_get(response): ... diff --git a/valkey/connection.pyi b/valkey/connection.pyi new file mode 100644 index 00000000..9796fd21 --- /dev/null +++ b/valkey/connection.pyi @@ -0,0 +1,289 @@ +from _typeshed import Incomplete, Unused +from abc import abstractmethod +from collections.abc import Callable, Iterable, Mapping +from queue import Queue +from socket import socket +from typing import Any, ClassVar +from typing_extensions import Self, TypeAlias + +from .credentials import CredentialProvider +from .retry import Retry + +ssl_available: bool +SYM_STAR: bytes +SYM_DOLLAR: bytes +SYM_CRLF: bytes +SYM_EMPTY: bytes +SERVER_CLOSED_CONNECTION_ERROR: str +NONBLOCKING_EXCEPTIONS: tuple[type[Exception], ...] 
+NONBLOCKING_EXCEPTION_ERROR_NUMBERS: dict[type[Exception], int] +SENTINEL: object +MODULE_LOAD_ERROR: str +NO_SUCH_MODULE_ERROR: str +MODULE_UNLOAD_NOT_POSSIBLE_ERROR: str +MODULE_EXPORTS_DATA_TYPES_ERROR: str +FALSE_STRINGS: tuple[str, ...] +URL_QUERY_ARGUMENT_PARSERS: dict[str, Callable[[Any], Any]] + +# Options as passed to Pool.get_connection(). +_ConnectionPoolOptions: TypeAlias = Any +_ConnectFunc: TypeAlias = Callable[[Connection], object] + +class BaseParser: + EXCEPTION_CLASSES: ClassVar[dict[str, type[Exception] | dict[str, type[Exception]]]] + @classmethod + def parse_error(cls, response: str) -> Exception: ... + +class SocketBuffer: + socket_read_size: int + bytes_written: int + bytes_read: int + socket_timeout: float | None + def __init__(self, socket: socket, socket_read_size: int, socket_timeout: float | None) -> None: ... + def unread_bytes(self) -> int: ... + def can_read(self, timeout: float | None) -> bool: ... + def read(self, length: int) -> bytes: ... + def readline(self) -> bytes: ... + def get_pos(self) -> int: ... + def rewind(self, pos: int) -> None: ... + def purge(self) -> None: ... + def close(self) -> None: ... + +class PythonParser(BaseParser): + encoding: str + socket_read_size: int + encoder: Encoder | None + def __init__(self, socket_read_size: int) -> None: ... + def __del__(self) -> None: ... + def on_connect(self, connection: Connection) -> None: ... + def on_disconnect(self) -> None: ... + def can_read(self, timeout: float | None) -> bool: ... + def read_response(self, disable_decoding: bool = False) -> Any: ... # `str | bytes` or `list[str | bytes]` + +class LibvalkeyParser(BaseParser): + socket_read_size: int + def __init__(self, socket_read_size: int) -> None: ... + def __del__(self) -> None: ... + def on_connect(self, connection: Connection, **kwargs) -> None: ... + def on_disconnect(self) -> None: ... + def can_read(self, timeout: float | None) -> bool: ... 
+ def read_from_socket(self, timeout: float | None = ..., raise_on_timeout: bool = True) -> bool: ... + def read_response(self, disable_decoding: bool = False) -> Any: ... # `str | bytes` or `list[str | bytes]` + +DefaultParser: type[BaseParser] # Libvalkey or PythonParser + +_Encodable: TypeAlias = str | bytes | memoryview | bool | float + +class Encoder: + encoding: str + encoding_errors: str + decode_responses: bool + def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ... + def encode(self, value: _Encodable) -> bytes: ... + def decode(self, value: str | bytes | memoryview, force: bool = False) -> str: ... + +class AbstractConnection: + pid: int + db: int + client_name: str | None + credential_provider: CredentialProvider | None + password: str | None + username: str | None + socket_timeout: float | None + socket_connect_timeout: float | None + retry_on_timeout: bool + retry_on_error: list[type[Exception]] + retry: Retry + health_check_interval: int + next_health_check: int + valkey_connect_func: _ConnectFunc | None + encoder: Encoder + + def __init__( + self, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + @abstractmethod + def repr_pieces(self) -> list[tuple[str, Any]]: ... + def register_connect_callback(self, callback: _ConnectFunc) -> None: ... + def clear_connect_callbacks(self) -> None: ... 
+ def set_parser(self, parser_class: type[BaseParser]) -> None: ... + def connect(self) -> None: ... + def on_connect(self) -> None: ... + def disconnect(self, *args: Unused) -> None: ... # 'args' added in valkey 4.1.2 + def check_health(self) -> None: ... + def send_packed_command(self, command: str | Iterable[str], check_health: bool = True) -> None: ... + def send_command(self, *args, **kwargs) -> None: ... + def can_read(self, timeout: float | None = 0) -> bool: ... + def read_response( + self, disable_decoding: bool = False, *, disconnect_on_error: bool = True + ) -> Any: ... # `str | bytes` or `list[str | bytes]` + def pack_command(self, *args) -> list[bytes]: ... + def pack_commands(self, commands: Iterable[Iterable[Incomplete]]) -> list[bytes]: ... + +class Connection(AbstractConnection): + host: str + port: int + socket_keepalive: bool + socket_keepalive_options: Mapping[str, int | str] + socket_type: int + def __init__( + self, + host: str = "localhost", + port: int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[str, int | str] | None = None, + socket_type: int = 0, + *, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... 
+ +class SSLConnection(Connection): + keyfile: Any + certfile: Any + cert_reqs: Any + ca_certs: Any + ca_path: Incomplete | None + check_hostname: bool + certificate_password: Incomplete | None + ssl_validate_ocsp: bool + ssl_validate_ocsp_stapled: bool # added in 4.1.1 + ssl_ocsp_context: Incomplete | None # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None # added in 4.1.1 + def __init__( + self, + ssl_keyfile=None, + ssl_certfile=None, + ssl_cert_reqs="required", + ssl_ca_certs=None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_ca_path: Incomplete | None = None, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + *, + host: str = "localhost", + port: int = 6379, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[str, int | str] | None = None, + socket_type: int = 0, + db: int = 0, + password: str | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... 
+ +class UnixDomainSocketConnection(AbstractConnection): + path: str + def __init__( + self, + path: str = "", + *, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... + +# TODO: make generic on `connection_class` +class ConnectionPool: + connection_class: type[Connection] + connection_kwargs: dict[str, Any] + max_connections: int + pid: int + @classmethod + def from_url(cls, url: str, *, db: int = ..., decode_components: bool = ..., **kwargs) -> Self: ... + def __init__( + self, connection_class: type[AbstractConnection] = ..., max_connections: int | None = None, **connection_kwargs + ) -> None: ... + def reset(self) -> None: ... + def get_connection(self, command_name: Unused, *keys, **options: _ConnectionPoolOptions) -> Connection: ... + def make_connection(self) -> Connection: ... + def release(self, connection: Connection) -> None: ... + def disconnect(self, inuse_connections: bool = True) -> None: ... + def get_encoder(self) -> Encoder: ... + def owns_connection(self, connection: Connection) -> bool: ... 
+ +class BlockingConnectionPool(ConnectionPool): + queue_class: type[Queue[Any]] + timeout: float + pool: Queue[Connection | None] # might not be defined + def __init__( + self, + max_connections: int = 50, + timeout: float = 20, + connection_class: type[Connection] = ..., + queue_class: type[Queue[Any]] = ..., + **connection_kwargs, + ) -> None: ... + def disconnect(self) -> None: ... # type: ignore[override] + +def to_bool(value: object) -> bool: ... +def parse_url(url: str) -> dict[str, Any]: ... diff --git a/valkey/crc.pyi b/valkey/crc.pyi new file mode 100644 index 00000000..d808e657 --- /dev/null +++ b/valkey/crc.pyi @@ -0,0 +1,5 @@ +from valkey.typing import EncodedT + +VALKEY_CLUSTER_HASH_SLOTS: int + +def key_slot(key: EncodedT, bucket: int = 16384) -> int: ... diff --git a/valkey/credentials.pyi b/valkey/credentials.pyi new file mode 100644 index 00000000..7a2d78ec --- /dev/null +++ b/valkey/credentials.pyi @@ -0,0 +1,11 @@ +from abc import abstractmethod + +class CredentialProvider: + @abstractmethod + def get_credentials(self) -> tuple[str] | tuple[str, str]: ... + +class UsernamePasswordCredentialProvider(CredentialProvider): + username: str + password: str + def __init__(self, username: str | None = None, password: str | None = None) -> None: ... + def get_credentials(self) -> tuple[str] | tuple[str, str]: ... diff --git a/valkey/exceptions.pyi b/valkey/exceptions.pyi new file mode 100644 index 00000000..9f671ad7 --- /dev/null +++ b/valkey/exceptions.pyi @@ -0,0 +1,42 @@ +class ValkeyError(Exception): ... +class AuthenticationError(ValkeyError): ... +class ConnectionError(ValkeyError): ... +class TimeoutError(ValkeyError): ... +class AuthorizationError(ConnectionError): ... +class BusyLoadingError(ConnectionError): ... +class InvalidResponse(ValkeyError): ... +class ResponseError(ValkeyError): ... +class DataError(ValkeyError): ... +class PubSubError(ValkeyError): ... +class WatchError(ValkeyError): ... +class NoScriptError(ResponseError): ... 
+class ExecAbortError(ResponseError): ... +class ReadOnlyError(ResponseError): ... +class NoPermissionError(ResponseError): ... +class ModuleError(ResponseError): ... +class LockError(ValkeyError, ValueError): ... +class LockNotOwnedError(LockError): ... +class ChildDeadlockedError(Exception): ... +class AuthenticationWrongNumberOfArgsError(ResponseError): ... +class ValkeyClusterException(Exception): ... +class ClusterError(ValkeyError): ... + +class ClusterDownError(ClusterError, ResponseError): + args: tuple[str] + message: str + def __init__(self, resp: str) -> None: ... + +class AskError(ResponseError): + args: tuple[str] + message: str + slot_id: int + node_addr: tuple[str, int] + host: str + port: int + def __init__(self, resp: str) -> None: ... + +class TryAgainError(ResponseError): ... +class ClusterCrossSlotError(ResponseError): ... +class MovedError(AskError): ... +class MasterDownError(ClusterDownError): ... +class SlotNotCoveredError(ValkeyClusterException): ... diff --git a/valkey/lock.pyi b/valkey/lock.pyi new file mode 100644 index 00000000..81d1dcac --- /dev/null +++ b/valkey/lock.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from types import TracebackType +from typing import Any, ClassVar, Protocol +from typing_extensions import Self + +from valkey.client import Valkey + +class _Local(Protocol): + token: str | bytes | None + +class Lock: + LUA_EXTEND_SCRIPT: ClassVar[str] + LUA_REACQUIRE_SCRIPT: ClassVar[str] + LUA_RELEASE_SCRIPT: ClassVar[str] + lua_extend: ClassVar[Incomplete | None] + lua_reacquire: ClassVar[Incomplete | None] + lua_release: ClassVar[Incomplete | None] + valkey: Valkey[Any] + name: str + timeout: float | None + sleep: float + blocking: bool + blocking_timeout: float | None + thread_local: bool + local: _Local + def __init__( + self, + valkey: Valkey[Any], + name: str, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + thread_local: bool = True, + 
) -> None: ... + def register_scripts(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + def acquire( + self, + sleep: float | None = None, + blocking: bool | None = None, + blocking_timeout: float | None = None, + token: str | bytes | None = None, + ) -> bool: ... + def do_acquire(self, token: str | bytes) -> bool: ... + def locked(self) -> bool: ... + def owned(self) -> bool: ... + def release(self) -> None: ... + def do_release(self, expected_token: str | bytes) -> None: ... + def extend(self, additional_time: float, replace_ttl: bool = False) -> bool: ... + def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ... + def reacquire(self) -> bool: ... + def do_reacquire(self) -> bool: ... diff --git a/valkey/ocsp.pyi b/valkey/ocsp.pyi new file mode 100644 index 00000000..5fc72e08 --- /dev/null +++ b/valkey/ocsp.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from ssl import SSLObject, SSLSocket +from typing import Literal + +from cryptography.x509.base import Certificate +from OpenSSL.SSL import Connection + +def ocsp_staple_verifier(con: Connection, ocsp_bytes: bytes, expected: bytes | None = None) -> Literal[True]: ... + +class OCSPVerifier: + SOCK: SSLObject | SSLSocket + HOST: str + PORT: int + CA_CERTS: str | None + def __init__(self, sock: SSLObject | SSLSocket, host: str, port: int, ca_certs: str | None = None) -> None: ... + # cryptography.x509.general_name.GeneralName.value is typed as Any + def components_from_socket(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ... + def components_from_direct_connection(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ... + def build_certificate_url(self, server: str, cert: Certificate, issuer_cert: Certificate) -> str: ... 
+ def check_certificate(self, server: str, cert: Certificate, issuer_url: str | bytes) -> Literal[True]: ... + def is_valid(self) -> Literal[True]: ... diff --git a/valkey/retry.pyi b/valkey/retry.pyi new file mode 100644 index 00000000..ab727e66 --- /dev/null +++ b/valkey/retry.pyi @@ -0,0 +1,11 @@ +from collections.abc import Callable, Iterable +from typing import TypeVar + +from valkey.backoff import AbstractBackoff + +_T = TypeVar("_T") + +class Retry: + def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[Exception], ...] = ...) -> None: ... + def update_supported_errors(self, specified_errors: Iterable[type[Exception]]) -> None: ... + def call_with_retry(self, do: Callable[[], _T], fail: Callable[[Exception], object]) -> _T: ... diff --git a/valkey/sentinel.pyi b/valkey/sentinel.pyi new file mode 100644 index 00000000..4a4c9489 --- /dev/null +++ b/valkey/sentinel.pyi @@ -0,0 +1,62 @@ +from collections.abc import Iterable, Iterator +from typing import Any, Literal, TypeVar, overload +from typing_extensions import TypeAlias + +from valkey.client import Valkey +from valkey.commands.sentinel import SentinelCommands +from valkey.connection import Connection, ConnectionPool, SSLConnection +from valkey.exceptions import ConnectionError + +_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any]) +_AddressAndPort: TypeAlias = tuple[str, int] +_SentinelState: TypeAlias = dict[str, Any] # TODO: this can be a TypedDict + +class MasterNotFoundError(ConnectionError): ... +class SlaveNotFoundError(ConnectionError): ... + +class SentinelManagedConnection(Connection): + connection_pool: SentinelConnectionPool + def __init__(self, *, connection_pool: SentinelConnectionPool, **kwargs) -> None: ... + def connect_to(self, address: _AddressAndPort) -> None: ... + def connect(self) -> None: ... 
+ # The result can be either `str | bytes` or `list[str | bytes]` + def read_response(self, disable_decoding: bool = False, *, disconnect_on_error: bool = False) -> Any: ... + +class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ... + +class SentinelConnectionPool(ConnectionPool): + is_master: bool + check_connection: bool + service_name: str + sentinel_manager: Sentinel + def __init__(self, service_name: str, sentinel_manager: Sentinel, **kwargs) -> None: ... + def reset(self) -> None: ... + def owns_connection(self, connection: Connection) -> bool: ... + def get_master_address(self) -> _AddressAndPort: ... + def rotate_slaves(self) -> Iterator[_AddressAndPort]: ... + +class Sentinel(SentinelCommands): + sentinel_kwargs: dict[str, Any] + sentinels: list[Valkey[Any]] + min_other_sentinels: int + connection_kwargs: dict[str, Any] + def __init__( + self, + sentinels: Iterable[_AddressAndPort], + min_other_sentinels: int = 0, + sentinel_kwargs: dict[str, Any] | None = None, + **connection_kwargs, + ) -> None: ... + def check_master_state(self, state: _SentinelState, service_name: str) -> bool: ... + def discover_master(self, service_name: str) -> _AddressAndPort: ... + def filter_slaves(self, slaves: Iterable[_SentinelState]) -> list[_AddressAndPort]: ... + def discover_slaves(self, service_name: str) -> list[_AddressAndPort]: ... + @overload + def master_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ... + @overload + def master_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ... + @overload + def slave_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ... + @overload + def slave_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ... + def execute_command(self, *args, **kwargs) -> Literal[True]: ... 
diff --git a/valkey/typing.pyi b/valkey/typing.pyi new file mode 100644 index 00000000..dce33cb2 --- /dev/null +++ b/valkey/typing.pyi @@ -0,0 +1,34 @@ +from collections.abc import Iterable +from datetime import datetime, timedelta +from typing import Any, Protocol, TypeVar +from typing_extensions import TypeAlias + +from valkey.asyncio.connection import ConnectionPool as AsyncConnectionPool +from valkey.connection import ConnectionPool + +# The following type aliases exist at runtime. +EncodedT: TypeAlias = bytes | memoryview +DecodedT: TypeAlias = str | int | float +EncodableT: TypeAlias = EncodedT | DecodedT +AbsExpiryT: TypeAlias = int | datetime +ExpiryT: TypeAlias = int | timedelta +ZScoreBoundT: TypeAlias = float | str +BitfieldOffsetT: TypeAlias = int | str +_StringLikeT: TypeAlias = bytes | str | memoryview # noqa: Y043 +KeyT: TypeAlias = _StringLikeT +PatternT: TypeAlias = _StringLikeT +FieldT: TypeAlias = EncodableT +KeysT: TypeAlias = KeyT | Iterable[KeyT] +ChannelT: TypeAlias = _StringLikeT +GroupT: TypeAlias = _StringLikeT +ConsumerT: TypeAlias = _StringLikeT +StreamIdT: TypeAlias = int | _StringLikeT +ScriptTextT: TypeAlias = _StringLikeT +TimeoutSecT: TypeAlias = int | float | _StringLikeT +AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview) # noqa: Y001 +AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview) # noqa: Y001 +AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview) # noqa: Y001 + +class CommandsProtocol(Protocol): + connection_pool: AsyncConnectionPool[Any] | ConnectionPool + def execute_command(self, *args, **options): ... 
diff --git a/valkey/utils.pyi b/valkey/utils.pyi new file mode 100644 index 00000000..de41c112 --- /dev/null +++ b/valkey/utils.pyi @@ -0,0 +1,22 @@ +from _typeshed import Unused +from collections.abc import Iterable, Mapping +from contextlib import AbstractContextManager +from typing import Any, Literal, TypeVar, overload + +from .client import Pipeline, Valkey, _StrType + +_T = TypeVar("_T") + +LIBVALKEY_AVAILABLE: bool +CRYPTOGRAPHY_AVAILABLE: bool + +@overload +def from_url(url: str, *, db: int = ..., decode_responses: Literal[True], **kwargs: Any) -> Valkey[str]: ... +@overload +def from_url(url: str, *, db: int = ..., decode_responses: Literal[False] = False, **kwargs: Any) -> Valkey[bytes]: ... +def pipeline(valkey_obj: Valkey[_StrType]) -> AbstractContextManager[Pipeline[_StrType]]: ... +def str_if_bytes(value: str | bytes) -> str: ... +def safe_str(value: object) -> str: ... +def dict_merge(*dicts: Mapping[str, _T]) -> dict[str, _T]: ... +def list_keys_to_dict(key_list, callback): ... # unused, alias for `dict.fromkeys` +def merge_result(command: Unused, res: Mapping[Any, Iterable[_T]]) -> list[_T]: ... 
From 48ac4a4ff0a2310fcc9a6e25f48bb252004ced74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Mon, 9 Sep 2024 22:54:34 +0200 Subject: [PATCH 02/39] More rename, add few types MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- valkey/cluster.pyi | 30 ++++++++++++------------- valkey/commands/graph/__init__.pyi | 19 ++++++++++++++++ valkey/commands/graph/execution_plan.py | 8 +++---- valkey/commands/search/__init__.pyi | 18 +++++++++++++++ valkey/commands/search/aggregation.py | 2 +- valkey/commands/search/aggregation.pyi | 4 ++-- valkey/commands/search/field.py | 12 +++++----- valkey/commands/search/querystring.py | 5 ++++- valkey/exceptions.pyi | 1 + valkey/retry.py | 2 +- 10 files changed, 71 insertions(+), 30 deletions(-) diff --git a/valkey/cluster.pyi b/valkey/cluster.pyi index 6758c05e..f6bb7b6d 100644 --- a/valkey/cluster.pyi +++ b/valkey/cluster.pyi @@ -5,16 +5,16 @@ from types import TracebackType from typing import Any, ClassVar, Literal, NoReturn, Protocol from typing_extensions import Self -from redis.client import CaseInsensitiveDict, PubSub, Valkey, _ParseResponseOptions -from redis.commands import CommandsParser, ValkeyClusterCommands -from redis.commands.core import _StrType -from redis.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable -from redis.exceptions import MovedError, ValkeyError -from redis.retry import Retry -from redis.typing import EncodableT +from valkey.client import CaseInsensitiveDict, PubSub, Valkey, _ParseResponseOptions +from valkey.commands import CommandsParser, ValkeyClusterCommands +from valkey.commands.core import _StrType +from valkey.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable +from valkey.exceptions import MovedError, ValkeyError +from valkey.retry import Retry +from valkey.typing import EncodableT def get_node_name(host: 
str, port: str | int) -> str: ... -def get_connection(redis_node: Valkey[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ... +def get_connection(valkey_node: Valkey[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ... def parse_scan_result(command: Unused, res, **options): ... def parse_pubsub_numsub(command: Unused, res, **options: Unused): ... def parse_cluster_slots(resp, **options) -> dict[tuple[int, int], dict[str, Any]]: ... @@ -82,7 +82,7 @@ class ValkeyCluster(AbstractValkeyCluster, ValkeyClusterCommands[_StrType]): @classmethod def from_url(cls, url: str, **kwargs) -> Self: ... def on_connect(self, connection: Connection) -> None: ... - def get_redis_connection(self, node: ClusterNode) -> Valkey[Any]: ... + def get_valkey_connection(self, node: ClusterNode) -> Valkey[Any]: ... def get_node( self, host: str | None = None, port: str | int | None = None, node_name: str | None = None ) -> ClusterNode | None: ... @@ -120,9 +120,9 @@ class ClusterNode: port: int name: str server_type: str | None - redis_connection: Valkey[Incomplete] | None + valkey_connection: Valkey[Incomplete] | None def __init__( - self, host: str, port: int, server_type: str | None = None, redis_connection: Valkey[Incomplete] | None = None + self, host: str, port: int, server_type: str | None = None, valkey_connection: Valkey[Incomplete] | None = None ) -> None: ... def __eq__(self, obj: object) -> bool: ... def __del__(self) -> None: ... @@ -163,8 +163,8 @@ class NodesManager: def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ... def populate_startup_nodes(self, nodes: Iterable[ClusterNode]) -> None: ... def check_slots_coverage(self, slots_cache: dict[str, list[ClusterNode]]) -> bool: ... - def create_redis_connections(self, nodes: Iterable[ClusterNode]) -> None: ... - def create_redis_node(self, host: str, port: int | str, **kwargs: Any) -> Valkey[Incomplete]: ... 
+ def create_valkey_connections(self, nodes: Iterable[ClusterNode]) -> None: ... + def create_valkey_node(self, host: str, port: int | str, **kwargs: Any) -> Valkey[Incomplete]: ... def initialize(self) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... @@ -175,7 +175,7 @@ class ClusterPubSub(PubSub): cluster: ValkeyCluster[Any] def __init__( self, - redis_cluster: ValkeyCluster[Any], + valkey_cluster: ValkeyCluster[Any], node: ClusterNode | None = None, host: str | None = None, port: int | None = None, @@ -186,7 +186,7 @@ class ClusterPubSub(PubSub): ) -> None: ... def get_pubsub_node(self) -> ClusterNode | None: ... def execute_command(self, *args, **kwargs) -> None: ... - def get_redis_connection(self) -> Valkey[Any] | None: ... + def get_valkey_connection(self) -> Valkey[Any] | None: ... class ClusterPipeline(ValkeyCluster[_StrType]): command_stack: list[Incomplete] diff --git a/valkey/commands/graph/__init__.pyi b/valkey/commands/graph/__init__.pyi index a8209b8d..222db4ef 100644 --- a/valkey/commands/graph/__init__.pyi +++ b/valkey/commands/graph/__init__.pyi @@ -24,3 +24,22 @@ class Graph(GraphCommands): def labels(self): ... def relationship_types(self): ... def property_keys(self): ... + + +class AsyncGraph(GraphCommands): + NAME: Any + client: Any + execute_command: Any + nodes: Any + edges: Any + version: int + def __init__(self, client, name=...) -> None: ... + async def get_label(self, idx): ... + async def get_relation(self, idx): ... + async def get_property(self, idx): ... + async def add_node(self, node) -> None: ... + async def add_edge(self, edge) -> None: ... + async def call_procedure(self, procedure, *args, read_only: bool = False, **kwagrs): ... + async def labels(self): ... + async def relationship_types(self): ... + async def property_keys(self): ... 
diff --git a/valkey/commands/graph/execution_plan.py b/valkey/commands/graph/execution_plan.py index 179a80cc..cf71284e 100644 --- a/valkey/commands/graph/execution_plan.py +++ b/valkey/commands/graph/execution_plan.py @@ -166,10 +166,10 @@ def _create_operation(args): args.pop(0) if len(args) > 0 and "Records produced" in args[-1]: records_produced = int( - re.search("Records produced: (\\d+)", args[-1]).group(1) + re.search("Records produced: (\\d+)", args[-1]).group(1) # type: ignore[union-attr] ) execution_time = float( - re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1) + re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1) # type: ignore[union-attr] ) profile_stats = ProfileStats(records_produced, execution_time) args.pop(-1) @@ -186,7 +186,7 @@ def _create_operation(args): # set the current operation and move next child = _create_operation(current_op.split("|")) if current: - current = stack.pop() + current = stack.pop() # type: ignore[unreachable] current.append_child(child) current = child i += 1 @@ -194,7 +194,7 @@ def _create_operation(args): # if the operation is child of the current operation # add it as child and set as current operation child = _create_operation(current_op.split("|")) - current.append_child(child) + current.append_child(child) # type: ignore[union-attr] stack.append(current) current = child level += 1 diff --git a/valkey/commands/search/__init__.pyi b/valkey/commands/search/__init__.pyi index 3366d451..4776dfc9 100644 --- a/valkey/commands/search/__init__.pyi +++ b/valkey/commands/search/__init__.pyi @@ -20,3 +20,21 @@ class Search(SearchCommands): def commit(self): ... def __init__(self, client, index_name: str = "idx") -> None: ... + +class AsyncSearch(SearchCommands): + class BatchIndexer: + def __init__(self, client, chunk_size: int = 1000) -> None: ... 
+ async def add_document( + self, + doc_id, + nosave: bool = False, + score: float = 1.0, + payload: Incomplete | None = None, + replace: bool = False, + partial: bool = False, + no_create: bool = False, + **fields, + ): ... + async def commit(self): ... + + def __init__(self, client, index_name: str = "idx") -> None: ... diff --git a/valkey/commands/search/aggregation.py b/valkey/commands/search/aggregation.py index 45172380..6c523216 100644 --- a/valkey/commands/search/aggregation.py +++ b/valkey/commands/search/aggregation.py @@ -22,7 +22,7 @@ class Reducer: See the `valkeyearch.reducers` module for the actual reducers. """ - NAME = None + NAME: Union[str, None] = None def __init__(self, *args: List[str]) -> None: self._args = args diff --git a/valkey/commands/search/aggregation.pyi b/valkey/commands/search/aggregation.pyi index 48bac218..f5200332 100644 --- a/valkey/commands/search/aggregation.pyi +++ b/valkey/commands/search/aggregation.pyi @@ -1,4 +1,4 @@ -from typing import Any, ClassVar, Literal +from typing import Any, ClassVar, Literal, Union FIELDNAME: Any @@ -9,7 +9,7 @@ class Limit: def build_args(self): ... class Reducer: - NAME: ClassVar[None] + NAME: ClassVar[Union[str, None]] def __init__(self, *args) -> None: ... def alias(self, alias): ... 
@property diff --git a/valkey/commands/search/field.py b/valkey/commands/search/field.py index 72907ae4..04ff0e84 100644 --- a/valkey/commands/search/field.py +++ b/valkey/commands/search/field.py @@ -1,4 +1,4 @@ -from typing import List +from typing import List, Union from valkey import DataError @@ -18,10 +18,10 @@ class Field: def __init__( self, name: str, - args: List[str] = None, + args: Union[List[str], None] = None, sortable: bool = False, no_index: bool = False, - as_name: str = None, + as_name: Union[str, None] = None, ): if args is None: args = [] @@ -63,11 +63,11 @@ def __init__( name: str, weight: float = 1.0, no_stem: bool = False, - phonetic_matcher: str = None, + phonetic_matcher: Union[str, None] = None, withsuffixtrie: bool = False, **kwargs, ): - Field.__init__(self, name, args=[Field.TEXT, Field.WEIGHT, weight], **kwargs) + Field.__init__(self, name, args=[Field.TEXT, Field.WEIGHT, weight], **kwargs) # type: ignore[list-item] if no_stem: Field.append_arg(self, self.NOSTEM) @@ -180,5 +180,5 @@ def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs): attr_li.extend([key, value]) Field.__init__( - self, name, args=[Field.VECTOR, algorithm, len(attr_li), *attr_li], **kwargs + self, name, args=[Field.VECTOR, algorithm, len(attr_li), *attr_li], **kwargs # type: ignore[list-item] ) diff --git a/valkey/commands/search/querystring.py b/valkey/commands/search/querystring.py index 3ff13209..fa2292d9 100644 --- a/valkey/commands/search/querystring.py +++ b/valkey/commands/search/querystring.py @@ -1,3 +1,6 @@ +from typing import Dict, List + + def tags(*t): """ Indicate that the values should be matched to a tag field @@ -182,7 +185,7 @@ def __init__(self, *children, **kwparams): self.params = [] - kvparams = {} + kvparams: Dict[str, List[Value]] = {} for k, v in kwparams.items(): curvals = kvparams.setdefault(k, []) if isinstance(v, (str, int, float)): diff --git a/valkey/exceptions.pyi b/valkey/exceptions.pyi index 9f671ad7..50eb8955 
100644 --- a/valkey/exceptions.pyi +++ b/valkey/exceptions.pyi @@ -10,6 +10,7 @@ class DataError(ValkeyError): ... class PubSubError(ValkeyError): ... class WatchError(ValkeyError): ... class NoScriptError(ResponseError): ... +class OutOfMemoryError(ResponseError): ... class ExecAbortError(ResponseError): ... class ReadOnlyError(ResponseError): ... class NoPermissionError(ResponseError): ... diff --git a/valkey/retry.py b/valkey/retry.py index e40a8331..4eb34d77 100644 --- a/valkey/retry.py +++ b/valkey/retry.py @@ -7,7 +7,7 @@ T = TypeVar("T") if TYPE_CHECKING: - from redis.backoff import AbstractBackoff + from valkey.backoff import AbstractBackoff class Retry: From 7d75d411e8e8f240fe13f2fa3107966fe639c2f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Tue, 10 Sep 2024 13:15:31 +0200 Subject: [PATCH 03/39] make mypy happy MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- .mypy.ini | 42 +++++++++++++-------------- valkey/commands/search/querystring.py | 2 +- valkey/commands/search/reducers.py | 2 +- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index 0d3b08d4..9733fd7a 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -1,24 +1,24 @@ [mypy] -#, docs/examples, tests -files = valkey -check_untyped_defs = True -follow_imports_for_stubs = True -#disallow_any_decorated = True -disallow_subclassing_any = True -#disallow_untyped_calls = True -disallow_untyped_decorators = True -#disallow_untyped_defs = True -implicit_reexport = False -no_implicit_optional = True -show_error_codes = True -strict_equality = True -warn_incomplete_stub = True -warn_redundant_casts = True -warn_unreachable = True -warn_unused_ignores = True -disallow_any_unimported = True -#warn_return_any = True +# strict = True +warn_return_any = False +show_error_context = True +pretty = True +exclude = docs -[mypy-valkey.asyncio.lock] -# TODO: Remove once locks has been
rewritten +[mypy-valkey._parsers.*] +ignore_errors = True + +[mypy-valkey._cache] +ignore_errors = True + +[mypy-tests.*] +ignore_errors = True + +[mypy-benchmarks.*] +ignore_errors = True + +[mypy-whitelist] +ignore_errors = True + +[mypy-tasks] ignore_errors = True diff --git a/valkey/commands/search/querystring.py b/valkey/commands/search/querystring.py index fa2292d9..1ebd6aa0 100644 --- a/valkey/commands/search/querystring.py +++ b/valkey/commands/search/querystring.py @@ -185,7 +185,7 @@ def __init__(self, *children, **kwparams): self.params = [] - kvparams: Dict[str, List[Value]] = {} + kvparams = {} for k, v in kwparams.items(): curvals = kvparams.setdefault(k, []) if isinstance(v, (str, int, float)): diff --git a/valkey/commands/search/reducers.py b/valkey/commands/search/reducers.py index 694558de..00f65075 100644 --- a/valkey/commands/search/reducers.py +++ b/valkey/commands/search/reducers.py @@ -151,7 +151,7 @@ def __init__(self, field: str, *byfields: Union[Asc, Desc]) -> None: and isinstance(byfields[0], type) and issubclass(byfields[0], SortDirection) ): - byfields = [byfields[0](field)] + byfields = [byfields[0](field)] # type: ignore[assignment] for f in byfields: fieldstrs += [f.field, f.DIRSTRING] From 3340e26f4c82810933f7a57614b0a3c4982e46ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Tue, 10 Sep 2024 14:01:15 +0200 Subject: [PATCH 04/39] new: MyPy Github actions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- .github/workflows/mypy.yml | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 .github/workflows/mypy.yml diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml new file mode 100644 index 00000000..84983051 --- /dev/null +++ b/.github/workflows/mypy.yml @@ -0,0 +1,34 @@ +name: Python application + +on: + push: + branches: [ types ] + pull_request: + branches: [ types ] + +jobs: + build: 
+ + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{matrix.python-version}} + uses: actions/setup-python@v5 + with: + python-version: ${{matrix.python-version}} + + - name: Install package + run: | + pip install mypy cryptography pyopenssl requests + pip install types-setuptools + pip install .[libvalkey] + + - name: Run MyPy + run: | + mypy --exclude build . From 7bff49ecdbab71a6c81e67fe1780a8264717260b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Tue, 10 Sep 2024 14:04:52 +0200 Subject: [PATCH 05/39] fix: python < 3.10 compat MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- valkey/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/valkey/__init__.py b/valkey/__init__.py index 0b10bdac..3cb2fbc1 100644 --- a/valkey/__init__.py +++ b/valkey/__init__.py @@ -1,4 +1,5 @@ from importlib import metadata +from typing import Tuple, Union from valkey import asyncio # noqa from valkey.backoff import default_backoff @@ -45,7 +46,7 @@ def int_or_str(value): __version__: str -VERSION: tuple[int | str, ...] +VERSION: Tuple[Union[int, str], ...]
try: __version__ = metadata.version("valkey") From 64bcdc176d3b3d05047e90dcd5d50598b40d4883 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Wed, 11 Sep 2024 10:53:45 +0200 Subject: [PATCH 06/39] Fixup typing for test suite (async) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- .github/workflows/mypy.yml | 1 + .mypy.ini | 19 +- tests/test_asyncio/test_commands.py | 649 ++++++++++++------------ tests/test_commands.py | 61 +-- valkey/__init__.py | 2 +- valkey/asyncio/client.pyi | 15 +- valkey/client.pyi | 13 +- valkey/commands/core.pyi | 46 +- valkey/commands/graph/execution_plan.py | 2 +- valkey/commands/search/field.py | 2 +- valkey/commands/search/querystring.py | 5 +- 11 files changed, 439 insertions(+), 376 deletions(-) diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 84983051..4727b8be 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -27,6 +27,7 @@ jobs: run: | pip install mypy cryptography pyopenssl requests pip install types-setuptools + pip install -r dev_requirements.txt pip install .[libvalkey] - name: Run MyPy diff --git a/.mypy.ini b/.mypy.ini index 9733fd7a..8e09fb7c 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -1,9 +1,16 @@ [mypy] -# strict = True -warn_return_any = False +strict = True show_error_context = True pretty = True -exclude = docs +exclude = docs|build + +# These next few are various gradations of forcing use of type annotations +disallow_untyped_calls = False +disallow_incomplete_defs = False +disallow_untyped_defs = False + +# This one can be tricky to get passing if you use a lot of untyped libraries +warn_return_any = False [mypy-valkey._parsers.*] ignore_errors = True @@ -11,9 +18,15 @@ ignore_errors = True [mypy-valkey._cache] ignore_errors = True + [mypy-tests.*] ignore_errors = True +[mypy-tests.test_commands] +ignore_errors = False +[mypy-tests.test_asyncio.test_commands] +ignore_errors = 
False + [mypy-benchmarks.*] ignore_errors = True diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index c50d844e..9ad75b4f 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -2,6 +2,8 @@ Tests async overrides of commands from their mixins """ +from __future__ import annotations + import asyncio import binascii import datetime @@ -9,6 +11,7 @@ import re import sys from string import ascii_letters +from typing import Any import pytest import pytest_asyncio @@ -33,15 +36,15 @@ from valkey.client import EMPTY_RESPONSE, NEVER_DECODE if sys.version_info >= (3, 11, 3): - from asyncio import timeout as async_timeout + from asyncio import timeout as async_timeout # type: ignore[unused-ignore,assignment,no-redef,import-not-found,attr-defined] else: - from async_timeout import timeout as async_timeout + from async_timeout import timeout as async_timeout # type: ignore[unused-ignore,assignment,no-redef,import-not-found] VALKEY_6_VERSION = "5.9.0" @pytest_asyncio.fixture() -async def r_teardown(r: valkey.Valkey): +async def r_teardown(r: valkey.asyncio.Valkey[str]): """ A special fixture which removes the provided names from the database after use """ @@ -57,7 +60,7 @@ def factory(username): @pytest_asyncio.fixture() -async def slowlog(r: valkey.Valkey): +async def slowlog(r: valkey.asyncio.Valkey[str]): current_config = await r.config_get() old_slower_than_value = current_config["slowlog-log-slower-than"] old_max_legnth_value = current_config["slowlog-max-len"] @@ -71,13 +74,13 @@ async def slowlog(r: valkey.Valkey): await r.config_set("slowlog-max-len", old_max_legnth_value) -async def valkey_server_time(client: valkey.Valkey): +async def valkey_server_time(client: valkey.asyncio.Valkey[bytes]): seconds, milliseconds = await client.time() timestamp = float(f"{seconds}.{milliseconds}") return datetime.datetime.fromtimestamp(timestamp) -async def get_stream_message(client: valkey.Valkey, stream: 
str, message_id: str): +async def get_stream_message(client: valkey.asyncio.Valkey[str], stream: str, message_id: str): """Fetch a stream message and format it as a (message_id, fields) pair""" response = await client.xrange(stream, min=message_id, max=message_id) assert len(response) == 1 @@ -89,7 +92,7 @@ async def get_stream_message(client: valkey.Valkey, stream: str, message_id: str class TestResponseCallbacks: """Tests for the response callback system""" - async def test_response_callbacks(self, r: valkey.Valkey): + async def test_response_callbacks(self, r: valkey.asyncio.Valkey[str]): callbacks = _ValkeyCallbacks if is_resp2_connection(r): callbacks.update(_ValkeyCallbacksRESP2) @@ -97,32 +100,32 @@ async def test_response_callbacks(self, r: valkey.Valkey): callbacks.update(_ValkeyCallbacksRESP3) assert r.response_callbacks == callbacks assert id(r.response_callbacks) != id(_ValkeyCallbacks) - r.set_response_callback("GET", lambda x: "static") + r.set_response_callback("GET", lambda x: "static") # type: ignore[arg-type] await r.set("a", "foo") assert await r.get("a") == "static" - async def test_case_insensitive_command_names(self, r: valkey.Valkey): + async def test_case_insensitive_command_names(self, r: valkey.asyncio.Valkey[str]): assert r.response_callbacks["ping"] == r.response_callbacks["PING"] class TestValkeyCommands: - async def test_command_on_invalid_key_type(self, r: valkey.Valkey): + async def test_command_on_invalid_key_type(self, r: valkey.asyncio.Valkey[str]): await r.lpush("a", "1") with pytest.raises(valkey.ResponseError): await r.get("a") # SERVER INFORMATION @skip_if_server_version_lt(VALKEY_6_VERSION) - async def test_acl_cat_no_category(self, r: valkey.Valkey): + async def test_acl_cat_no_category(self, r: valkey.asyncio.Valkey[str]): categories = await r.acl_cat() assert isinstance(categories, list) - assert "read" in categories or b"read" in categories + assert "read" in categories or b"read" in categories # type: 
ignore[comparison-overlap] @skip_if_server_version_lt(VALKEY_6_VERSION) - async def test_acl_cat_with_category(self, r: valkey.Valkey): + async def test_acl_cat_with_category(self, r: valkey.asyncio.Valkey[str]): commands = await r.acl_cat("read") assert isinstance(commands, list) - assert "get" in commands or b"get" in commands + assert "get" in commands or b"get" in commands # type: ignore[comparison-overlap] @skip_if_server_version_lt(VALKEY_6_VERSION) async def test_acl_deluser(self, r_teardown): @@ -134,7 +137,7 @@ async def test_acl_deluser(self, r_teardown): assert await r.acl_deluser(username) == 1 @skip_if_server_version_lt(VALKEY_6_VERSION) - async def test_acl_genpass(self, r: valkey.Valkey): + async def test_acl_genpass(self, r: valkey.asyncio.Valkey[str]): password = await r.acl_genpass() assert isinstance(password, (str, bytes)) @@ -309,24 +312,24 @@ async def test_acl_setuser_add_passwords_and_nopass_fails(self, r_teardown): await r.acl_setuser(username, passwords="+mypass", nopass=True) @skip_if_server_version_lt(VALKEY_6_VERSION) - async def test_acl_users(self, r: valkey.Valkey): + async def test_acl_users(self, r: valkey.asyncio.Valkey[str]): users = await r.acl_users() assert isinstance(users, list) assert len(users) > 0 @skip_if_server_version_lt(VALKEY_6_VERSION) - async def test_acl_whoami(self, r: valkey.Valkey): + async def test_acl_whoami(self, r: valkey.asyncio.Valkey[str]): username = await r.acl_whoami() assert isinstance(username, (str, bytes)) @pytest.mark.onlynoncluster - async def test_client_list(self, r: valkey.Valkey): + async def test_client_list(self, r: valkey.asyncio.Valkey[str]): clients = await r.client_list() assert isinstance(clients[0], dict) assert "addr" in clients[0] @skip_if_server_version_lt("5.0.0") - async def test_client_list_type(self, r: valkey.Valkey): + async def test_client_list_type(self, r: valkey.asyncio.Valkey[str]): with pytest.raises(exceptions.ValkeyError): await r.client_list(_type="not a client 
type") for client_type in ["normal", "master", "replica", "pubsub"]: @@ -335,12 +338,12 @@ async def test_client_list_type(self, r: valkey.Valkey): @skip_if_server_version_lt("5.0.0") @pytest.mark.onlynoncluster - async def test_client_id(self, r: valkey.Valkey): + async def test_client_id(self, r: valkey.asyncio.Valkey[str]): assert await r.client_id() > 0 @skip_if_server_version_lt("5.0.0") @pytest.mark.onlynoncluster - async def test_client_unblock(self, r: valkey.Valkey): + async def test_client_unblock(self, r: valkey.asyncio.Valkey[str]): myid = await r.client_id() assert not await r.client_unblock(myid) assert not await r.client_unblock(myid, error=True) @@ -348,19 +351,19 @@ async def test_client_unblock(self, r: valkey.Valkey): @skip_if_server_version_lt("2.6.9") @pytest.mark.onlynoncluster - async def test_client_getname(self, r: valkey.Valkey): + async def test_client_getname(self, r: valkey.asyncio.Valkey[str]): assert await r.client_getname() is None @skip_if_server_version_lt("2.6.9") @pytest.mark.onlynoncluster - async def test_client_setname(self, r: valkey.Valkey): + async def test_client_setname(self, r: valkey.asyncio.Valkey[str]): assert await r.client_setname("valkey_py_test") assert_resp_response( r, await r.client_getname(), "valkey_py_test", b"valkey_py_test" ) @skip_if_server_version_lt("7.2.0") - async def test_client_setinfo(self, r: valkey.Valkey): + async def test_client_setinfo(self, r: valkey.asyncio.Valkey[str]): await r.ping() info = await r.client_info() assert info["lib-name"] == "valkey-py" @@ -383,7 +386,7 @@ async def test_client_setinfo(self, r: valkey.Valkey): @skip_if_server_version_lt("2.6.9") @pytest.mark.onlynoncluster - async def test_client_kill(self, r: valkey.Valkey, r2): + async def test_client_kill(self, r: valkey.asyncio.Valkey[str], r2): await r.client_setname("valkey-py-c1") await r2.client_setname("valkey-py-c2") clients = [ @@ -396,7 +399,7 @@ async def test_client_kill(self, r: valkey.Valkey, r2): 
clients_by_name = {client.get("name"): client for client in clients} client_addr = clients_by_name["valkey-py-c2"].get("addr") - assert await r.client_kill(client_addr) is True + assert await r.client_kill(client_addr) is True # type: ignore[arg-type] clients = [ client @@ -407,22 +410,22 @@ async def test_client_kill(self, r: valkey.Valkey, r2): assert clients[0].get("name") == "valkey-py-c1" @skip_if_server_version_lt("2.8.12") - async def test_client_kill_filter_invalid_params(self, r: valkey.Valkey): + async def test_client_kill_filter_invalid_params(self, r: valkey.asyncio.Valkey[str]): # empty with pytest.raises(exceptions.DataError): await r.client_kill_filter() # invalid skipme with pytest.raises(exceptions.DataError): - await r.client_kill_filter(skipme="yeah") # type: ignore + await r.client_kill_filter(skipme="yeah") # invalid type with pytest.raises(exceptions.DataError): - await r.client_kill_filter(_type="caster") # type: ignore + await r.client_kill_filter(_type="caster") @skip_if_server_version_lt("2.8.12") @pytest.mark.onlynoncluster - async def test_client_kill_filter_by_id(self, r: valkey.Valkey, r2): + async def test_client_kill_filter_by_id(self, r: valkey.asyncio.Valkey[str], r2): await r.client_setname("valkey-py-c1") await r2.client_setname("valkey-py-c2") clients = [ @@ -448,7 +451,7 @@ async def test_client_kill_filter_by_id(self, r: valkey.Valkey, r2): @skip_if_server_version_lt("2.8.12") @pytest.mark.onlynoncluster - async def test_client_kill_filter_by_addr(self, r: valkey.Valkey, r2): + async def test_client_kill_filter_by_addr(self, r: valkey.asyncio.Valkey[str], r2): await r.client_setname("valkey-py-c1") await r2.client_setname("valkey-py-c2") clients = [ @@ -473,7 +476,7 @@ async def test_client_kill_filter_by_addr(self, r: valkey.Valkey, r2): assert clients[0].get("name") == "valkey-py-c1" @skip_if_server_version_lt("2.6.9") - async def test_client_list_after_client_setname(self, r: valkey.Valkey): + async def 
test_client_list_after_client_setname(self, r: valkey.asyncio.Valkey[str]): await r.client_setname("valkey_py_test") clients = await r.client_list() # we don't know which client ours will be @@ -481,7 +484,7 @@ async def test_client_list_after_client_setname(self, r: valkey.Valkey): @skip_if_server_version_lt("2.9.50") @pytest.mark.onlynoncluster - async def test_client_pause(self, r: valkey.Valkey): + async def test_client_pause(self, r: valkey.asyncio.Valkey[str]): assert await r.client_pause(1) assert await r.client_pause(timeout=1) with pytest.raises(exceptions.ValkeyError): @@ -489,19 +492,19 @@ async def test_client_pause(self, r: valkey.Valkey): @skip_if_server_version_lt("7.2.0") @pytest.mark.onlynoncluster - async def test_client_no_touch(self, r: valkey.Valkey): + async def test_client_no_touch(self, r: valkey.asyncio.Valkey[bytes]): assert await r.client_no_touch("ON") == b"OK" assert await r.client_no_touch("OFF") == b"OK" with pytest.raises(TypeError): - await r.client_no_touch() + await r.client_no_touch() # type: ignore[call-arg] - async def test_config_get(self, r: valkey.Valkey): + async def test_config_get(self, r: valkey.asyncio.Valkey[str]): data = await r.config_get() assert "maxmemory" in data assert data["maxmemory"].isdigit() @pytest.mark.onlynoncluster - async def test_config_resetstat(self, r: valkey.Valkey): + async def test_config_resetstat(self, r: valkey.asyncio.Valkey[str]): await r.ping() prior_commands_processed = int((await r.info())["total_commands_processed"]) assert prior_commands_processed >= 1 @@ -509,24 +512,24 @@ async def test_config_resetstat(self, r: valkey.Valkey): reset_commands_processed = int((await r.info())["total_commands_processed"]) assert reset_commands_processed < prior_commands_processed - async def test_config_set(self, r: valkey.Valkey): + async def test_config_set(self, r: valkey.asyncio.Valkey[str]): await r.config_set("timeout", 70) assert (await r.config_get())["timeout"] == "70" assert await 
r.config_set("timeout", 0) assert (await r.config_get())["timeout"] == "0" @pytest.mark.onlynoncluster - async def test_dbsize(self, r: valkey.Valkey): + async def test_dbsize(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "foo") await r.set("b", "bar") assert await r.dbsize() == 2 @pytest.mark.onlynoncluster - async def test_echo(self, r: valkey.Valkey): + async def test_echo(self, r: valkey.asyncio.Valkey[str]): assert await r.echo("foo bar") == b"foo bar" @pytest.mark.onlynoncluster - async def test_info(self, r: valkey.Valkey): + async def test_info(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "foo") await r.set("b", "bar") info = await r.info() @@ -535,21 +538,21 @@ async def test_info(self, r: valkey.Valkey): assert "valkey_version" in info.keys() @pytest.mark.onlynoncluster - async def test_lastsave(self, r: valkey.Valkey): + async def test_lastsave(self, r: valkey.asyncio.Valkey[str]): assert isinstance(await r.lastsave(), datetime.datetime) - async def test_object(self, r: valkey.Valkey): + async def test_object(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "foo") assert isinstance(await r.object("refcount", "a"), int) assert isinstance(await r.object("idletime", "a"), int) assert await r.object("encoding", "a") in (b"raw", b"embstr") assert await r.object("idletime", "invalid-key") is None - async def test_ping(self, r: valkey.Valkey): + async def test_ping(self, r: valkey.asyncio.Valkey[str]): assert await r.ping() @pytest.mark.onlynoncluster - async def test_slowlog_get(self, r: valkey.Valkey, slowlog): + async def test_slowlog_get(self, r: valkey.asyncio.Valkey[str], slowlog): assert await r.slowlog_reset() unicode_string = chr(3456) + "abcd" + chr(3421) await r.get(unicode_string) @@ -571,7 +574,7 @@ async def test_slowlog_get(self, r: valkey.Valkey, slowlog): assert isinstance(slowlog[0]["duration"], int) @pytest.mark.onlynoncluster - async def test_slowlog_get_limit(self, r: valkey.Valkey, slowlog): + async def 
test_slowlog_get_limit(self, r: valkey.asyncio.Valkey[str], slowlog): assert await r.slowlog_reset() await r.get("foo") slowlog = await r.slowlog_get(1) @@ -580,36 +583,36 @@ async def test_slowlog_get_limit(self, r: valkey.Valkey, slowlog): assert len(slowlog) == 1 @pytest.mark.onlynoncluster - async def test_slowlog_length(self, r: valkey.Valkey, slowlog): + async def test_slowlog_length(self, r: valkey.asyncio.Valkey[str], slowlog): await r.get("foo") assert isinstance(await r.slowlog_len(), int) @skip_if_server_version_lt("2.6.0") - async def test_time(self, r: valkey.Valkey): + async def test_time(self, r: valkey.asyncio.Valkey[str]): t = await r.time() assert len(t) == 2 assert isinstance(t[0], int) assert isinstance(t[1], int) - async def test_never_decode_option(self, r: valkey.Valkey): - opts = {NEVER_DECODE: []} + async def test_never_decode_option(self, r: valkey.asyncio.Valkey[str]): + opts: dict[str, list[str]] = {NEVER_DECODE: []} await r.delete("a") assert await r.execute_command("EXISTS", "a", **opts) == 0 - async def test_empty_response_option(self, r: valkey.Valkey): - opts = {EMPTY_RESPONSE: []} + async def test_empty_response_option(self, r: valkey.asyncio.Valkey[str]): + opts: dict[str, list[str]] = {EMPTY_RESPONSE: []} await r.delete("a") assert await r.execute_command("EXISTS", "a", **opts) == 0 # BASIC KEY COMMANDS - async def test_append(self, r: valkey.Valkey): + async def test_append(self, r: valkey.asyncio.Valkey[bytes]): assert await r.append("a", "a1") == 2 assert await r.get("a") == b"a1" assert await r.append("a", "a2") == 4 assert await r.get("a") == b"a1a2" @skip_if_server_version_lt("2.6.0") - async def test_bitcount(self, r: valkey.Valkey): + async def test_bitcount(self, r: valkey.asyncio.Valkey[str]): await r.setbit("a", 5, True) assert await r.bitcount("a") == 1 await r.setbit("a", 6, True) @@ -629,32 +632,32 @@ async def test_bitcount(self, r: valkey.Valkey): @skip_if_server_version_lt("2.6.0") @pytest.mark.onlynoncluster - 
async def test_bitop_not_empty_string(self, r: valkey.Valkey): + async def test_bitop_not_empty_string(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "") await r.bitop("not", "r", "a") assert await r.get("r") is None @skip_if_server_version_lt("2.6.0") @pytest.mark.onlynoncluster - async def test_bitop_not(self, r: valkey.Valkey): + async def test_bitop_not(self, r: valkey.asyncio.Valkey[str]): test_str = b"\xAA\x00\xFF\x55" correct = ~0xAA00FF55 & 0xFFFFFFFF await r.set("a", test_str) await r.bitop("not", "r", "a") - assert int(binascii.hexlify(await r.get("r")), 16) == correct + assert int(binascii.hexlify(await r.get("r")), 16) == correct # type: ignore[arg-type] @skip_if_server_version_lt("2.6.0") @pytest.mark.onlynoncluster - async def test_bitop_not_in_place(self, r: valkey.Valkey): + async def test_bitop_not_in_place(self, r: valkey.asyncio.Valkey[str]): test_str = b"\xAA\x00\xFF\x55" correct = ~0xAA00FF55 & 0xFFFFFFFF await r.set("a", test_str) await r.bitop("not", "a", "a") - assert int(binascii.hexlify(await r.get("a")), 16) == correct + assert int(binascii.hexlify(await r.get("a")), 16) == correct # type: ignore[arg-type] @skip_if_server_version_lt("2.6.0") @pytest.mark.onlynoncluster - async def test_bitop_single_string(self, r: valkey.Valkey): + async def test_bitop_single_string(self, r: valkey.asyncio.Valkey[bytes]): test_str = b"\x01\x02\xFF" await r.set("a", test_str) await r.bitop("and", "res1", "a") @@ -666,19 +669,19 @@ async def test_bitop_single_string(self, r: valkey.Valkey): @skip_if_server_version_lt("2.6.0") @pytest.mark.onlynoncluster - async def test_bitop_string_operands(self, r: valkey.Valkey): + async def test_bitop_string_operands(self, r: valkey.asyncio.Valkey[str]): await r.set("a", b"\x01\x02\xFF\xFF") await r.set("b", b"\x01\x02\xFF") await r.bitop("and", "res1", "a", "b") await r.bitop("or", "res2", "a", "b") await r.bitop("xor", "res3", "a", "b") - assert int(binascii.hexlify(await r.get("res1")), 16) == 0x0102FF00 - 
assert int(binascii.hexlify(await r.get("res2")), 16) == 0x0102FFFF - assert int(binascii.hexlify(await r.get("res3")), 16) == 0x000000FF + assert int(binascii.hexlify(await r.get("res1")), 16) == 0x0102FF00 # type: ignore[arg-type] + assert int(binascii.hexlify(await r.get("res2")), 16) == 0x0102FFFF # type: ignore[arg-type] + assert int(binascii.hexlify(await r.get("res3")), 16) == 0x000000FF # type: ignore[arg-type] @pytest.mark.onlynoncluster @skip_if_server_version_lt("2.8.7") - async def test_bitpos(self, r: valkey.Valkey): + async def test_bitpos(self, r: valkey.asyncio.Valkey[str]): key = "key:bitpos" await r.set(key, b"\xff\xf0\x00") assert await r.bitpos(key, 0) == 12 @@ -691,7 +694,7 @@ async def test_bitpos(self, r: valkey.Valkey): assert await r.bitpos(key, 1) == -1 @skip_if_server_version_lt("2.8.7") - async def test_bitpos_wrong_arguments(self, r: valkey.Valkey): + async def test_bitpos_wrong_arguments(self, r: valkey.asyncio.Valkey[str]): key = "key:bitpos:wrong:args" await r.set(key, b"\xff\xf0\x00") with pytest.raises(exceptions.ValkeyError): @@ -699,7 +702,7 @@ async def test_bitpos_wrong_arguments(self, r: valkey.Valkey): with pytest.raises(exceptions.ValkeyError): await r.bitpos(key, 7) == 12 - async def test_decr(self, r: valkey.Valkey): + async def test_decr(self, r: valkey.asyncio.Valkey[bytes]): assert await r.decr("a") == -1 assert await r.get("a") == b"-1" assert await r.decr("a") == -2 @@ -707,37 +710,37 @@ async def test_decr(self, r: valkey.Valkey): assert await r.decr("a", amount=5) == -7 assert await r.get("a") == b"-7" - async def test_decrby(self, r: valkey.Valkey): + async def test_decrby(self, r: valkey.asyncio.Valkey[bytes]): assert await r.decrby("a", amount=2) == -2 assert await r.decrby("a", amount=3) == -5 assert await r.get("a") == b"-5" - async def test_delete(self, r: valkey.Valkey): + async def test_delete(self, r: valkey.asyncio.Valkey[str]): assert await r.delete("a") == 0 await r.set("a", "foo") assert await 
r.delete("a") == 1 - async def test_delete_with_multiple_keys(self, r: valkey.Valkey): + async def test_delete_with_multiple_keys(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "foo") await r.set("b", "bar") assert await r.delete("a", "b") == 2 assert await r.get("a") is None assert await r.get("b") is None - async def test_delitem(self, r: valkey.Valkey): + async def test_delitem(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "foo") await r.delete("a") assert await r.get("a") is None @skip_if_server_version_lt("4.0.0") - async def test_unlink(self, r: valkey.Valkey): + async def test_unlink(self, r: valkey.asyncio.Valkey[str]): assert await r.unlink("a") == 0 await r.set("a", "foo") assert await r.unlink("a") == 1 assert await r.get("a") is None @skip_if_server_version_lt("4.0.0") - async def test_unlink_with_multiple_keys(self, r: valkey.Valkey): + async def test_unlink_with_multiple_keys(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "foo") await r.set("b", "bar") assert await r.unlink("a", "b") == 2 @@ -745,7 +748,7 @@ async def test_unlink_with_multiple_keys(self, r: valkey.Valkey): assert await r.get("b") is None @skip_if_server_version_lt("2.6.0") - async def test_dump_and_restore(self, r: valkey.Valkey): + async def test_dump_and_restore(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "foo") dumped = await r.dump("a") await r.delete("a") @@ -753,7 +756,7 @@ async def test_dump_and_restore(self, r: valkey.Valkey): assert await r.get("a") == b"foo" @skip_if_server_version_lt("3.0.0") - async def test_dump_and_restore_and_replace(self, r: valkey.Valkey): + async def test_dump_and_restore_and_replace(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "bar") dumped = await r.dump("a") with pytest.raises(valkey.ResponseError): @@ -763,7 +766,7 @@ async def test_dump_and_restore_and_replace(self, r: valkey.Valkey): assert await r.get("a") == b"bar" @skip_if_server_version_lt("5.0.0") - async def 
test_dump_and_restore_absttl(self, r: valkey.Valkey): + async def test_dump_and_restore_absttl(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "foo") dumped = await r.dump("a") await r.delete("a") @@ -775,19 +778,19 @@ async def test_dump_and_restore_absttl(self, r: valkey.Valkey): assert await r.get("a") == b"foo" assert 0 < await r.ttl("a") <= 61 - async def test_exists(self, r: valkey.Valkey): + async def test_exists(self, r: valkey.asyncio.Valkey[str]): assert await r.exists("a") == 0 await r.set("a", "foo") await r.set("b", "bar") assert await r.exists("a") == 1 assert await r.exists("a", "b") == 2 - async def test_exists_contains(self, r: valkey.Valkey): + async def test_exists_contains(self, r: valkey.asyncio.Valkey[str]): assert not await r.exists("a") await r.set("a", "foo") assert await r.exists("a") - async def test_expire(self, r: valkey.Valkey): + async def test_expire(self, r: valkey.asyncio.Valkey[str]): assert not await r.expire("a", 10) await r.set("a", "foo") assert await r.expire("a", 10) @@ -795,24 +798,24 @@ async def test_expire(self, r: valkey.Valkey): assert await r.persist("a") assert await r.ttl("a") == -1 - async def test_expireat_datetime(self, r: valkey.Valkey): + async def test_expireat_datetime(self, r: valkey.asyncio.Valkey[bytes]): expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1) await r.set("a", "foo") assert await r.expireat("a", expire_at) assert 0 < await r.ttl("a") <= 61 - async def test_expireat_no_key(self, r: valkey.Valkey): + async def test_expireat_no_key(self, r: valkey.asyncio.Valkey[bytes]): expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1) assert not await r.expireat("a", expire_at) - async def test_expireat_unixtime(self, r: valkey.Valkey): + async def test_expireat_unixtime(self, r: valkey.asyncio.Valkey[bytes]): expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1) await r.set("a", "foo") expire_at_seconds = int(expire_at.timestamp()) assert 
await r.expireat("a", expire_at_seconds) assert 0 < await r.ttl("a") <= 61 - async def test_get_and_set(self, r: valkey.Valkey): + async def test_get_and_set(self, r: valkey.asyncio.Valkey[bytes]): # get and set can't be tested independently of each other assert await r.get("a") is None byte_string = b"value" @@ -823,9 +826,9 @@ async def test_get_and_set(self, r: valkey.Valkey): assert await r.set("unicode_string", unicode_string) assert await r.get("byte_string") == byte_string assert await r.get("integer") == str(integer).encode() - assert (await r.get("unicode_string")).decode("utf-8") == unicode_string + assert (await r.get("unicode_string")).decode("utf-8") == unicode_string # type: ignore[union-attr] - async def test_get_set_bit(self, r: valkey.Valkey): + async def test_get_set_bit(self, r: valkey.asyncio.Valkey[str]): # no value assert not await r.getbit("a", 5) # set bit 5 @@ -841,18 +844,18 @@ async def test_get_set_bit(self, r: valkey.Valkey): assert await r.setbit("a", 5, True) assert await r.getbit("a", 5) - async def test_getrange(self, r: valkey.Valkey): + async def test_getrange(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "foo") assert await r.getrange("a", 0, 0) == b"f" assert await r.getrange("a", 0, 2) == b"foo" assert await r.getrange("a", 3, 4) == b"" - async def test_getset(self, r: valkey.Valkey): + async def test_getset(self, r: valkey.asyncio.Valkey[bytes]): assert await r.getset("a", "foo") is None assert await r.getset("a", "bar") == b"foo" assert await r.get("a") == b"bar" - async def test_incr(self, r: valkey.Valkey): + async def test_incr(self, r: valkey.asyncio.Valkey[bytes]): assert await r.incr("a") == 1 assert await r.get("a") == b"1" assert await r.incr("a") == 2 @@ -860,20 +863,20 @@ async def test_incr(self, r: valkey.Valkey): assert await r.incr("a", amount=5) == 7 assert await r.get("a") == b"7" - async def test_incrby(self, r: valkey.Valkey): + async def test_incrby(self, r: valkey.asyncio.Valkey[bytes]): assert 
await r.incrby("a") == 1 assert await r.incrby("a", 4) == 5 assert await r.get("a") == b"5" @skip_if_server_version_lt("2.6.0") - async def test_incrbyfloat(self, r: valkey.Valkey): + async def test_incrbyfloat(self, r: valkey.asyncio.Valkey[bytes]): assert await r.incrbyfloat("a") == 1.0 assert await r.get("a") == b"1" assert await r.incrbyfloat("a", 1.1) == 2.1 - assert float(await r.get("a")) == float(2.1) + assert float(await r.get("a")) == float(2.1) # type: ignore[arg-type] @pytest.mark.onlynoncluster - async def test_keys(self, r: valkey.Valkey): + async def test_keys(self, r: valkey.asyncio.Valkey[bytes]): assert await r.keys() == [] keys_with_underscores = {b"test_a", b"test_b"} keys = keys_with_underscores.union({b"testc"}) @@ -883,7 +886,7 @@ async def test_keys(self, r: valkey.Valkey): assert set(await r.keys(pattern="test*")) == keys @pytest.mark.onlynoncluster - async def test_mget(self, r: valkey.Valkey): + async def test_mget(self, r: valkey.asyncio.Valkey[bytes]): assert await r.mget([]) == [] assert await r.mget(["a", "b"]) == [None, None] await r.set("a", "1") @@ -892,24 +895,24 @@ async def test_mget(self, r: valkey.Valkey): assert await r.mget("a", "other", "b", "c") == [b"1", None, b"2", b"3"] @pytest.mark.onlynoncluster - async def test_mset(self, r: valkey.Valkey): + async def test_mset(self, r: valkey.asyncio.Valkey[bytes]): d = {"a": b"1", "b": b"2", "c": b"3"} - assert await r.mset(d) + assert await r.mset(d) # type: ignore[arg-type] for k, v in d.items(): assert await r.get(k) == v @pytest.mark.onlynoncluster - async def test_msetnx(self, r: valkey.Valkey): + async def test_msetnx(self, r: valkey.asyncio.Valkey[bytes]): d = {"a": b"1", "b": b"2", "c": b"3"} - assert await r.msetnx(d) + assert await r.msetnx(d) # type: ignore[arg-type] d2 = {"a": b"x", "d": b"4"} - assert not await r.msetnx(d2) + assert not await r.msetnx(d2) # type: ignore[arg-type] for k, v in d.items(): assert await r.get(k) == v assert await r.get("d") is None 
@skip_if_server_version_lt("2.6.0") - async def test_pexpire(self, r: valkey.Valkey): + async def test_pexpire(self, r: valkey.asyncio.Valkey[str]): assert not await r.pexpire("a", 60000) await r.set("a", "foo") assert await r.pexpire("a", 60000) @@ -918,19 +921,19 @@ async def test_pexpire(self, r: valkey.Valkey): assert await r.pttl("a") == -1 @skip_if_server_version_lt("2.6.0") - async def test_pexpireat_datetime(self, r: valkey.Valkey): + async def test_pexpireat_datetime(self, r: valkey.asyncio.Valkey[bytes]): expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1) await r.set("a", "foo") assert await r.pexpireat("a", expire_at) assert 0 < await r.pttl("a") <= 61000 @skip_if_server_version_lt("2.6.0") - async def test_pexpireat_no_key(self, r: valkey.Valkey): + async def test_pexpireat_no_key(self, r: valkey.asyncio.Valkey[bytes]): expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1) assert not await r.pexpireat("a", expire_at) @skip_if_server_version_lt("2.6.0") - async def test_pexpireat_unixtime(self, r: valkey.Valkey): + async def test_pexpireat_unixtime(self, r: valkey.asyncio.Valkey[bytes]): expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1) await r.set("a", "foo") expire_at_milliseconds = int(expire_at.timestamp() * 1000) @@ -938,20 +941,20 @@ async def test_pexpireat_unixtime(self, r: valkey.Valkey): assert 0 < await r.pttl("a") <= 61000 @skip_if_server_version_lt("2.6.0") - async def test_psetex(self, r: valkey.Valkey): + async def test_psetex(self, r: valkey.asyncio.Valkey[bytes]): assert await r.psetex("a", 1000, "value") assert await r.get("a") == b"value" assert 0 < await r.pttl("a") <= 1000 @skip_if_server_version_lt("2.6.0") - async def test_psetex_timedelta(self, r: valkey.Valkey): + async def test_psetex_timedelta(self, r: valkey.asyncio.Valkey[bytes]): expire_at = datetime.timedelta(milliseconds=1000) assert await r.psetex("a", expire_at, "value") assert await r.get("a") == b"value" 
assert 0 < await r.pttl("a") <= 1000 @skip_if_server_version_lt("2.6.0") - async def test_pttl(self, r: valkey.Valkey): + async def test_pttl(self, r: valkey.asyncio.Valkey[str]): assert not await r.pexpire("a", 10000) await r.set("a", "1") assert await r.pexpire("a", 10000) @@ -960,7 +963,7 @@ async def test_pttl(self, r: valkey.Valkey): assert await r.pttl("a") == -1 @skip_if_server_version_lt("2.8.0") - async def test_pttl_no_key(self, r: valkey.Valkey): + async def test_pttl_no_key(self, r: valkey.asyncio.Valkey[str]): """PTTL on servers 2.8 and after return -2 when the key doesn't exist""" assert await r.pttl("a") == -2 @@ -978,21 +981,21 @@ async def test_hrandfield(self, r): assert len(await r.hrandfield("key", -10)) == 10 @pytest.mark.onlynoncluster - async def test_randomkey(self, r: valkey.Valkey): + async def test_randomkey(self, r: valkey.asyncio.Valkey[bytes]): assert await r.randomkey() is None for key in ("a", "b", "c"): await r.set(key, 1) assert await r.randomkey() in (b"a", b"b", b"c") @pytest.mark.onlynoncluster - async def test_rename(self, r: valkey.Valkey): + async def test_rename(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "1") assert await r.rename("a", "b") assert await r.get("a") is None assert await r.get("b") == b"1" @pytest.mark.onlynoncluster - async def test_renamenx(self, r: valkey.Valkey): + async def test_renamenx(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "1") await r.set("b", "2") assert not await r.renamenx("a", "b") @@ -1000,13 +1003,13 @@ async def test_renamenx(self, r: valkey.Valkey): assert await r.get("b") == b"2" @skip_if_server_version_lt("2.6.0") - async def test_set_nx(self, r: valkey.Valkey): + async def test_set_nx(self, r: valkey.asyncio.Valkey[bytes]): assert await r.set("a", "1", nx=True) assert not await r.set("a", "2", nx=True) assert await r.get("a") == b"1" @skip_if_server_version_lt("2.6.0") - async def test_set_xx(self, r: valkey.Valkey): + async def test_set_xx(self, r: 
valkey.asyncio.Valkey[bytes]): assert not await r.set("a", "1", xx=True) assert await r.get("a") is None await r.set("a", "bar") @@ -1014,38 +1017,38 @@ async def test_set_xx(self, r: valkey.Valkey): assert await r.get("a") == b"2" @skip_if_server_version_lt("2.6.0") - async def test_set_px(self, r: valkey.Valkey): + async def test_set_px(self, r: valkey.asyncio.Valkey[bytes]): assert await r.set("a", "1", px=10000) assert await r.get("a") == b"1" assert 0 < await r.pttl("a") <= 10000 assert 0 < await r.ttl("a") <= 10 @skip_if_server_version_lt("2.6.0") - async def test_set_px_timedelta(self, r: valkey.Valkey): + async def test_set_px_timedelta(self, r: valkey.asyncio.Valkey[str]): expire_at = datetime.timedelta(milliseconds=1000) assert await r.set("a", "1", px=expire_at) assert 0 < await r.pttl("a") <= 1000 assert 0 < await r.ttl("a") <= 1 @skip_if_server_version_lt("2.6.0") - async def test_set_ex(self, r: valkey.Valkey): + async def test_set_ex(self, r: valkey.asyncio.Valkey[str]): assert await r.set("a", "1", ex=10) assert 0 < await r.ttl("a") <= 10 @skip_if_server_version_lt("2.6.0") - async def test_set_ex_timedelta(self, r: valkey.Valkey): + async def test_set_ex_timedelta(self, r: valkey.asyncio.Valkey[str]): expire_at = datetime.timedelta(seconds=60) assert await r.set("a", "1", ex=expire_at) assert 0 < await r.ttl("a") <= 60 @skip_if_server_version_lt("2.6.0") - async def test_set_multipleoptions(self, r: valkey.Valkey): + async def test_set_multipleoptions(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "val") assert await r.set("a", "1", xx=True, px=10000) assert 0 < await r.ttl("a") <= 10 @skip_if_server_version_lt(VALKEY_6_VERSION) - async def test_set_keepttl(self, r: valkey.Valkey): + async def test_set_keepttl(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "val") assert await r.set("a", "1", xx=True, px=10000) assert 0 < await r.ttl("a") <= 10 @@ -1053,36 +1056,36 @@ async def test_set_keepttl(self, r: valkey.Valkey): assert await 
r.get("a") == b"2" assert 0 < await r.ttl("a") <= 10 - async def test_setex(self, r: valkey.Valkey): + async def test_setex(self, r: valkey.asyncio.Valkey[bytes]): assert await r.setex("a", 60, "1") assert await r.get("a") == b"1" assert 0 < await r.ttl("a") <= 60 - async def test_setnx(self, r: valkey.Valkey): + async def test_setnx(self, r: valkey.asyncio.Valkey[bytes]): assert await r.setnx("a", "1") assert await r.get("a") == b"1" assert not await r.setnx("a", "2") assert await r.get("a") == b"1" - async def test_setrange(self, r: valkey.Valkey): + async def test_setrange(self, r: valkey.asyncio.Valkey[bytes]): assert await r.setrange("a", 5, "foo") == 8 assert await r.get("a") == b"\0\0\0\0\0foo" await r.set("a", "abcdefghijh") assert await r.setrange("a", 6, "12345") == 11 assert await r.get("a") == b"abcdef12345" - async def test_strlen(self, r: valkey.Valkey): + async def test_strlen(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "foo") assert await r.strlen("a") == 3 - async def test_substr(self, r: valkey.Valkey): + async def test_substr(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", "0123456789") assert await r.substr("a", 0) == b"0123456789" assert await r.substr("a", 2) == b"23456789" assert await r.substr("a", 3, 5) == b"345" assert await r.substr("a", 3, -2) == b"345678" - async def test_ttl(self, r: valkey.Valkey): + async def test_ttl(self, r: valkey.asyncio.Valkey[str]): await r.set("a", "1") assert await r.expire("a", 10) assert 0 < await r.ttl("a") <= 10 @@ -1090,11 +1093,11 @@ async def test_ttl(self, r: valkey.Valkey): assert await r.ttl("a") == -1 @skip_if_server_version_lt("2.8.0") - async def test_ttl_nokey(self, r: valkey.Valkey): + async def test_ttl_nokey(self, r: valkey.asyncio.Valkey[str]): """TTL on servers 2.8 and after return -2 when the key doesn't exist""" assert await r.ttl("a") == -2 - async def test_type(self, r: valkey.Valkey): + async def test_type(self, r: valkey.asyncio.Valkey[bytes]): assert await 
r.type("a") == b"none" await r.set("a", "1") assert await r.type("a") == b"string" @@ -1110,7 +1113,7 @@ async def test_type(self, r: valkey.Valkey): # LIST COMMANDS @pytest.mark.onlynoncluster - async def test_blpop(self, r: valkey.Valkey): + async def test_blpop(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2") await r.rpush("b", "3", "4") assert_resp_response( @@ -1132,7 +1135,7 @@ async def test_blpop(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_brpop(self, r: valkey.Valkey): + async def test_brpop(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2") await r.rpush("b", "3", "4") assert_resp_response( @@ -1154,7 +1157,7 @@ async def test_brpop(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_brpoplpush(self, r: valkey.Valkey): + async def test_brpoplpush(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2") await r.rpush("b", "3", "4") assert await r.brpoplpush("a", "b") == b"2" @@ -1164,54 +1167,54 @@ async def test_brpoplpush(self, r: valkey.Valkey): assert await r.lrange("b", 0, -1) == [b"1", b"2", b"3", b"4"] @pytest.mark.onlynoncluster - async def test_brpoplpush_empty_string(self, r: valkey.Valkey): + async def test_brpoplpush_empty_string(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "") assert await r.brpoplpush("a", "b") == b"" - async def test_lindex(self, r: valkey.Valkey): + async def test_lindex(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2", "3") assert await r.lindex("a", "0") == b"1" assert await r.lindex("a", "1") == b"2" assert await r.lindex("a", "2") == b"3" - async def test_linsert(self, r: valkey.Valkey): + async def test_linsert(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2", "3") assert await r.linsert("a", "after", "2", "2.5") == 4 assert await r.lrange("a", 0, -1) == [b"1", b"2", b"2.5", b"3"] assert await r.linsert("a", "before", "2", "1.5") == 5 assert await r.lrange("a", 0, -1) == 
[b"1", b"1.5", b"2", b"2.5", b"3"] - async def test_llen(self, r: valkey.Valkey): + async def test_llen(self, r: valkey.asyncio.Valkey[str]): await r.rpush("a", "1", "2", "3") assert await r.llen("a") == 3 - async def test_lpop(self, r: valkey.Valkey): + async def test_lpop(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2", "3") assert await r.lpop("a") == b"1" assert await r.lpop("a") == b"2" assert await r.lpop("a") == b"3" assert await r.lpop("a") is None - async def test_lpush(self, r: valkey.Valkey): + async def test_lpush(self, r: valkey.asyncio.Valkey[bytes]): assert await r.lpush("a", "1") == 1 assert await r.lpush("a", "2") == 2 assert await r.lpush("a", "3", "4") == 4 assert await r.lrange("a", 0, -1) == [b"4", b"3", b"2", b"1"] - async def test_lpushx(self, r: valkey.Valkey): + async def test_lpushx(self, r: valkey.asyncio.Valkey[bytes]): assert await r.lpushx("a", "1") == 0 assert await r.lrange("a", 0, -1) == [] await r.rpush("a", "1", "2", "3") assert await r.lpushx("a", "4") == 4 assert await r.lrange("a", 0, -1) == [b"4", b"1", b"2", b"3"] - async def test_lrange(self, r: valkey.Valkey): + async def test_lrange(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2", "3", "4", "5") assert await r.lrange("a", 0, 2) == [b"1", b"2", b"3"] assert await r.lrange("a", 2, 10) == [b"3", b"4", b"5"] assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3", b"4", b"5"] - async def test_lrem(self, r: valkey.Valkey): + async def test_lrem(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "Z", "b", "Z", "Z", "c", "Z", "Z") # remove the first 'Z' item assert await r.lrem("a", 1, "Z") == 1 @@ -1223,18 +1226,18 @@ async def test_lrem(self, r: valkey.Valkey): assert await r.lrem("a", 0, "Z") == 2 assert await r.lrange("a", 0, -1) == [b"b", b"c"] - async def test_lset(self, r: valkey.Valkey): + async def test_lset(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2", "3") assert await r.lrange("a", 0, -1) == [b"1", 
b"2", b"3"] assert await r.lset("a", 1, "4") assert await r.lrange("a", 0, 2) == [b"1", b"4", b"3"] - async def test_ltrim(self, r: valkey.Valkey): + async def test_ltrim(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2", "3") assert await r.ltrim("a", 0, 1) assert await r.lrange("a", 0, -1) == [b"1", b"2"] - async def test_rpop(self, r: valkey.Valkey): + async def test_rpop(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "1", "2", "3") assert await r.rpop("a") == b"3" assert await r.rpop("a") == b"2" @@ -1242,21 +1245,21 @@ async def test_rpop(self, r: valkey.Valkey): assert await r.rpop("a") is None @pytest.mark.onlynoncluster - async def test_rpoplpush(self, r: valkey.Valkey): + async def test_rpoplpush(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "a1", "a2", "a3") await r.rpush("b", "b1", "b2", "b3") assert await r.rpoplpush("a", "b") == b"a3" assert await r.lrange("a", 0, -1) == [b"a1", b"a2"] assert await r.lrange("b", 0, -1) == [b"a3", b"b1", b"b2", b"b3"] - async def test_rpush(self, r: valkey.Valkey): + async def test_rpush(self, r: valkey.asyncio.Valkey[bytes]): assert await r.rpush("a", "1") == 1 assert await r.rpush("a", "2") == 2 assert await r.rpush("a", "3", "4") == 4 assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3", b"4"] @skip_if_server_version_lt("6.0.6") - async def test_lpos(self, r: valkey.Valkey): + async def test_lpos(self, r: valkey.asyncio.Valkey[bytes]): assert await r.rpush("a", "a", "b", "c", "1", "2", "3", "c", "c") == 8 assert await r.lpos("a", "a") == 0 assert await r.lpos("a", "c") == 2 @@ -1287,7 +1290,7 @@ async def test_lpos(self, r: valkey.Valkey): assert await r.lpos("a", "c", count=0, maxlen=3, rank=-1) == [7, 6] assert await r.lpos("a", "c", count=0, maxlen=7, rank=2) == [6] - async def test_rpushx(self, r: valkey.Valkey): + async def test_rpushx(self, r: valkey.asyncio.Valkey[bytes]): assert await r.rpushx("a", "b") == 0 assert await r.lrange("a", 0, -1) == [] await 
r.rpush("a", "1", "2", "3") @@ -1297,7 +1300,7 @@ async def test_rpushx(self, r: valkey.Valkey): # SCAN COMMANDS @skip_if_server_version_lt("2.8.0") @pytest.mark.onlynoncluster - async def test_scan(self, r: valkey.Valkey): + async def test_scan(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", 1) await r.set("b", 2) await r.set("c", 3) @@ -1309,7 +1312,7 @@ async def test_scan(self, r: valkey.Valkey): @skip_if_server_version_lt(VALKEY_6_VERSION) @pytest.mark.onlynoncluster - async def test_scan_type(self, r: valkey.Valkey): + async def test_scan_type(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a-set", 1) await r.hset("a-hash", "foo", 2) await r.lpush("a-list", "aux", 3) @@ -1318,7 +1321,7 @@ async def test_scan_type(self, r: valkey.Valkey): @skip_if_server_version_lt("2.8.0") @pytest.mark.onlynoncluster - async def test_scan_iter(self, r: valkey.Valkey): + async def test_scan_iter(self, r: valkey.asyncio.Valkey[bytes]): await r.set("a", 1) await r.set("b", 2) await r.set("c", 3) @@ -1328,7 +1331,7 @@ async def test_scan_iter(self, r: valkey.Valkey): assert set(keys) == {b"a"} @skip_if_server_version_lt("2.8.0") - async def test_sscan(self, r: valkey.Valkey): + async def test_sscan(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", 1, 2, 3) cursor, members = await r.sscan("a") assert cursor == 0 @@ -1337,7 +1340,7 @@ async def test_sscan(self, r: valkey.Valkey): assert set(members) == {b"1"} @skip_if_server_version_lt("2.8.0") - async def test_sscan_iter(self, r: valkey.Valkey): + async def test_sscan_iter(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", 1, 2, 3) members = [k async for k in r.sscan_iter("a")] assert set(members) == {b"1", b"2", b"3"} @@ -1345,7 +1348,7 @@ async def test_sscan_iter(self, r: valkey.Valkey): assert set(members) == {b"1"} @skip_if_server_version_lt("2.8.0") - async def test_hscan(self, r: valkey.Valkey): + async def test_hscan(self, r: valkey.asyncio.Valkey[bytes]): await r.hset("a", mapping={"a": 1, 
"b": 2, "c": 3}) cursor, dic = await r.hscan("a") assert cursor == 0 @@ -1355,19 +1358,20 @@ async def test_hscan(self, r: valkey.Valkey): _, dic = await r.hscan("a_notset", match="a") assert dic == {} + # TODO: is that a bug? @skip_if_server_version_lt("7.3.240") - async def test_hscan_novalues(self, r: valkey.Valkey): + async def test_hscan_novalues(self, r: valkey.asyncio.Valkey[bytes]): await r.hset("a", mapping={"a": 1, "b": 2, "c": 3}) cursor, keys = await r.hscan("a", no_values=True) assert cursor == 0 assert sorted(keys) == [b"a", b"b", b"c"] _, keys = await r.hscan("a", match="a", no_values=True) - assert keys == [b"a"] + assert keys == [b"a"] # type: ignore[comparison-overlap] _, keys = await r.hscan("a_notset", match="a", no_values=True) - assert keys == [] + assert keys == [] # type: ignore[comparison-overlap] @skip_if_server_version_lt("2.8.0") - async def test_hscan_iter(self, r: valkey.Valkey): + async def test_hscan_iter(self, r: valkey.asyncio.Valkey[bytes]): await r.hset("a", mapping={"a": 1, "b": 2, "c": 3}) dic = {k: v async for k, v in r.hscan_iter("a")} assert dic == {b"a": b"1", b"b": b"2", b"c": b"3"} @@ -1376,20 +1380,21 @@ async def test_hscan_iter(self, r: valkey.Valkey): dic = {k: v async for k, v in r.hscan_iter("a_notset", match="a")} assert dic == {} + # TODO: is that a bug? 
@skip_if_server_version_lt("7.3.240") - async def test_hscan_iter_novalues(self, r: valkey.Valkey): + async def test_hscan_iter_novalues(self, r: valkey.asyncio.Valkey[bytes]): await r.hset("a", mapping={"a": 1, "b": 2, "c": 3}) keys = list([k async for k in r.hscan_iter("a", no_values=True)]) - assert sorted(keys) == [b"a", b"b", b"c"] + assert sorted(keys) == [b"a", b"b", b"c"] # type: ignore[comparison-overlap] keys = list([k async for k in r.hscan_iter("a", match="a", no_values=True)]) - assert keys == [b"a"] + assert keys == [b"a"] # type: ignore[comparison-overlap] keys = list( [k async for k in r.hscan_iter("a", match="a_notset", no_values=True)] ) assert keys == [] @skip_if_server_version_lt("2.8.0") - async def test_zscan(self, r: valkey.Valkey): + async def test_zscan(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a": 1, "b": 2, "c": 3}) cursor, pairs = await r.zscan("a") assert cursor == 0 @@ -1398,7 +1403,7 @@ async def test_zscan(self, r: valkey.Valkey): assert set(pairs) == {(b"a", 1)} @skip_if_server_version_lt("2.8.0") - async def test_zscan_iter(self, r: valkey.Valkey): + async def test_zscan_iter(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a": 1, "b": 2, "c": 3}) pairs = [k async for k in r.zscan_iter("a")] assert set(pairs) == {(b"a", 1), (b"b", 2), (b"c", 3)} @@ -1406,78 +1411,78 @@ async def test_zscan_iter(self, r: valkey.Valkey): assert set(pairs) == {(b"a", 1)} # SET COMMANDS - async def test_sadd(self, r: valkey.Valkey): + async def test_sadd(self, r: valkey.asyncio.Valkey[bytes]): members = {b"1", b"2", b"3"} await r.sadd("a", *members) assert set(await r.smembers("a")) == members - async def test_scard(self, r: valkey.Valkey): + async def test_scard(self, r: valkey.asyncio.Valkey[str]): await r.sadd("a", "1", "2", "3") assert await r.scard("a") == 3 @pytest.mark.onlynoncluster - async def test_sdiff(self, r: valkey.Valkey): + async def test_sdiff(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", 
"2", "3") assert set(await r.sdiff("a", "b")) == {b"1", b"2", b"3"} await r.sadd("b", "2", "3") - assert await r.sdiff("a", "b") == [b"1"] + assert await r.sdiff("a", "b") == {b"1", } @pytest.mark.onlynoncluster - async def test_sdiffstore(self, r: valkey.Valkey): + async def test_sdiffstore(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2", "3") assert await r.sdiffstore("c", "a", "b") == 3 assert set(await r.smembers("c")) == {b"1", b"2", b"3"} await r.sadd("b", "2", "3") assert await r.sdiffstore("c", "a", "b") == 1 - assert await r.smembers("c") == [b"1"] + assert await r.smembers("c") == {b"1", } @pytest.mark.onlynoncluster - async def test_sinter(self, r: valkey.Valkey): + async def test_sinter(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2", "3") - assert await r.sinter("a", "b") == [] + assert await r.sinter("a", "b") == set() await r.sadd("b", "2", "3") assert set(await r.sinter("a", "b")) == {b"2", b"3"} @pytest.mark.onlynoncluster - async def test_sinterstore(self, r: valkey.Valkey): + async def test_sinterstore(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2", "3") assert await r.sinterstore("c", "a", "b") == 0 - assert await r.smembers("c") == [] + assert await r.smembers("c") == set() await r.sadd("b", "2", "3") assert await r.sinterstore("c", "a", "b") == 2 assert set(await r.smembers("c")) == {b"2", b"3"} - async def test_sismember(self, r: valkey.Valkey): + async def test_sismember(self, r: valkey.asyncio.Valkey[str]): await r.sadd("a", "1", "2", "3") assert await r.sismember("a", "1") assert await r.sismember("a", "2") assert await r.sismember("a", "3") assert not await r.sismember("a", "4") - async def test_smembers(self, r: valkey.Valkey): + async def test_smembers(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2", "3") assert set(await r.smembers("a")) == {b"1", b"2", b"3"} @pytest.mark.onlynoncluster - async def test_smove(self, r: valkey.Valkey): + async def 
test_smove(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "a1", "a2") await r.sadd("b", "b1", "b2") assert await r.smove("a", "b", "a1") - assert await r.smembers("a") == [b"a2"] + assert await r.smembers("a") == {b"a2", } assert set(await r.smembers("b")) == {b"b1", b"b2", b"a1"} - async def test_spop(self, r: valkey.Valkey): + async def test_spop(self, r: valkey.asyncio.Valkey[bytes]): s = [b"1", b"2", b"3"] await r.sadd("a", *s) - value = await r.spop("a") + value: bytes = await r.spop("a") # type: ignore[assignment] assert value in s - assert set(await r.smembers("a")) == set(s) - {value} + assert set(await r.smembers("a")) == set(s) - {value, } @skip_if_server_version_lt("3.2.0") - async def test_spop_multi_value(self, r: valkey.Valkey): + async def test_spop_multi_value(self, r: valkey.asyncio.Valkey[bytes]): s = [b"1", b"2", b"3"] await r.sadd("a", *s) - values = await r.spop("a", 2) + values: list[bytes] = await r.spop("a", 2) # type: ignore[assignment] assert len(values) == 2 for value in values: @@ -1486,42 +1491,42 @@ async def test_spop_multi_value(self, r: valkey.Valkey): response = await r.spop("a", 1) assert set(response) == set(s) - set(values) - async def test_srandmember(self, r: valkey.Valkey): + async def test_srandmember(self, r: valkey.asyncio.Valkey[str]): s = [b"1", b"2", b"3"] await r.sadd("a", *s) assert await r.srandmember("a") in s @skip_if_server_version_lt("2.6.0") - async def test_srandmember_multi_value(self, r: valkey.Valkey): + async def test_srandmember_multi_value(self, r: valkey.asyncio.Valkey[str]): s = [b"1", b"2", b"3"] await r.sadd("a", *s) randoms = await r.srandmember("a", number=2) assert len(randoms) == 2 assert set(randoms).intersection(s) == set(randoms) - async def test_srem(self, r: valkey.Valkey): + async def test_srem(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2", "3", "4") assert await r.srem("a", "5") == 0 assert await r.srem("a", "2", "4") == 2 assert set(await r.smembers("a")) == 
{b"1", b"3"} @pytest.mark.onlynoncluster - async def test_sunion(self, r: valkey.Valkey): + async def test_sunion(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2") await r.sadd("b", "2", "3") assert set(await r.sunion("a", "b")) == {b"1", b"2", b"3"} @pytest.mark.onlynoncluster - async def test_sunionstore(self, r: valkey.Valkey): + async def test_sunionstore(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2") await r.sadd("b", "2", "3") assert await r.sunionstore("c", "a", "b") == 3 assert set(await r.smembers("c")) == {b"1", b"2", b"3"} # SORTED SET COMMANDS - async def test_zadd(self, r: valkey.Valkey): + async def test_zadd(self, r: valkey.asyncio.Valkey[bytes]): mapping = {"a1": 1.0, "a2": 2.0, "a3": 3.0} - await r.zadd("a", mapping) + await r.zadd("a", mapping) # type: ignore[arg-type] response = await r.zrange("a", 0, -1, withscores=True) assert_resp_response( r, @@ -1536,13 +1541,13 @@ async def test_zadd(self, r: valkey.Valkey): # cannot use both nx and xx options with pytest.raises(exceptions.DataError): - await r.zadd("a", mapping, nx=True, xx=True) + await r.zadd("a", mapping, nx=True, xx=True) # type: ignore[arg-type] # cannot use the incr options with more than one value with pytest.raises(exceptions.DataError): - await r.zadd("a", mapping, incr=True) + await r.zadd("a", mapping, incr=True) # type: ignore[arg-type] - async def test_zadd_nx(self, r: valkey.Valkey): + async def test_zadd_nx(self, r: valkey.asyncio.Valkey[bytes]): assert await r.zadd("a", {"a1": 1}) == 1 assert await r.zadd("a", {"a1": 99, "a2": 2}, nx=True) == 1 response = await r.zrange("a", 0, -1, withscores=True) @@ -1550,13 +1555,13 @@ async def test_zadd_nx(self, r: valkey.Valkey): r, response, [(b"a1", 1.0), (b"a2", 2.0)], [[b"a1", 1.0], [b"a2", 2.0]] ) - async def test_zadd_xx(self, r: valkey.Valkey): + async def test_zadd_xx(self, r: valkey.asyncio.Valkey[bytes]): assert await r.zadd("a", {"a1": 1}) == 1 assert await r.zadd("a", {"a1": 99, "a2": 
2}, xx=True) == 0 response = await r.zrange("a", 0, -1, withscores=True) assert_resp_response(r, response, [(b"a1", 99.0)], [[b"a1", 99.0]]) - async def test_zadd_ch(self, r: valkey.Valkey): + async def test_zadd_ch(self, r: valkey.asyncio.Valkey[bytes]): assert await r.zadd("a", {"a1": 1}) == 1 assert await r.zadd("a", {"a1": 99, "a2": 2}, ch=True) == 2 response = await r.zrange("a", 0, -1, withscores=True) @@ -1564,21 +1569,21 @@ async def test_zadd_ch(self, r: valkey.Valkey): r, response, [(b"a2", 2.0), (b"a1", 99.0)], [[b"a2", 2.0], [b"a1", 99.0]] ) - async def test_zadd_incr(self, r: valkey.Valkey): + async def test_zadd_incr(self, r: valkey.asyncio.Valkey[str]): assert await r.zadd("a", {"a1": 1}) == 1 assert await r.zadd("a", {"a1": 4.5}, incr=True) == 5.5 - async def test_zadd_incr_with_xx(self, r: valkey.Valkey): + async def test_zadd_incr_with_xx(self, r: valkey.asyncio.Valkey[str]): # this asks zadd to incr 'a1' only if it exists, but it clearly # doesn't. Valkey returns a null value in this case and so should # valkey-py assert await r.zadd("a", {"a1": 1}, xx=True, incr=True) is None - async def test_zcard(self, r: valkey.Valkey): + async def test_zcard(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await r.zcard("a") == 3 - async def test_zcount(self, r: valkey.Valkey): + async def test_zcount(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await r.zcount("a", "-inf", "+inf") == 3 assert await r.zcount("a", 1, 2) == 2 @@ -1605,7 +1610,7 @@ async def test_zdiffstore(self, r): response = await r.zrange("out", 0, -1, withscores=True) assert_resp_response(r, response, [(b"a3", 3.0)], [[b"a3", 3.0]]) - async def test_zincrby(self, r: valkey.Valkey): + async def test_zincrby(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await r.zincrby("a", 1, "a2") == 3.0 assert await r.zincrby("a", 5, "a3") == 8.0 @@ -1613,13 +1618,13 @@ 
async def test_zincrby(self, r: valkey.Valkey): assert await r.zscore("a", "a3") == 8.0 @skip_if_server_version_lt("2.8.9") - async def test_zlexcount(self, r: valkey.Valkey): + async def test_zlexcount(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0}) assert await r.zlexcount("a", "-", "+") == 7 assert await r.zlexcount("a", "[b", "[f") == 5 @pytest.mark.onlynoncluster - async def test_zinterstore_sum(self, r: valkey.Valkey): + async def test_zinterstore_sum(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1}) await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -1630,7 +1635,7 @@ async def test_zinterstore_sum(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_zinterstore_max(self, r: valkey.Valkey): + async def test_zinterstore_max(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1}) await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -1641,7 +1646,7 @@ async def test_zinterstore_max(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_zinterstore_min(self, r: valkey.Valkey): + async def test_zinterstore_min(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) await r.zadd("b", {"a1": 2, "a2": 3, "a3": 5}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -1652,7 +1657,7 @@ async def test_zinterstore_min(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_zinterstore_with_weight(self, r: valkey.Valkey): + async def test_zinterstore_with_weight(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1}) await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -1663,7 +1668,7 @@ async def test_zinterstore_with_weight(self, r: valkey.Valkey): ) @skip_if_server_version_lt("4.9.0") - async def 
test_zpopmax(self, r: valkey.Valkey): + async def test_zpopmax(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) response = await r.zpopmax("a") assert_resp_response(r, response, [(b"a3", 3)], [b"a3", 3.0]) @@ -1675,7 +1680,7 @@ async def test_zpopmax(self, r: valkey.Valkey): ) @skip_if_server_version_lt("4.9.0") - async def test_zpopmin(self, r: valkey.Valkey): + async def test_zpopmin(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) response = await r.zpopmin("a") assert_resp_response(r, response, [(b"a1", 1)], [b"a1", 1.0]) @@ -1688,7 +1693,7 @@ async def test_zpopmin(self, r: valkey.Valkey): @skip_if_server_version_lt("4.9.0") @pytest.mark.onlynoncluster - async def test_bzpopmax(self, r: valkey.Valkey): + async def test_bzpopmax(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2}) await r.zadd("b", {"b1": 10, "b2": 20}) assert_resp_response( @@ -1723,7 +1728,7 @@ async def test_bzpopmax(self, r: valkey.Valkey): @skip_if_server_version_lt("4.9.0") @pytest.mark.onlynoncluster - async def test_bzpopmin(self, r: valkey.Valkey): + async def test_bzpopmin(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2}) await r.zadd("b", {"b1": 10, "b2": 20}) assert_resp_response( @@ -1756,7 +1761,7 @@ async def test_bzpopmin(self, r: valkey.Valkey): r, await r.bzpopmin("c", timeout=1), (b"c", b"c1", 100), [b"c", b"c1", 100] ) - async def test_zrange(self, r: valkey.Valkey): + async def test_zrange(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await r.zrange("a", 0, 1) == [b"a1", b"a2"] assert await r.zrange("a", 1, 2) == [b"a2", b"a3"] @@ -1778,7 +1783,7 @@ async def test_zrange(self, r: valkey.Valkey): # ] @skip_if_server_version_lt("2.8.9") - async def test_zrangebylex(self, r: valkey.Valkey): + async def test_zrangebylex(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, 
"e": 0, "f": 0, "g": 0}) assert await r.zrangebylex("a", "-", "[c") == [b"a", b"b", b"c"] assert await r.zrangebylex("a", "-", "(c") == [b"a", b"b"] @@ -1787,7 +1792,7 @@ async def test_zrangebylex(self, r: valkey.Valkey): assert await r.zrangebylex("a", "-", "+", start=3, num=2) == [b"d", b"e"] @skip_if_server_version_lt("2.9.9") - async def test_zrevrangebylex(self, r: valkey.Valkey): + async def test_zrevrangebylex(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0}) assert await r.zrevrangebylex("a", "[c", "-") == [b"c", b"b", b"a"] assert await r.zrevrangebylex("a", "(c", "-") == [b"b", b"a"] @@ -1801,7 +1806,7 @@ async def test_zrevrangebylex(self, r: valkey.Valkey): assert await r.zrevrangebylex("a", "+", "[f") == [b"g", b"f"] assert await r.zrevrangebylex("a", "+", "-", start=3, num=2) == [b"d", b"c"] - async def test_zrangebyscore(self, r: valkey.Valkey): + async def test_zrangebyscore(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zrangebyscore("a", 2, 4) == [b"a2", b"a3", b"a4"] @@ -1828,14 +1833,14 @@ async def test_zrangebyscore(self, r: valkey.Valkey): [[b"a2", 2], [b"a3", 3], [b"a4", 4]], ) - async def test_zrank(self, r: valkey.Valkey): + async def test_zrank(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zrank("a", "a1") == 0 assert await r.zrank("a", "a2") == 1 assert await r.zrank("a", "a6") is None @skip_if_server_version_lt("7.2.0") - async def test_zrank_withscore(self, r: valkey.Valkey): + async def test_zrank_withscore(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zrank("a", "a1") == 0 assert await r.zrank("a", "a2") == 1 @@ -1845,20 +1850,20 @@ async def test_zrank_withscore(self, r: valkey.Valkey): ) assert await r.zrank("a", "a6", withscore=True) is None - async 
def test_zrem(self, r: valkey.Valkey): + async def test_zrem(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await r.zrem("a", "a2") == 1 assert await r.zrange("a", 0, -1) == [b"a1", b"a3"] assert await r.zrem("a", "b") == 0 assert await r.zrange("a", 0, -1) == [b"a1", b"a3"] - async def test_zrem_multiple_keys(self, r: valkey.Valkey): + async def test_zrem_multiple_keys(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await r.zrem("a", "a1", "a2") == 2 assert await r.zrange("a", 0, 5) == [b"a3"] @skip_if_server_version_lt("2.8.9") - async def test_zremrangebylex(self, r: valkey.Valkey): + async def test_zremrangebylex(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0}) assert await r.zremrangebylex("a", "-", "[c") == 3 assert await r.zrange("a", 0, -1) == [b"d", b"e", b"f", b"g"] @@ -1867,19 +1872,19 @@ async def test_zremrangebylex(self, r: valkey.Valkey): assert await r.zremrangebylex("a", "[h", "+") == 0 assert await r.zrange("a", 0, -1) == [b"d", b"e"] - async def test_zremrangebyrank(self, r: valkey.Valkey): + async def test_zremrangebyrank(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zremrangebyrank("a", 1, 3) == 3 assert await r.zrange("a", 0, 5) == [b"a1", b"a5"] - async def test_zremrangebyscore(self, r: valkey.Valkey): + async def test_zremrangebyscore(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zremrangebyscore("a", 2, 4) == 3 assert await r.zrange("a", 0, -1) == [b"a1", b"a5"] assert await r.zremrangebyscore("a", 2, 4) == 0 assert await r.zrange("a", 0, -1) == [b"a1", b"a5"] - async def test_zrevrange(self, r: valkey.Valkey): + async def test_zrevrange(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await 
r.zrevrange("a", 0, 1) == [b"a3", b"a2"] assert await r.zrevrange("a", 1, 2) == [b"a2", b"a1"] @@ -1900,7 +1905,7 @@ async def test_zrevrange(self, r: valkey.Valkey): r, response, [(b"a3", 3), (b"a2", 2)], [[b"a3", 3], [b"a2", 2]] ) - async def test_zrevrangebyscore(self, r: valkey.Valkey): + async def test_zrevrangebyscore(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zrevrangebyscore("a", 4, 2) == [b"a4", b"a3", b"a2"] @@ -1927,14 +1932,14 @@ async def test_zrevrangebyscore(self, r: valkey.Valkey): [[b"a4", 4], [b"a3", 3], [b"a2", 2]], ) - async def test_zrevrank(self, r: valkey.Valkey): + async def test_zrevrank(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zrevrank("a", "a1") == 4 assert await r.zrevrank("a", "a2") == 3 assert await r.zrevrank("a", "a6") is None @skip_if_server_version_lt("7.2.0") - async def test_zrevrank_withscore(self, r: valkey.Valkey): + async def test_zrevrank_withscore(self, r: valkey.asyncio.Valkey[bytes]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert await r.zrevrank("a", "a1") == 4 assert await r.zrevrank("a", "a2") == 3 @@ -1944,14 +1949,14 @@ async def test_zrevrank_withscore(self, r: valkey.Valkey): ) assert await r.zrevrank("a", "a6", withscore=True) is None - async def test_zscore(self, r: valkey.Valkey): + async def test_zscore(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) assert await r.zscore("a", "a1") == 1.0 assert await r.zscore("a", "a2") == 2.0 assert await r.zscore("a", "a4") is None @pytest.mark.onlynoncluster - async def test_zunionstore_sum(self, r: valkey.Valkey): + async def test_zunionstore_sum(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1}) await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -1965,7 +1970,7 @@ async def 
test_zunionstore_sum(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_zunionstore_max(self, r: valkey.Valkey): + async def test_zunionstore_max(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1}) await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -1979,7 +1984,7 @@ async def test_zunionstore_max(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_zunionstore_min(self, r: valkey.Valkey): + async def test_zunionstore_min(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3}) await r.zadd("b", {"a1": 2, "a2": 2, "a3": 4}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -1993,7 +1998,7 @@ async def test_zunionstore_min(self, r: valkey.Valkey): ) @pytest.mark.onlynoncluster - async def test_zunionstore_with_weight(self, r: valkey.Valkey): + async def test_zunionstore_with_weight(self, r: valkey.asyncio.Valkey[str]): await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1}) await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2}) await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4}) @@ -2008,7 +2013,7 @@ async def test_zunionstore_with_weight(self, r: valkey.Valkey): # HYPERLOGLOG TESTS @skip_if_server_version_lt("2.8.9") - async def test_pfadd(self, r: valkey.Valkey): + async def test_pfadd(self, r: valkey.asyncio.Valkey[str]): members = {b"1", b"2", b"3"} assert await r.pfadd("a", *members) == 1 assert await r.pfadd("a", *members) == 0 @@ -2016,18 +2021,18 @@ async def test_pfadd(self, r: valkey.Valkey): @skip_if_server_version_lt("2.8.9") @pytest.mark.onlynoncluster - async def test_pfcount(self, r: valkey.Valkey): + async def test_pfcount(self, r: valkey.asyncio.Valkey[str]): members = {b"1", b"2", b"3"} await r.pfadd("a", *members) assert await r.pfcount("a") == len(members) members_b = {b"2", b"3", b"4"} await r.pfadd("b", *members_b) assert await r.pfcount("b") == len(members_b) - assert await r.pfcount("a", "b") == 
len(members_b.union(members)) + assert await r.pfcount("a", "b") == len(members_b.union(members)) # type: ignore[call-arg] @skip_if_server_version_lt("2.8.9") @pytest.mark.onlynoncluster - async def test_pfmerge(self, r: valkey.Valkey): + async def test_pfmerge(self, r: valkey.asyncio.Valkey[str]): mema = {b"1", b"2", b"3"} memb = {b"2", b"3", b"4"} memc = {b"5", b"6", b"7"} @@ -2040,7 +2045,7 @@ async def test_pfmerge(self, r: valkey.Valkey): assert await r.pfcount("d") == 7 # HASH COMMANDS - async def test_hget_and_hset(self, r: valkey.Valkey): + async def test_hget_and_hset(self, r: valkey.asyncio.Valkey[bytes]): await r.hset("a", mapping={"1": 1, "2": 2, "3": 3}) assert await r.hget("a", "1") == b"1" assert await r.hget("a", "2") == b"2" @@ -2058,10 +2063,10 @@ async def test_hget_and_hset(self, r: valkey.Valkey): assert await r.hget("a", "b") is None # keys with bool(key) == False - assert await r.hset("a", 0, 10) == 1 + assert await r.hset("a", 0, 10) == 1 # type: ignore[call-overload] assert await r.hset("a", "", 10) == 1 - async def test_hset_with_multi_key_values(self, r: valkey.Valkey): + async def test_hset_with_multi_key_values(self, r: valkey.asyncio.Valkey[bytes]): await r.hset("a", mapping={"1": 1, "2": 2, "3": 3}) assert await r.hget("a", "1") == b"1" assert await r.hget("a", "2") == b"2" @@ -2072,94 +2077,94 @@ async def test_hset_with_multi_key_values(self, r: valkey.Valkey): assert await r.hget("b", "2") == b"2" assert await r.hget("b", "foo") == b"bar" - async def test_hset_without_data(self, r: valkey.Valkey): + async def test_hset_without_data(self, r: valkey.asyncio.Valkey[str]): with pytest.raises(exceptions.DataError): - await r.hset("x") + await r.hset("x") # type: ignore[call-overload] - async def test_hdel(self, r: valkey.Valkey): + async def test_hdel(self, r: valkey.asyncio.Valkey[str]): await r.hset("a", mapping={"1": 1, "2": 2, "3": 3}) assert await r.hdel("a", "2") == 1 assert await r.hget("a", "2") is None assert await r.hdel("a", 
"1", "3") == 2 assert await r.hlen("a") == 0 - async def test_hexists(self, r: valkey.Valkey): + async def test_hexists(self, r: valkey.asyncio.Valkey[str]): await r.hset("a", mapping={"1": 1, "2": 2, "3": 3}) assert await r.hexists("a", "1") assert not await r.hexists("a", "4") - async def test_hgetall(self, r: valkey.Valkey): + async def test_hgetall(self, r: valkey.asyncio.Valkey[bytes]): h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"} - await r.hset("a", mapping=h) + await r.hset("a", mapping=h) # type: ignore[arg-type] assert await r.hgetall("a") == h - async def test_hincrby(self, r: valkey.Valkey): + async def test_hincrby(self, r: valkey.asyncio.Valkey[str]): assert await r.hincrby("a", "1") == 1 assert await r.hincrby("a", "1", amount=2) == 3 assert await r.hincrby("a", "1", amount=-2) == 1 @skip_if_server_version_lt("2.6.0") - async def test_hincrbyfloat(self, r: valkey.Valkey): + async def test_hincrbyfloat(self, r: valkey.asyncio.Valkey[str]): assert await r.hincrbyfloat("a", "1") == 1.0 assert await r.hincrbyfloat("a", "1") == 2.0 assert await r.hincrbyfloat("a", "1", 1.2) == 3.2 - async def test_hkeys(self, r: valkey.Valkey): + async def test_hkeys(self, r: valkey.asyncio.Valkey[bytes]): h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"} - await r.hset("a", mapping=h) + await r.hset("a", mapping=h) # type: ignore[arg-type] local_keys = list(h.keys()) remote_keys = await r.hkeys("a") assert sorted(local_keys) == sorted(remote_keys) - async def test_hlen(self, r: valkey.Valkey): + async def test_hlen(self, r: valkey.asyncio.Valkey[str]): await r.hset("a", mapping={"1": 1, "2": 2, "3": 3}) assert await r.hlen("a") == 3 - async def test_hmget(self, r: valkey.Valkey): + async def test_hmget(self, r: valkey.asyncio.Valkey[bytes]): assert await r.hset("a", mapping={"a": 1, "b": 2, "c": 3}) assert await r.hmget("a", "a", "b", "c") == [b"1", b"2", b"3"] - async def test_hmset(self, r: valkey.Valkey): + async def test_hmset(self, r: valkey.asyncio.Valkey[bytes]): 
warning_message = ( r"^Valkey(?:Cluster)*\.hmset\(\) is deprecated\. " r"Use Valkey(?:Cluster)*\.hset\(\) instead\.$" ) h = {b"a": b"1", b"b": b"2", b"c": b"3"} with pytest.warns(DeprecationWarning, match=warning_message): - assert await r.hmset("a", h) + assert await r.hmset("a", h) # type: ignore[arg-type] assert await r.hgetall("a") == h - async def test_hsetnx(self, r: valkey.Valkey): + async def test_hsetnx(self, r: valkey.asyncio.Valkey[bytes]): # Initially set the hash field assert await r.hsetnx("a", "1", 1) assert await r.hget("a", "1") == b"1" assert not await r.hsetnx("a", "1", 2) assert await r.hget("a", "1") == b"1" - async def test_hvals(self, r: valkey.Valkey): + async def test_hvals(self, r: valkey.asyncio.Valkey[bytes]): h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"} - await r.hset("a", mapping=h) + await r.hset("a", mapping=h) # type: ignore[arg-type] local_vals = list(h.values()) remote_vals = await r.hvals("a") assert sorted(local_vals) == sorted(remote_vals) @skip_if_server_version_lt("3.2.0") - async def test_hstrlen(self, r: valkey.Valkey): + async def test_hstrlen(self, r: valkey.asyncio.Valkey[str]): await r.hset("a", mapping={"1": "22", "2": "333"}) assert await r.hstrlen("a", "1") == 2 assert await r.hstrlen("a", "2") == 3 # SORT - async def test_sort_basic(self, r: valkey.Valkey): + async def test_sort_basic(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "3", "2", "1", "4") assert await r.sort("a") == [b"1", b"2", b"3", b"4"] - async def test_sort_limited(self, r: valkey.Valkey): + async def test_sort_limited(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "3", "2", "1", "4") assert await r.sort("a", start=1, num=2) == [b"2", b"3"] @pytest.mark.onlynoncluster - async def test_sort_by(self, r: valkey.Valkey): + async def test_sort_by(self, r: valkey.asyncio.Valkey[bytes]): await r.set("score:1", 8) await r.set("score:2", 3) await r.set("score:3", 5) @@ -2167,7 +2172,7 @@ async def test_sort_by(self, r: valkey.Valkey): 
assert await r.sort("a", by="score:*") == [b"2", b"3", b"1"] @pytest.mark.onlynoncluster - async def test_sort_get(self, r: valkey.Valkey): + async def test_sort_get(self, r: valkey.asyncio.Valkey[bytes]): await r.set("user:1", "u1") await r.set("user:2", "u2") await r.set("user:3", "u3") @@ -2175,7 +2180,7 @@ async def test_sort_get(self, r: valkey.Valkey): assert await r.sort("a", get="user:*") == [b"u1", b"u2", b"u3"] @pytest.mark.onlynoncluster - async def test_sort_get_multi(self, r: valkey.Valkey): + async def test_sort_get_multi(self, r: valkey.asyncio.Valkey[bytes]): await r.set("user:1", "u1") await r.set("user:2", "u2") await r.set("user:3", "u3") @@ -2190,19 +2195,19 @@ async def test_sort_get_multi(self, r: valkey.Valkey): ] @pytest.mark.onlynoncluster - async def test_sort_get_groups_two(self, r: valkey.Valkey): + async def test_sort_get_groups_two(self, r: valkey.asyncio.Valkey[bytes]): await r.set("user:1", "u1") await r.set("user:2", "u2") await r.set("user:3", "u3") await r.rpush("a", "2", "3", "1") - assert await r.sort("a", get=("user:*", "#"), groups=True) == [ + assert await r.sort("a", get=("user:*", "#"), groups=True) == [ # type: ignore[comparison-overlap] (b"u1", b"1"), (b"u2", b"2"), (b"u3", b"3"), ] @pytest.mark.onlynoncluster - async def test_sort_groups_string_get(self, r: valkey.Valkey): + async def test_sort_groups_string_get(self, r: valkey.asyncio.Valkey[str]): await r.set("user:1", "u1") await r.set("user:2", "u2") await r.set("user:3", "u3") @@ -2211,7 +2216,7 @@ async def test_sort_groups_string_get(self, r: valkey.Valkey): await r.sort("a", get="user:*", groups=True) @pytest.mark.onlynoncluster - async def test_sort_groups_just_one_get(self, r: valkey.Valkey): + async def test_sort_groups_just_one_get(self, r: valkey.asyncio.Valkey[str]): await r.set("user:1", "u1") await r.set("user:2", "u2") await r.set("user:3", "u3") @@ -2219,7 +2224,7 @@ async def test_sort_groups_just_one_get(self, r: valkey.Valkey): with 
pytest.raises(exceptions.DataError): await r.sort("a", get=["user:*"], groups=True) - async def test_sort_groups_no_get(self, r: valkey.Valkey): + async def test_sort_groups_no_get(self, r: valkey.asyncio.Valkey[str]): await r.set("user:1", "u1") await r.set("user:2", "u2") await r.set("user:3", "u3") @@ -2228,7 +2233,7 @@ async def test_sort_groups_no_get(self, r: valkey.Valkey): await r.sort("a", groups=True) @pytest.mark.onlynoncluster - async def test_sort_groups_three_gets(self, r: valkey.Valkey): + async def test_sort_groups_three_gets(self, r: valkey.asyncio.Valkey[bytes]): await r.set("user:1", "u1") await r.set("user:2", "u2") await r.set("user:3", "u3") @@ -2236,28 +2241,28 @@ async def test_sort_groups_three_gets(self, r: valkey.Valkey): await r.set("door:2", "d2") await r.set("door:3", "d3") await r.rpush("a", "2", "3", "1") - assert await r.sort("a", get=("user:*", "door:*", "#"), groups=True) == [ + assert await r.sort("a", get=("user:*", "door:*", "#"), groups=True) == [ # type: ignore[comparison-overlap] (b"u1", b"d1", b"1"), (b"u2", b"d2", b"2"), (b"u3", b"d3", b"3"), ] - async def test_sort_desc(self, r: valkey.Valkey): + async def test_sort_desc(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "2", "3", "1") assert await r.sort("a", desc=True) == [b"3", b"2", b"1"] - async def test_sort_alpha(self, r: valkey.Valkey): + async def test_sort_alpha(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "e", "c", "b", "d", "a") assert await r.sort("a", alpha=True) == [b"a", b"b", b"c", b"d", b"e"] @pytest.mark.onlynoncluster - async def test_sort_store(self, r: valkey.Valkey): + async def test_sort_store(self, r: valkey.asyncio.Valkey[bytes]): await r.rpush("a", "2", "3", "1") assert await r.sort("a", store="sorted_values") == 3 assert await r.lrange("sorted_values", 0, -1) == [b"1", b"2", b"3"] @pytest.mark.onlynoncluster - async def test_sort_all_options(self, r: valkey.Valkey): + async def test_sort_all_options(self, r: 
valkey.asyncio.Valkey[bytes]): await r.set("user:1:username", "zeus") await r.set("user:2:username", "titan") await r.set("user:3:username", "hermes") @@ -2295,7 +2300,7 @@ async def test_sort_all_options(self, r: valkey.Valkey): b"apple juice", ] - async def test_sort_issue_924(self, r: valkey.Valkey): + async def test_sort_issue_924(self, r: valkey.asyncio.Valkey[str]): # Tests for issue https://github.com/andymccurdy/redis-py/issues/924 await r.execute_command("SADD", "issue#924", 1) await r.execute_command("SORT", "issue#924") @@ -2372,12 +2377,12 @@ async def test_cluster_slaves(self, mock_cluster_resp_slaves): @skip_if_server_version_lt("3.0.0") @skip_if_server_version_gte("7.0.0") @pytest.mark.onlynoncluster - async def test_readwrite(self, r: valkey.Valkey): + async def test_readwrite(self, r: valkey.asyncio.Valkey[str]): assert await r.readwrite() @skip_if_server_version_lt("3.0.0") @pytest.mark.onlynoncluster - async def test_readonly_invalid_cluster_state(self, r: valkey.Valkey): + async def test_readonly_invalid_cluster_state(self, r: valkey.asyncio.Valkey[str]): with pytest.raises(exceptions.ValkeyError): await r.readonly() @@ -2388,7 +2393,7 @@ async def test_readonly(self, mock_cluster_resp_ok): # GEO COMMANDS @skip_if_server_version_lt("3.2.0") - async def test_geoadd(self, r: valkey.Valkey): + async def test_geoadd(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2399,12 +2404,12 @@ async def test_geoadd(self, r: valkey.Valkey): assert await r.zcard("barcelona") == 2 @skip_if_server_version_lt("3.2.0") - async def test_geoadd_invalid_params(self, r: valkey.Valkey): + async def test_geoadd_invalid_params(self, r: valkey.asyncio.Valkey[str]): with pytest.raises(exceptions.ValkeyError): await r.geoadd("barcelona", (1, 2)) @skip_if_server_version_lt("3.2.0") - async def test_geodist(self, r: valkey.Valkey): + async def test_geodist(self, r: valkey.asyncio.Valkey[str]): 
values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2415,7 +2420,7 @@ async def test_geodist(self, r: valkey.Valkey): assert await r.geodist("barcelona", "place1", "place2") == 3067.4157 @skip_if_server_version_lt("3.2.0") - async def test_geodist_units(self, r: valkey.Valkey): + async def test_geodist_units(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2426,18 +2431,18 @@ async def test_geodist_units(self, r: valkey.Valkey): assert await r.geodist("barcelona", "place1", "place2", "km") == 3.0674 @skip_if_server_version_lt("3.2.0") - async def test_geodist_missing_one_member(self, r: valkey.Valkey): + async def test_geodist_missing_one_member(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") await r.geoadd("barcelona", values) assert await r.geodist("barcelona", "place1", "missing_member", "km") is None @skip_if_server_version_lt("3.2.0") - async def test_geodist_invalid_units(self, r: valkey.Valkey): + async def test_geodist_invalid_units(self, r: valkey.asyncio.Valkey[str]): with pytest.raises(exceptions.ValkeyError): assert await r.geodist("x", "y", "z", "inches") @skip_if_server_version_lt("3.2.0") - async def test_geohash(self, r: valkey.Valkey): + async def test_geohash(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2453,7 +2458,7 @@ async def test_geohash(self, r: valkey.Valkey): ) @skip_if_server_version_lt("3.2.0") - async def test_geopos(self, r: valkey.Valkey): + async def test_geopos(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2476,16 +2481,16 @@ async def test_geopos(self, r: valkey.Valkey): ) @skip_if_server_version_lt("4.0.0") - async def test_geopos_no_value(self, r: valkey.Valkey): + async def 
test_geopos_no_value(self, r: valkey.asyncio.Valkey[str]): assert await r.geopos("barcelona", "place1", "place2") == [None, None] @skip_if_server_version_lt("3.2.0") @skip_if_server_version_gte("4.0.0") - async def test_old_geopos_no_value(self, r: valkey.Valkey): + async def test_old_geopos_no_value(self, r: valkey.asyncio.Valkey[str]): assert await r.geopos("barcelona", "place1", "place2") == [] @skip_if_server_version_lt("3.2.0") - async def test_georadius(self, r: valkey.Valkey): + async def test_georadius(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2497,7 +2502,7 @@ async def test_georadius(self, r: valkey.Valkey): assert await r.georadius("barcelona", 2.187, 41.406, 1000) == [b"\x80place2"] @skip_if_server_version_lt("3.2.0") - async def test_georadius_no_values(self, r: valkey.Valkey): + async def test_georadius_no_values(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2508,7 +2513,7 @@ async def test_georadius_no_values(self, r: valkey.Valkey): assert await r.georadius("barcelona", 1, 2, 1000) == [] @skip_if_server_version_lt("3.2.0") - async def test_georadius_units(self, r: valkey.Valkey): + async def test_georadius_units(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2544,7 +2549,7 @@ async def test_georadius_units(self, r: valkey.Valkey): ], ) async def test_georadius_with( - self, r: valkey.Valkey, georadius_kwargs, expected_georadius_result + self, r: valkey.asyncio.Valkey[str], georadius_kwargs, expected_georadius_result ): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, @@ -2588,7 +2593,7 @@ async def test_georadius_with( ) @skip_if_server_version_lt("3.2.0") - async def test_georadius_count(self, r: valkey.Valkey): + async def test_georadius_count(self, r: 
valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2601,7 +2606,7 @@ async def test_georadius_count(self, r: valkey.Valkey): ] @skip_if_server_version_lt("3.2.0") - async def test_georadius_sort(self, r: valkey.Valkey): + async def test_georadius_sort(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2620,7 +2625,7 @@ async def test_georadius_sort(self, r: valkey.Valkey): @skip_if_server_version_lt("3.2.0") @pytest.mark.onlynoncluster - async def test_georadius_store(self, r: valkey.Valkey): + async def test_georadius_store(self, r: valkey.asyncio.Valkey[bytes]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2634,7 +2639,7 @@ async def test_georadius_store(self, r: valkey.Valkey): @skip_unless_arch_bits(64) @skip_if_server_version_lt("3.2.0") @pytest.mark.onlynoncluster - async def test_georadius_store_dist(self, r: valkey.Valkey): + async def test_georadius_store_dist(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2647,11 +2652,11 @@ async def test_georadius_store_dist(self, r: valkey.Valkey): ) # instead of save the geo score, the distance is saved. 
z_score = await r.zscore("places_barcelona", "place1") - assert math.isclose(z_score, 88.05060698409301) + assert math.isclose(z_score, 88.05060698409301) # type: ignore[arg-type] @skip_unless_arch_bits(64) @skip_if_server_version_lt("3.2.0") - async def test_georadiusmember(self, r: valkey.Valkey): + async def test_georadiusmember(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2684,7 +2689,7 @@ async def test_georadiusmember(self, r: valkey.Valkey): ] @skip_if_server_version_lt("5.0.0") - async def test_xack(self, r: valkey.Valkey): + async def test_xack(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" consumer = "consumer" @@ -2705,7 +2710,7 @@ async def test_xack(self, r: valkey.Valkey): assert await r.xack(stream, group, m2, m3) == 2 @skip_if_server_version_lt("5.0.0") - async def test_xadd(self, r: valkey.Valkey): + async def test_xadd(self, r: valkey.asyncio.Valkey[str]): stream = "stream" message_id = await r.xadd(stream, {"foo": "bar"}) assert re.match(rb"[0-9]+\-[0-9]+", message_id) @@ -2719,7 +2724,7 @@ async def test_xadd(self, r: valkey.Valkey): assert await r.xlen(stream) == 2 @skip_if_server_version_lt("5.0.0") - async def test_xclaim(self, r: valkey.Valkey): + async def test_xclaim(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" consumer1 = "consumer1" @@ -2757,7 +2762,7 @@ async def test_xclaim(self, r: valkey.Valkey): ) == [message_id] @skip_if_server_version_lt("7.0.0") - async def test_xclaim_trimmed(self, r: valkey.Valkey): + async def test_xclaim_trimmed(self, r: valkey.asyncio.Valkey[str]): # xclaim should not raise an exception if the item is not there stream = "stream" group = "group" @@ -2781,7 +2786,7 @@ async def test_xclaim_trimmed(self, r: valkey.Valkey): assert item[0][0] == sid2 @skip_if_server_version_lt("5.0.0") - async def test_xdel(self, r: valkey.Valkey): + async def test_xdel(self, r: 
valkey.asyncio.Valkey[str]): stream = "stream" # deleting from an empty stream doesn't do anything @@ -2796,7 +2801,7 @@ async def test_xdel(self, r: valkey.Valkey): assert await r.xdel(stream, m2, m3) == 2 @skip_if_server_version_lt("7.0.0") - async def test_xgroup_create(self, r: valkey.Valkey): + async def test_xgroup_create(self, r: valkey.asyncio.Valkey[str]): # tests xgroup_create and xinfo_groups stream = "stream" group = "group" @@ -2819,7 +2824,7 @@ async def test_xgroup_create(self, r: valkey.Valkey): assert await r.xinfo_groups(stream) == expected @skip_if_server_version_lt("7.0.0") - async def test_xgroup_create_mkstream(self, r: valkey.Valkey): + async def test_xgroup_create_mkstream(self, r: valkey.asyncio.Valkey[str]): # tests xgroup_create and xinfo_groups stream = "stream" group = "group" @@ -2845,7 +2850,7 @@ async def test_xgroup_create_mkstream(self, r: valkey.Valkey): assert await r.xinfo_groups(stream) == expected @skip_if_server_version_lt("5.0.0") - async def test_xgroup_delconsumer(self, r: valkey.Valkey): + async def test_xgroup_delconsumer(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" consumer = "consumer" @@ -2863,7 +2868,7 @@ async def test_xgroup_delconsumer(self, r: valkey.Valkey): assert await r.xgroup_delconsumer(stream, group, consumer) == 2 @skip_if_server_version_lt("5.0.0") - async def test_xgroup_destroy(self, r: valkey.Valkey): + async def test_xgroup_destroy(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" await r.xadd(stream, {"foo": "bar"}) @@ -2875,7 +2880,7 @@ async def test_xgroup_destroy(self, r: valkey.Valkey): assert await r.xgroup_destroy(stream, group) @skip_if_server_version_lt("7.0.0") - async def test_xgroup_setid(self, r: valkey.Valkey): + async def test_xgroup_setid(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" message_id = await r.xadd(stream, {"foo": "bar"}) @@ -2896,7 +2901,7 @@ async def test_xgroup_setid(self, r: valkey.Valkey): 
assert await r.xinfo_groups(stream) == expected @skip_if_server_version_lt("7.2.0") - async def test_xinfo_consumers(self, r: valkey.Valkey): + async def test_xinfo_consumers(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" consumer1 = "consumer1" @@ -2923,7 +2928,7 @@ async def test_xinfo_consumers(self, r: valkey.Valkey): assert info == expected @skip_if_server_version_lt("5.0.0") - async def test_xinfo_stream(self, r: valkey.Valkey): + async def test_xinfo_stream(self, r: valkey.asyncio.Valkey[str]): stream = "stream" m1 = await r.xadd(stream, {"foo": "bar"}) m2 = await r.xadd(stream, {"foo": "bar"}) @@ -2940,7 +2945,7 @@ async def test_xinfo_stream(self, r: valkey.Valkey): assert info["last-entry"] is None @skip_if_server_version_lt("6.0.0") - async def test_xinfo_stream_full(self, r: valkey.Valkey): + async def test_xinfo_stream_full(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" @@ -2959,7 +2964,7 @@ async def test_xinfo_stream_full(self, r: valkey.Valkey): assert isinstance(consumer, dict) @skip_if_server_version_lt("5.0.0") - async def test_xlen(self, r: valkey.Valkey): + async def test_xlen(self, r: valkey.asyncio.Valkey[str]): stream = "stream" assert await r.xlen(stream) == 0 await r.xadd(stream, {"foo": "bar"}) @@ -2967,7 +2972,7 @@ async def test_xlen(self, r: valkey.Valkey): assert await r.xlen(stream) == 2 @skip_if_server_version_lt("5.0.0") - async def test_xpending(self, r: valkey.Valkey): + async def test_xpending(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" consumer1 = "consumer1" @@ -2977,7 +2982,7 @@ async def test_xpending(self, r: valkey.Valkey): await r.xgroup_create(stream, group, 0) # xpending on a group that has no consumers yet - expected = {"pending": 0, "min": None, "max": None, "consumers": []} + expected: dict[str, int | None | list[Any]] = {"pending": 0, "min": None, "max": None, "consumers": []} assert await r.xpending(stream, group) == expected # read 1 
message from the group with each consumer @@ -2996,7 +3001,7 @@ async def test_xpending(self, r: valkey.Valkey): assert await r.xpending(stream, group) == expected @skip_if_server_version_lt("5.0.0") - async def test_xpending_range(self, r: valkey.Valkey): + async def test_xpending_range(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" consumer1 = "consumer1" @@ -3020,7 +3025,7 @@ async def test_xpending_range(self, r: valkey.Valkey): assert response[1]["consumer"] == consumer2.encode() @skip_if_server_version_lt("5.0.0") - async def test_xrange(self, r: valkey.Valkey): + async def test_xrange(self, r: valkey.asyncio.Valkey[str]): stream = "stream" m1 = await r.xadd(stream, {"foo": "bar"}) m2 = await r.xadd(stream, {"foo": "bar"}) @@ -3043,7 +3048,7 @@ def get_ids(results): assert get_ids(results) == [m1] @skip_if_server_version_lt("5.0.0") - async def test_xread(self, r: valkey.Valkey): + async def test_xread(self, r: valkey.asyncio.Valkey[str]): stream = "stream" m1 = await r.xadd(stream, {"foo": "bar"}) m2 = await r.xadd(stream, {"bing": "baz"}) @@ -3074,7 +3079,7 @@ async def test_xread(self, r: valkey.Valkey): ) @skip_if_server_version_lt("5.0.0") - async def test_xreadgroup(self, r: valkey.Valkey): + async def test_xreadgroup(self, r: valkey.asyncio.Valkey[str]): stream = "stream" group = "group" consumer = "consumer" @@ -3141,7 +3146,7 @@ async def test_xreadgroup(self, r: valkey.Valkey): ) @skip_if_server_version_lt("5.0.0") - async def test_xrevrange(self, r: valkey.Valkey): + async def test_xrevrange(self, r: valkey.asyncio.Valkey[str]): stream = "stream" m1 = await r.xadd(stream, {"foo": "bar"}) m2 = await r.xadd(stream, {"foo": "bar"}) @@ -3164,7 +3169,7 @@ def get_ids(results): assert get_ids(results) == [m4] @skip_if_server_version_lt("5.0.0") - async def test_xtrim(self, r: valkey.Valkey): + async def test_xtrim(self, r: valkey.asyncio.Valkey[str]): stream = "stream" # trimming an empty key doesn't do anything @@ -3183,7 
+3188,7 @@ async def test_xtrim(self, r: valkey.Valkey): assert await r.xtrim(stream, 3, approximate=False) == 1 @pytest.mark.onlynoncluster - async def test_bitfield_operations(self, r: valkey.Valkey): + async def test_bitfield_operations(self, r: valkey.asyncio.Valkey[str]): # comments show affected bits await r.execute_command("SELECT", 10) bf = r.bitfield("a") @@ -3253,7 +3258,7 @@ async def test_bitfield_operations(self, r: valkey.Valkey): assert resp == [0, None, 255] @skip_if_server_version_lt("6.0.0") - async def test_bitfield_ro(self, r: valkey.Valkey): + async def test_bitfield_ro(self, r: valkey.asyncio.Valkey[str]): bf = r.bitfield("a") resp = await bf.set("u8", 8, 255).execute() assert resp == [0] @@ -3266,7 +3271,7 @@ async def test_bitfield_ro(self, r: valkey.Valkey): assert resp == [0, 15, 15, 14] @skip_if_server_version_lt("4.0.0") - async def test_memory_stats(self, r: valkey.Valkey): + async def test_memory_stats(self, r: valkey.asyncio.Valkey[str]): # put a key into the current db to make sure that "db." 
# has data await r.set("foo", "bar") @@ -3277,18 +3282,18 @@ async def test_memory_stats(self, r: valkey.Valkey): assert not isinstance(value, list) @skip_if_server_version_lt("4.0.0") - async def test_memory_usage(self, r: valkey.Valkey): + async def test_memory_usage(self, r: valkey.asyncio.Valkey[str]): await r.set("foo", "bar") assert isinstance(await r.memory_usage("foo"), int) @skip_if_server_version_lt("4.0.0") - async def test_module_list(self, r: valkey.Valkey): + async def test_module_list(self, r: valkey.asyncio.Valkey[str]): assert isinstance(await r.module_list(), list) for x in await r.module_list(): assert isinstance(x, dict) @pytest.mark.onlynoncluster - async def test_interrupted_command(self, r: valkey.Valkey): + async def test_interrupted_command(self, r: valkey.asyncio.Valkey[str]): """ Regression test for issue #1128: An Un-handled BaseException will leave the socket with un-read response to a previous @@ -3305,7 +3310,7 @@ async def helper(): # because the timeout won't catch its Cancelled Error if the task # has a pending cancel. Python documentation probably should reflect this. if sys.version_info >= (3, 11): - asyncio.current_task().uncancel() + asyncio.current_task().uncancel() # type: ignore[union-attr] # if all is well, we can continue. The following should not hang. 
await r.set("status", "down") @@ -3321,7 +3326,7 @@ async def helper(): @pytest.mark.onlynoncluster class TestBinarySave: - async def test_binary_get_set(self, r: valkey.Valkey): + async def test_binary_get_set(self, r: valkey.asyncio.Valkey[bytes]): assert await r.set(" foo bar ", "123") assert await r.get(" foo bar ") == b"123" @@ -3341,7 +3346,7 @@ async def test_binary_get_set(self, r: valkey.Valkey): assert await r.delete(" foo\r\nbar\r\n ") assert await r.delete(" \r\n\t\x07\x13 ") - async def test_binary_lists(self, r: valkey.Valkey): + async def test_binary_lists(self, r: valkey.asyncio.Valkey[bytes]): mapping = { b"foo bar": [b"1", b"2", b"3"], b"foo\r\nbar\r\n": [b"4", b"5", b"6"], @@ -3358,7 +3363,7 @@ async def test_binary_lists(self, r: valkey.Valkey): for key, value in mapping.items(): assert await r.lrange(key, 0, -1) == value - async def test_22_info(self, r: valkey.Valkey): + async def test_22_info(self, r: valkey.asyncio.Valkey[str]): info = ( "allocation_stats:6=1,7=1,8=7141,9=180,10=92,11=116,12=5330," "13=123,14=3091,15=11048,16=225842,17=1784,18=814,19=12020," @@ -3390,14 +3395,14 @@ async def test_22_info(self, r: valkey.Valkey): assert "6" in parsed["allocation_stats"] assert ">=256" in parsed["allocation_stats"] - async def test_large_responses(self, r: valkey.Valkey): + async def test_large_responses(self, r: valkey.asyncio.Valkey[bytes]): """The PythonParser has some special cases for return values > 1MB""" # load up 5MB of data into a key data = "".join([ascii_letters] * (5000000 // len(ascii_letters))) await r.set("a", data) assert await r.get("a") == data.encode() - async def test_floating_point_encoding(self, r: valkey.Valkey): + async def test_floating_point_encoding(self, r: valkey.asyncio.Valkey[str]): """ High precision floating point values sent to the server should keep precision. 
diff --git a/tests/test_commands.py b/tests/test_commands.py index 49bc6b4e..8b18b7e6 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import binascii import datetime import math @@ -6,6 +8,7 @@ import time from asyncio import CancelledError from string import ascii_letters +from typing import Any from unittest import mock from unittest.mock import patch @@ -544,7 +547,7 @@ def test_client_setname(self, r): assert_resp_response(r, r.client_getname(), "valkey_py_test", b"valkey_py_test") @skip_if_server_version_lt("7.2.0") - def test_client_setinfo(self, r: valkey.Valkey): + def test_client_setinfo(self, r: valkey.Valkey[str]): r.ping() info = r.client_info() assert info["lib-name"] == "valkey-py" @@ -771,7 +774,7 @@ def test_config_get(self, r): # assert data['maxmemory'].isdigit() @skip_if_server_version_lt("7.0.0") - def test_config_get_multi_params(self, r: valkey.Valkey): + def test_config_get_multi_params(self, r: valkey.Valkey[str]): res = r.config_get("*max-*-entries*", "maxmemory") assert "maxmemory" in res assert "hash-max-listpack-entries" in res @@ -792,7 +795,7 @@ def test_config_set(self, r): assert r.config_get()["timeout"] == "0" @skip_if_server_version_lt("7.0.0") - def test_config_set_multi_params(self, r: valkey.Valkey): + def test_config_set_multi_params(self, r: valkey.Valkey[str]): r.config_set("timeout", 70, "maxmemory", 100) assert r.config_get()["timeout"] == "70" assert r.config_get()["maxmemory"] == "100" @@ -955,13 +958,13 @@ def test_bgsave(self, r): time.sleep(0.3) assert r.bgsave(True) - def test_never_decode_option(self, r: valkey.Valkey): - opts = {NEVER_DECODE: []} + def test_never_decode_option(self, r: valkey.Valkey[str]): + opts: dict[str, list[Any]] = {NEVER_DECODE: []} r.delete("a") assert r.execute_command("EXISTS", "a", **opts) == 0 - def test_empty_response_option(self, r: valkey.Valkey): - opts = {EMPTY_RESPONSE: []} + def test_empty_response_option(self, 
r: valkey.Valkey[str]): + opts: dict[str, list[Any]] = {EMPTY_RESPONSE: []} r.delete("a") assert r.execute_command("EXISTS", "a", **opts) == 0 @@ -2834,7 +2837,7 @@ def test_zrank(self, r): assert r.zrank("a", "a6") is None @skip_if_server_version_lt("7.2.0") - def test_zrank_withscore(self, r: valkey.Valkey): + def test_zrank_withscore(self, r: valkey.Valkey[str]): r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5}) assert r.zrank("a", "a1") == 0 assert r.zrank("a", "a2") == 1 @@ -3447,7 +3450,7 @@ def test_geoadd(self, r): @skip_if_server_version_lt("6.2.0") def test_geoadd_nx(self, r): - values = (2.1909389952632, 41.433791470673, "place1") + ( + values: Any = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, "place2", @@ -3463,7 +3466,7 @@ def test_geoadd_nx(self, r): @skip_if_server_version_lt("6.2.0") def test_geoadd_xx(self, r): - values = (2.1909389952632, 41.433791470673, "place1") + values: Any = (2.1909389952632, 41.433791470673, "place1") assert r.geoadd("a", values) == 1 values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, @@ -3475,7 +3478,7 @@ def test_geoadd_xx(self, r): @skip_if_server_version_lt("6.2.0") def test_geoadd_ch(self, r): - values = (2.1909389952632, 41.433791470673, "place1") + values: Any = (2.1909389952632, 41.433791470673, "place1") assert r.geoadd("a", values) == 1 values = (2.1909389952632, 31.433791470673, "place1") + ( 2.1873744593677, @@ -4096,7 +4099,7 @@ def test_xadd_minlen_and_limit(self, r): assert r.xadd(stream, {"foo": "bar"}, approximate=True, minid=m3) @skip_if_server_version_lt("7.0.0") - def test_xadd_explicit_ms(self, r: valkey.Valkey): + def test_xadd_explicit_ms(self, r: valkey.Valkey[str]): stream = "stream" message_id = r.xadd(stream, {"foo": "bar"}, "9999999999999999999-*") ms = message_id[: message_id.index(b"-")] @@ -4273,7 +4276,7 @@ def test_xgroup_create_mkstream(self, r): assert r.xinfo_groups(stream) == expected 
@skip_if_server_version_lt("7.0.0") - def test_xgroup_create_entriesread(self, r: valkey.Valkey): + def test_xgroup_create_entriesread(self, r: valkey.Valkey[str]): stream = "stream" group = "group" r.xadd(stream, {"foo": "bar"}) @@ -4452,7 +4455,7 @@ def test_xpending(self, r): r.xgroup_create(stream, group, 0) # xpending on a group that has no consumers yet - expected = {"pending": 0, "min": None, "max": None, "consumers": []} + expected: dict[str, Any] = {"pending": 0, "min": None, "max": None, "consumers": []} assert r.xpending(stream, group) == expected # read 1 message from the group with each consumer @@ -4831,7 +4834,7 @@ def test_bitfield_operations(self, r): assert resp == [0, None, 255] @skip_if_server_version_lt("6.0.0") - def test_bitfield_ro(self, r: valkey.Valkey): + def test_bitfield_ro(self, r: valkey.Valkey[str]): bf = r.bitfield("a") resp = bf.set("u8", 8, 255).execute() assert resp == [0] @@ -4875,25 +4878,25 @@ def test_memory_usage(self, r): assert isinstance(r.memory_usage("foo"), int) @skip_if_server_version_lt("7.0.0") - def test_latency_histogram_not_implemented(self, r: valkey.Valkey): + def test_latency_histogram_not_implemented(self, r: valkey.Valkey[str]): with pytest.raises(NotImplementedError): r.latency_histogram() - def test_latency_graph_not_implemented(self, r: valkey.Valkey): + def test_latency_graph_not_implemented(self, r: valkey.Valkey[str]): with pytest.raises(NotImplementedError): r.latency_graph() - def test_latency_doctor_not_implemented(self, r: valkey.Valkey): + def test_latency_doctor_not_implemented(self, r: valkey.Valkey[str]): with pytest.raises(NotImplementedError): r.latency_doctor() - def test_latency_history(self, r: valkey.Valkey): + def test_latency_history(self, r: valkey.Valkey[str]): assert r.latency_history("command") == [] - def test_latency_latest(self, r: valkey.Valkey): + def test_latency_latest(self, r: valkey.Valkey[str]): assert r.latency_latest() == [] - def test_latency_reset(self, r: 
valkey.Valkey): + def test_latency_reset(self, r: valkey.Valkey[str]): assert r.latency_reset() == 0 @skip_if_server_version_lt("4.0.0") @@ -4914,7 +4917,7 @@ def test_command_docs(self, r): r.command_docs("set") @skip_if_server_version_lt("7.0.0") - def test_command_list(self, r: valkey.Valkey): + def test_command_list(self, r: valkey.Valkey[str]): assert len(r.command_list()) > 300 assert len(r.command_list(module="fakemod")) == 0 assert len(r.command_list(category="list")) > 15 @@ -4953,7 +4956,7 @@ def test_command(self, r): @pytest.mark.onlynoncluster @skip_if_server_version_lt("7.0.0") - def test_command_getkeysandflags(self, r: valkey.Valkey): + def test_command_getkeysandflags(self, r: valkey.Valkey[str]): res = r.command_getkeysandflags("LMOVE", "mylist1", "mylist2", "left", "left") assert res == [ [b"mylist1", [b"RW", b"access", b"delete"]], @@ -4973,7 +4976,7 @@ def test_module(self, r): @pytest.mark.onlynoncluster @skip_if_server_version_lt("7.0.0") - def test_module_loadex(self, r: valkey.Valkey): + def test_module_loadex(self, r: valkey.Valkey[str]): with pytest.raises(valkey.exceptions.ModuleError) as excinfo: r.module_loadex("/some/fake/path") assert "Error loading the extension." 
in str(excinfo.value) @@ -5032,14 +5035,14 @@ def test_replicaof(self, r): assert r.replicaof("NO ONE") assert r.replicaof("NO", "ONE") - def test_shutdown(self, r: valkey.Valkey): - r.execute_command = mock.MagicMock() + def test_shutdown(self, r: valkey.Valkey[str]): + r.execute_command = mock.MagicMock() # type: ignore[method-assign] r.execute_command("SHUTDOWN", "NOSAVE") r.execute_command.assert_called_once_with("SHUTDOWN", "NOSAVE") @skip_if_server_version_lt("7.0.0") - def test_shutdown_with_params(self, r: valkey.Valkey): - r.execute_command = mock.MagicMock() + def test_shutdown_with_params(self, r: valkey.Valkey[str]): + r.execute_command = mock.MagicMock() # type: ignore[method-assign] r.execute_command("SHUTDOWN", "SAVE", "NOW", "FORCE") r.execute_command.assert_called_once_with("SHUTDOWN", "SAVE", "NOW", "FORCE") r.execute_command("SHUTDOWN", "ABORT") @@ -5063,7 +5066,7 @@ def test_psync(self, r): assert b"FULLRESYNC" in res @pytest.mark.onlynoncluster - def test_interrupted_command(self, r: valkey.Valkey): + def test_interrupted_command(self, r: valkey.Valkey[str]): """ Regression test for issue #1128: An Un-handled BaseException will leave the socket with un-read response to a previous diff --git a/valkey/__init__.py b/valkey/__init__.py index 3cb2fbc1..1feaac77 100644 --- a/valkey/__init__.py +++ b/valkey/__init__.py @@ -46,7 +46,7 @@ def int_or_str(value): __version__: str -VERSION: Tuple[Union[int | str], ...] +VERSION: Tuple[Union[int, str], ...] 
try: __version__ = metadata.version("valkey") diff --git a/valkey/asyncio/client.pyi b/valkey/asyncio/client.pyi index a33d7cec..99d40f4e 100644 --- a/valkey/asyncio/client.pyi +++ b/valkey/asyncio/client.pyi @@ -62,6 +62,8 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], single_connection_client: bool = False, health_check_interval: int = 0, client_name: str | None = None, + lib_name: str | None = None, + lib_version: str | None = None, username: str | None = None, retry: Retry | None = None, auto_close_connection_pool: bool = True, @@ -100,6 +102,8 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], single_connection_client: bool = False, health_check_interval: int = 0, client_name: str | None = None, + lib_name: str | None = None, + lib_version: str | None = None, username: str | None = None, retry: Retry | None = None, auto_close_connection_pool: bool = True, @@ -136,6 +140,8 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], single_connection_client: bool = False, health_check_interval: int = 0, client_name: str | None = None, + lib_name: str | None = None, + lib_version: str | None = None, username: str | None = None, retry: Retry | None = None, auto_close_connection_pool: bool = True, @@ -172,6 +178,8 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], single_connection_client: bool = False, health_check_interval: int = 0, client_name: str | None = None, + lib_name: str | None = None, + lib_version: str | None = None, username: str | None = None, retry: Retry | None = None, auto_close_connection_pool: bool = True, @@ -209,6 +217,7 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __del__(self, _warnings: Any = ...) -> None: ... 
+ async def aclose(self, close_connection_pool: bool | None = None) -> None: ... async def close(self, close_connection_pool: bool | None = None) -> None: ... async def execute_command(self, *args, **options): ... async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ... @@ -586,7 +595,7 @@ class Pipeline(Valkey[_StrType]): @overload def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ... def brpoplpush(self, src, dst, timeout: int | None = 0) -> Any: ... - def lindex(self, name: _Key, index: int) -> Any: ... + def lindex(self, name: _Key, index: int | str) -> Any: ... def linsert( self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value ) -> Any: ... @@ -656,7 +665,7 @@ class Pipeline(Valkey[_StrType]): **kwargs: _CommandOptions, ) -> Any: ... def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... - def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... + def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Any: ... @overload def zscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... @overload @@ -734,7 +743,7 @@ class Pipeline(Valkey[_StrType]): justid=False, ) -> Any: ... def xdel(self, name, *ids) -> Any: ... - def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None) -> Any: ... + def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None) -> Any: ... def xgroup_delconsumer(self, name, groupname, consumername) -> Any: ... def xgroup_destroy(self, name, groupname) -> Any: ... def xgroup_createconsumer(self, name, groupname, consumername) -> Any: ... 
diff --git a/valkey/client.pyi b/valkey/client.pyi index b9ad6a83..d55b234b 100644 --- a/valkey/client.pyi +++ b/valkey/client.pyi @@ -150,6 +150,7 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], Senti retry: Retry | None = ..., ) -> Valkey[bytes]: ... connection_pool: Any + connection: Any response_callbacks: Any @overload def __init__( @@ -188,6 +189,8 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], Senti single_connection_client: bool = False, health_check_interval: float = 0, client_name: str | None = None, + lib_name: str | None = None, + lib_version: str | None = None, username: str | None = None, retry: Retry | None = None, valkey_connect_func: _ConnectFunc | None = None, @@ -230,6 +233,8 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], Senti single_connection_client: bool = False, health_check_interval: float = 0, client_name: str | None = None, + lib_name: str | None = None, + lib_version: str | None = None, username: str | None = None, retry: Retry | None = None, valkey_connect_func: _ConnectFunc | None = None, @@ -271,6 +276,8 @@ class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], Senti single_connection_client: bool = False, health_check_interval: float = 0, client_name: str | None = None, + lib_name: str | None = None, + lib_version: str | None = None, username: str | None = None, retry: Retry | None = None, valkey_connect_func: _ConnectFunc | None = None, @@ -571,7 +578,7 @@ class Pipeline(Valkey[_StrType]): def blpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override] def brpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ... # type: ignore[override] def brpoplpush(self, src, dst, timeout=0) -> Pipeline[_StrType]: ... - def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def lindex(self, name: _Key, index: int | str) -> Pipeline[_StrType]: ... # type: ignore[override] def linsert( # type: ignore[override] self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value ) -> Pipeline[_StrType]: ... @@ -612,8 +619,8 @@ class Pipeline(Valkey[_StrType]): def scan_iter(self, match: _Key | None = None, count: int | None = None, _type: str | None = None) -> Iterator[Any]: ... # type: ignore[override] def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ... - def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] - def hscan_iter(self, name, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ... + def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def hscan_iter(self, name, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Iterator[Any]: ... def zscan_iter( self, name: _Key, match: _Key | None = None, count: int | None = None, score_cast_func: Callable[[_StrType], Any] = ... ) -> Iterator[Any]: ... diff --git a/valkey/commands/core.pyi b/valkey/commands/core.pyi index 770bf4ce..532612e0 100644 --- a/valkey/commands/core.pyi +++ b/valkey/commands/core.pyi @@ -129,6 +129,7 @@ class ManagementCommands: ): ... def client_trackinginfo(self, **kwargs: _CommandOptions): ... def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + def client_setinfo(self, attr: str, value: str, **kwargs: _CommandOptions) -> bool: ... 
def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ... def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ... def client_unpause(self, **kwargs: _CommandOptions): ... @@ -153,6 +154,8 @@ class ManagementCommands: def select(self, index, **kwargs: _CommandOptions): ... def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... def lastsave(self, **kwargs: _CommandOptions): ... + def latency_doctor(self): ... + def latency_graph(self): ... def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... def reset(self) -> None: ... def migrate( @@ -174,6 +177,10 @@ class ManagementCommands: def memory_malloc_stats(self, **kwargs: _CommandOptions): ... def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ... def memory_purge(self, **kwargs: _CommandOptions): ... + def latency_histogram(self, *args): ... + def latency_history(self, event: str): ... + def latency_latest(self): ... + def latency_reset(self, *events: str) -> bool: ... def ping(self, **kwargs: _CommandOptions) -> bool: ... def quit(self, **kwargs: _CommandOptions): ... def replicaof(self, *args, **kwargs: _CommandOptions): ... @@ -248,7 +255,10 @@ class AsyncManagementCommands: ): ... async def client_trackinginfo(self, **kwargs: _CommandOptions): ... async def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + async def client_setinfo(self, attr: str, value: str, **kwargs: _CommandOptions) -> bool: ... async def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ... + async def client_no_evict(self, mode: str): ... + async def client_no_touch(self, mode: str): ... async def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ... async def client_unpause(self, **kwargs: _CommandOptions): ... async def command(self, **kwargs: _CommandOptions): ... 
@@ -270,6 +280,8 @@ class AsyncManagementCommands: async def select(self, index, **kwargs: _CommandOptions): ... async def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... async def lastsave(self, **kwargs: _CommandOptions): ... + async def latency_doctor(self): ... + async def latency_graph(self): ... async def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... async def reset(self) -> None: ... async def migrate( @@ -291,6 +303,10 @@ class AsyncManagementCommands: async def memory_malloc_stats(self, **kwargs: _CommandOptions): ... async def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ... async def memory_purge(self, **kwargs: _CommandOptions): ... + async def latency_histogram(self, *args): ... + async def latency_history(self, event: str): ... + async def latency_latest(self): ... + async def latency_reset(self, *events: str) -> bool: ... async def ping(self, **kwargs: _CommandOptions) -> bool: ... async def quit(self, **kwargs: _CommandOptions): ... async def replicaof(self, *args, **kwargs: _CommandOptions): ... @@ -315,6 +331,7 @@ class BasicKeyCommands(Generic[_StrType]): def append(self, key, value): ... def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ... def bitfield(self, key, default_overflow: Incomplete | None = None): ... + def bitfield_ro(self, key, encoding: str, offset: int, items: list[tuple[str, int]] | None = None): ... def bitop(self, operation, dest, *keys): ... def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ... def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ... @@ -432,7 +449,8 @@ class BasicKeyCommands(Generic[_StrType]): class AsyncBasicKeyCommands(Generic[_StrType]): async def append(self, key, value): ... 
async def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ... - async def bitfield(self, key, default_overflow: Incomplete | None = None): ... + def bitfield(self, key, default_overflow: Incomplete | None = None): ... + async def bitfield_ro(self, key, encoding: str, offset: int, items: list[tuple[str, int]] | None = None): ... async def bitop(self, operation, dest, *keys): ... async def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ... async def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ... @@ -557,7 +575,7 @@ class ListCommands(Generic[_StrType]): @overload def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... def brpoplpush(self, src, dst, timeout: int | None = 0): ... - def lindex(self, name: _Key, index: int) -> _StrType | None: ... + def lindex(self, name: _Key, index: int | str) -> _StrType | None: ... def linsert( self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value ) -> int: ... @@ -627,7 +645,7 @@ class AsyncListCommands(Generic[_StrType]): @overload async def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... async def brpoplpush(self, src, dst, timeout: int | None = 0): ... - async def lindex(self, name: _Key, index: int) -> _StrType | None: ... + async def lindex(self, name: _Key, index: int | str) -> _StrType | None: ... async def linsert( self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value ) -> int: ... @@ -704,10 +722,12 @@ class ScanCommands(Generic[_StrType]): ) -> tuple[int, list[_StrType]]: ... def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[_StrType]: ... 
def hscan( - self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None + self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, + no_values: bool | None = None, ) -> tuple[int, dict[_StrType, _StrType]]: ... def hscan_iter( - self, name: _Key, match: _Key | None = None, count: int | None = None + self, name: _Key, match: _Key | None = None, count: int | None = None, + no_values: bool | None = None, ) -> Iterator[tuple[_StrType, _StrType]]: ... @overload def zscan( @@ -765,10 +785,12 @@ class AsyncScanCommands(Generic[_StrType]): ) -> tuple[int, list[_StrType]]: ... def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> AsyncIterator[_StrType]: ... async def hscan( - self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None + self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, + no_values: bool | None = None, ) -> tuple[int, dict[_StrType, _StrType]]: ... def hscan_iter( - self, name: _Key, match: _Key | None = None, count: int | None = None + self, name: _Key, match: _Key | None = None, count: int | None = None, + no_values: bool | None = None, ) -> AsyncIterator[tuple[_StrType, _StrType]]: ... @overload async def zscan( @@ -895,7 +917,7 @@ class StreamCommands: justid=False, ): ... def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ... - def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None): ... + def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None): ... def xgroup_delconsumer(self, name, groupname, consumername): ... def xgroup_destroy(self, name, groupname): ... def xgroup_createconsumer(self, name, groupname, consumername): ... @@ -962,7 +984,7 @@ class AsyncStreamCommands: justid=False, ): ... 
async def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ... - async def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None): ... + async def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None): ... async def xgroup_delconsumer(self, name, groupname, consumername): ... async def xgroup_destroy(self, name, groupname): ... async def xgroup_createconsumer(self, name, groupname, consumername): ... @@ -1639,10 +1661,13 @@ class AsyncGeoCommands: class ModuleCommands: def module_load(self, path, *args): ... + def module_loadex(self, path: str, options: list[str] | None = None, args: list[str] | None = None): ... def module_unload(self, name): ... def module_list(self): ... def command_info(self): ... def command_count(self): ... + def command_list(self, module: str | None = None, category: str | None = None, pattern: str | None = None): ... + def command_getkeysandflags(self, *args: str): ... def command_getkeys(self, *args): ... def command(self): ... @@ -1662,7 +1687,10 @@ class BitFieldOperation: def execute(self): ... class AsyncModuleCommands(ModuleCommands): + async def module_loadex(self, path: str, options: list[str] | None = None, args: list[str] | None = None): ... async def command_info(self) -> None: ... + async def command_list(self, module: str | None = None, category: str | None = None, pattern: str | None = None): ... + async def command_getkeysandflags(self, *args: str): ... class ClusterCommands: def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ... 
diff --git a/valkey/commands/graph/execution_plan.py b/valkey/commands/graph/execution_plan.py index cf71284e..0f07427c 100644 --- a/valkey/commands/graph/execution_plan.py +++ b/valkey/commands/graph/execution_plan.py @@ -186,7 +186,7 @@ def _create_operation(args): # set the current operation and move next child = _create_operation(current_op.split("|")) if current: - current = stack.pop() # type: ignore[unreachable] + current = stack.pop() current.append_child(child) current = child i += 1 diff --git a/valkey/commands/search/field.py b/valkey/commands/search/field.py index 04ff0e84..f9b25b62 100644 --- a/valkey/commands/search/field.py +++ b/valkey/commands/search/field.py @@ -148,7 +148,7 @@ class VectorField(Field): See https://oss.valkey.com/valkeyearch/Vectors/#vector_fields. """ - def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs): + def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs): # type: ignore[type-arg] """ Create Vector Field. Notice that Vector cannot have sortable or no_index tag, although it's also a Field. 
diff --git a/valkey/commands/search/querystring.py b/valkey/commands/search/querystring.py index 1ebd6aa0..bd576490 100644 --- a/valkey/commands/search/querystring.py +++ b/valkey/commands/search/querystring.py @@ -1,6 +1,3 @@ -from typing import Dict, List - - def tags(*t): """ Indicate that the values should be matched to a tag field @@ -185,7 +182,7 @@ def __init__(self, *children, **kwparams): self.params = [] - kvparams = {} + kvparams = {} # type: ignore[var-annotated] for k, v in kwparams.items(): curvals = kvparams.setdefault(k, []) if isinstance(v, (str, int, float)): From 9febbc03b2c9cf9b38f106edb095bc9d7a6dc957 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Wed, 11 Sep 2024 13:52:35 +0200 Subject: [PATCH 07/39] new: typecheck more tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- .mypy.ini | 10 +++- tests/test_asyncio/compat.py | 2 + tests/test_asyncio/test_pipeline.py | 4 +- tests/test_asyncio/test_pubsub.py | 88 ++++++++++++++--------------- tests/test_pipeline.py | 4 +- tests/test_pubsub.py | 22 ++++---- valkey/asyncio/client.pyi | 2 + 7 files changed, 73 insertions(+), 59 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index 8e09fb7c..f0436a8a 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -18,14 +18,20 @@ ignore_errors = True [mypy-valkey._cache] ignore_errors = True - [mypy-tests.*] ignore_errors = True - [mypy-tests.test_commands] ignore_errors = False [mypy-tests.test_asyncio.test_commands] ignore_errors = False +[mypy-tests.test_pipeline] +ignore_errors = False +[mypy-tests.test_asyncio.test_pipeline] +ignore_errors = False +[mypy-tests.test_pubsub] +ignore_errors = False +[mypy-tests.test_asyncio.test_pubsub] +ignore_errors = False [mypy-benchmarks.*] ignore_errors = True diff --git a/tests/test_asyncio/compat.py b/tests/test_asyncio/compat.py index aa1dc49a..05760029 100644 --- a/tests/test_asyncio/compat.py +++ b/tests/test_asyncio/compat.py @@ 
-1,6 +1,8 @@ import asyncio from unittest import mock +__all__ = ["mock", "aclosing", "create_task"] + try: mock.AsyncMock except AttributeError: diff --git a/tests/test_asyncio/test_pipeline.py b/tests/test_asyncio/test_pipeline.py index 5021f91c..cb28b0ff 100644 --- a/tests/test_asyncio/test_pipeline.py +++ b/tests/test_asyncio/test_pipeline.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pytest import valkey from tests.conftest import skip_if_server_version_lt @@ -308,7 +310,7 @@ async def test_aclosing(self, r): async def test_transaction_callable(self, r): await r.set("a", 1) await r.set("b", 2) - has_run = [] + has_run: list[str] = [] async def my_transaction(pipe): a_value = await pipe.get("a") diff --git a/tests/test_asyncio/test_pubsub.py b/tests/test_asyncio/test_pubsub.py index 8afb2256..517177e4 100644 --- a/tests/test_asyncio/test_pubsub.py +++ b/tests/test_asyncio/test_pubsub.py @@ -8,9 +8,9 @@ # the functionality is available in 3.11.x but has a major issue before # 3.11.3. 
See https://github.com/redis/redis-py/issues/2633 if sys.version_info >= (3, 11, 3): - from asyncio import timeout as async_timeout + from asyncio import timeout as async_timeout # type: ignore[unused-ignore,assignment,no-redef,import-not-found,attr-defined] else: - from async_timeout import timeout as async_timeout + from async_timeout import timeout as async_timeout # type: ignore[unused-ignore,assignment,no-redef,import-not-found] import pytest import pytest_asyncio @@ -23,7 +23,7 @@ from .compat import aclosing, create_task, mock -def with_timeout(t): +def with_timeout(t: int): def wrapper(corofunc): @functools.wraps(corofunc) async def run(*args, **kwargs): @@ -83,7 +83,7 @@ def make_subscribe_test_data(pubsub, type): @pytest_asyncio.fixture() -async def pubsub(r: valkey.Valkey): +async def pubsub(r: valkey.Valkey[bytes]): async with r.pubsub() as p: yield p @@ -214,7 +214,7 @@ async def test_subscribe_property_with_patterns(self, pubsub): kwargs = make_subscribe_test_data(pubsub, "pattern") await self._test_subscribed_property(**kwargs) - async def test_aclosing(self, r: valkey.Valkey): + async def test_aclosing(self, r: valkey.Valkey[str]): p = r.pubsub() async with aclosing(p): assert p.subscribed is False @@ -222,7 +222,7 @@ async def test_aclosing(self, r: valkey.Valkey): assert p.subscribed is True assert p.subscribed is False - async def test_context_manager(self, r: valkey.Valkey): + async def test_context_manager(self, r: valkey.Valkey[str]): p = r.pubsub() async with p: assert p.subscribed is False @@ -230,7 +230,7 @@ async def test_context_manager(self, r: valkey.Valkey): assert p.subscribed is True assert p.subscribed is False - async def test_close_is_aclose(self, r: valkey.Valkey): + async def test_close_is_aclose(self, r: valkey.Valkey[str]): """ Test backwards compatible close method """ @@ -242,7 +242,7 @@ async def test_close_is_aclose(self, r: valkey.Valkey): await p.close() assert p.subscribed is False - async def test_reset_is_aclose(self, 
r: valkey.Valkey): + async def test_reset_is_aclose(self, r: valkey.Valkey[str]): """ Test backwards compatible reset method """ @@ -254,7 +254,7 @@ async def test_reset_is_aclose(self, r: valkey.Valkey): await p.reset() assert p.subscribed is False - async def test_ignore_all_subscribe_messages(self, r: valkey.Valkey): + async def test_ignore_all_subscribe_messages(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) checks = ( @@ -347,7 +347,7 @@ def message_handler(self, message): async def async_message_handler(self, message): self.async_message = message - async def test_published_message_to_channel(self, r: valkey.Valkey, pubsub): + async def test_published_message_to_channel(self, r: valkey.Valkey[str], pubsub): p = pubsub await p.subscribe("foo") assert await wait_for_message(p) == make_message("subscribe", "foo", 1) @@ -357,7 +357,7 @@ async def test_published_message_to_channel(self, r: valkey.Valkey, pubsub): assert isinstance(message, dict) assert message == make_message("message", "foo", "test message") - async def test_published_message_to_pattern(self, r: valkey.Valkey, pubsub): + async def test_published_message_to_pattern(self, r: valkey.Valkey[str], pubsub): p = pubsub await p.subscribe("foo") await p.psubscribe("f*") @@ -380,7 +380,7 @@ async def test_published_message_to_pattern(self, r: valkey.Valkey, pubsub): assert message2 in expected assert message1 != message2 - async def test_channel_message_handler(self, r: valkey.Valkey): + async def test_channel_message_handler(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) await p.subscribe(foo=self.message_handler) assert await wait_for_message(p) is None @@ -411,7 +411,7 @@ async def test_channel_sync_async_message_handler(self, r): await p.aclose() @pytest.mark.onlynoncluster - async def test_pattern_message_handler(self, r: valkey.Valkey): + async def test_pattern_message_handler(self, r: valkey.Valkey[str]): p = 
r.pubsub(ignore_subscribe_messages=True) await p.psubscribe(**{"f*": self.message_handler}) assert await wait_for_message(p) is None @@ -422,7 +422,7 @@ async def test_pattern_message_handler(self, r: valkey.Valkey): ) await p.aclose() - async def test_unicode_channel_message_handler(self, r: valkey.Valkey): + async def test_unicode_channel_message_handler(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) channel = "uni" + chr(4456) + "code" channels = {channel: self.message_handler} @@ -436,7 +436,7 @@ async def test_unicode_channel_message_handler(self, r: valkey.Valkey): @pytest.mark.onlynoncluster # see: https://valkey-py-cluster.readthedocs.io/en/stable/pubsub.html # #known-limitations-with-pubsub - async def test_unicode_pattern_message_handler(self, r: valkey.Valkey): + async def test_unicode_pattern_message_handler(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) pattern = "uni" + chr(4456) + "*" channel = "uni" + chr(4456) + "code" @@ -449,7 +449,7 @@ async def test_unicode_pattern_message_handler(self, r: valkey.Valkey): ) await p.aclose() - async def test_get_message_without_subscribe(self, r: valkey.Valkey, pubsub): + async def test_get_message_without_subscribe(self, r: valkey.Valkey[str], pubsub): p = pubsub with pytest.raises(RuntimeError) as info: await p.get_message() @@ -522,7 +522,7 @@ async def test_pattern_subscribe_unsubscribe(self, pubsub): "punsubscribe", self.pattern, 0 ) - async def test_channel_publish(self, r: valkey.Valkey, pubsub): + async def test_channel_publish(self, r: valkey.Valkey[str], pubsub): p = pubsub await p.subscribe(self.channel) assert await wait_for_message(p) == self.make_message( @@ -534,7 +534,7 @@ async def test_channel_publish(self, r: valkey.Valkey, pubsub): ) @pytest.mark.onlynoncluster - async def test_pattern_publish(self, r: valkey.Valkey, pubsub): + async def test_pattern_publish(self, r: valkey.Valkey[str], pubsub): p = pubsub await 
p.psubscribe(self.pattern) assert await wait_for_message(p) == self.make_message( @@ -545,7 +545,7 @@ async def test_pattern_publish(self, r: valkey.Valkey, pubsub): "pmessage", self.channel, self.data, pattern=self.pattern ) - async def test_channel_message_handler(self, r: valkey.Valkey): + async def test_channel_message_handler(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) await p.subscribe(**{self.channel: self.message_handler}) assert await wait_for_message(p) is None @@ -563,7 +563,7 @@ async def test_channel_message_handler(self, r: valkey.Valkey): assert self.message == self.make_message("message", self.channel, new_data) await p.aclose() - async def test_pattern_message_handler(self, r: valkey.Valkey): + async def test_pattern_message_handler(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) await p.psubscribe(**{self.pattern: self.message_handler}) assert await wait_for_message(p) is None @@ -585,7 +585,7 @@ async def test_pattern_message_handler(self, r: valkey.Valkey): ) await p.aclose() - async def test_context_manager(self, r: valkey.Valkey): + async def test_context_manager(self, r: valkey.Valkey[str]): async with r.pubsub() as pubsub: await pubsub.subscribe("foo") assert pubsub.connection is not None @@ -598,7 +598,7 @@ async def test_context_manager(self, r: valkey.Valkey): @pytest.mark.onlynoncluster class TestPubSubValkeyDown: - async def test_channel_subscribe(self, r: valkey.Valkey): + async def test_channel_subscribe(self): r = valkey.Valkey(host="localhost", port=6390) p = r.pubsub() with pytest.raises(ConnectionError): @@ -609,17 +609,17 @@ async def test_channel_subscribe(self, r: valkey.Valkey): class TestPubSubSubcommands: @pytest.mark.onlynoncluster @skip_if_server_version_lt("2.8.0") - async def test_pubsub_channels(self, r: valkey.Valkey, pubsub): + async def test_pubsub_channels(self, r: valkey.Valkey[bytes], pubsub): p = pubsub await p.subscribe("foo", "bar", "baz", "quux") for i 
in range(4): assert (await wait_for_message(p))["type"] == "subscribe" expected = [b"bar", b"baz", b"foo", b"quux"] - assert all([channel in await r.pubsub_channels() for channel in expected]) + assert all([channel in await r.pubsub_channels() for channel in expected]) # type: ignore[comparison-overlap] @pytest.mark.onlynoncluster @skip_if_server_version_lt("2.8.0") - async def test_pubsub_numsub(self, r: valkey.Valkey): + async def test_pubsub_numsub(self, r: valkey.Valkey[bytes]): p1 = r.pubsub() await p1.subscribe("foo", "bar", "baz") for i in range(3): @@ -633,13 +633,13 @@ async def test_pubsub_numsub(self, r: valkey.Valkey): assert (await wait_for_message(p3))["type"] == "subscribe" channels = [(b"foo", 1), (b"bar", 2), (b"baz", 3)] - assert await r.pubsub_numsub("foo", "bar", "baz") == channels + assert await r.pubsub_numsub("foo", "bar", "baz") == channels # type: ignore[comparison-overlap] await p1.aclose() await p2.aclose() await p3.aclose() @skip_if_server_version_lt("2.8.0") - async def test_pubsub_numpat(self, r: valkey.Valkey): + async def test_pubsub_numpat(self, r: valkey.Valkey[str]): p = r.pubsub() await p.psubscribe("*oo", "*ar", "b*z") for i in range(3): @@ -651,7 +651,7 @@ async def test_pubsub_numpat(self, r: valkey.Valkey): @pytest.mark.onlynoncluster class TestPubSubPings: @skip_if_server_version_lt("3.0.0") - async def test_send_pubsub_ping(self, r: valkey.Valkey): + async def test_send_pubsub_ping(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) await p.subscribe("foo") await p.ping() @@ -661,7 +661,7 @@ async def test_send_pubsub_ping(self, r: valkey.Valkey): await p.aclose() @skip_if_server_version_lt("3.0.0") - async def test_send_pubsub_ping_message(self, r: valkey.Valkey): + async def test_send_pubsub_ping_message(self, r: valkey.Valkey[str]): p = r.pubsub(ignore_subscribe_messages=True) await p.subscribe("foo") await p.ping(message="hello world") @@ -675,7 +675,7 @@ async def 
test_send_pubsub_ping_message(self, r: valkey.Valkey): class TestPubSubConnectionKilled: @skip_if_server_version_lt("3.0.0") async def test_connection_error_raised_when_connection_dies( - self, r: valkey.Valkey, pubsub + self, r: valkey.Valkey[str], pubsub ): p = pubsub await p.subscribe("foo") @@ -698,13 +698,13 @@ async def test_get_message_with_timeout_returns_none(self, pubsub): @pytest.mark.onlynoncluster class TestPubSubReconnect: - @with_timeout(2) - async def test_reconnect_listen(self, r: valkey.Valkey, pubsub): + @with_timeout(2) # type: ignore[misc] + async def test_reconnect_listen(self, r: valkey.Valkey[str], pubsub): """ Test that a loop processing PubSub messages can survive a disconnect, by issuing a connect() call. """ - messages = asyncio.Queue() + messages = asyncio.Queue() # type: ignore[var-annotated] interrupt = False async def loop(): @@ -775,11 +775,11 @@ async def _subscribe(self, p, *args, **kwargs): ): return - async def test_callbacks(self, r: valkey.Valkey, pubsub): + async def test_callbacks(self, r: valkey.Valkey[str], pubsub): def callback(message): messages.put_nowait(message) - messages = asyncio.Queue() + messages = asyncio.Queue() # type: ignore[var-annotated] p = pubsub await self._subscribe(p, foo=callback) task = asyncio.get_running_loop().create_task(p.run()) @@ -797,12 +797,12 @@ def callback(message): "type": "message", } - async def test_exception_handler(self, r: valkey.Valkey, pubsub): + async def test_exception_handler(self, r: valkey.Valkey[str], pubsub): def exception_handler_callback(e, pubsub) -> None: assert pubsub == p exceptions.put_nowait(e) - exceptions = asyncio.Queue() + exceptions = asyncio.Queue() # type: ignore[var-annotated] p = pubsub await self._subscribe(p, foo=lambda x: None) with mock.patch.object(p, "get_message", side_effect=Exception("error")): @@ -817,11 +817,11 @@ def exception_handler_callback(e, pubsub) -> None: pass assert str(e) == "error" - async def test_late_subscribe(self, r: 
valkey.Valkey, pubsub): + async def test_late_subscribe(self, r: valkey.Valkey[str], pubsub): def callback(message): messages.put_nowait(message) - messages = asyncio.Queue() + messages = asyncio.Queue() # type: ignore[var-annotated] p = pubsub task = asyncio.get_running_loop().create_task(p.run()) # wait until loop gets settled. Add a subscription @@ -856,7 +856,7 @@ class TestPubSubAutoReconnect: timeout = 2 async def mysetup(self, r, method): - self.messages = asyncio.Queue() + self.messages = asyncio.Queue() # type: ignore[var-annotated] self.pubsub = r.pubsub() # State: 0 = initial state , 1 = after disconnect, 2 = ConnectionError is seen, # 3=successfully reconnected 4 = exit @@ -892,7 +892,7 @@ async def mykill(self): self.state = 4 # quit await self.task - async def test_reconnect_socket_error(self, r: valkey.Valkey, method): + async def test_reconnect_socket_error(self, r: valkey.Valkey[str], method): """ Test that a socket error will cause reconnect """ @@ -921,7 +921,7 @@ async def test_reconnect_socket_error(self, r: valkey.Valkey, method): finally: await self.mykill() - async def test_reconnect_disconnect(self, r: valkey.Valkey, method): + async def test_reconnect_disconnect(self, r: valkey.Valkey[str], method): """ Test that a manual disconnect() will cause reconnect """ @@ -992,7 +992,7 @@ class TestBaseException: @pytest.mark.skipif( sys.version_info < (3, 8), reason="requires python 3.8 or higher" ) - async def test_outer_timeout(self, r: valkey.Valkey): + async def test_outer_timeout(self, r: valkey.Valkey[str]): """ Using asyncio_timeout manually outside the inner method timeouts works. This works on Python versions 3.8 and greater, at which time asyncio. 
@@ -1026,7 +1026,7 @@ async def get_msg_or_timeout(timeout=0.1): @pytest.mark.skipif( sys.version_info < (3, 8), reason="requires python 3.8 or higher" ) - async def test_base_exception(self, r: valkey.Valkey): + async def test_base_exception(self, r: valkey.Valkey[str]): """ Manually trigger a BaseException inside the parser's .read_response method and verify that it isn't caught diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py index 065f898c..9fd76656 100644 --- a/tests/test_pipeline.py +++ b/tests/test_pipeline.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from contextlib import closing from unittest import mock @@ -309,7 +311,7 @@ def test_closing(self, r): def test_transaction_callable(self, r): r["a"] = 1 r["b"] = 2 - has_run = [] + has_run: list[str] = [] def my_transaction(pipe): a_value = pipe.get("a") diff --git a/tests/test_pubsub.py b/tests/test_pubsub.py index 01b5dee8..76809fee 100644 --- a/tests/test_pubsub.py +++ b/tests/test_pubsub.py @@ -115,7 +115,7 @@ def test_shard_channel_subscribe_unsubscribe(self, r): @pytest.mark.onlycluster @skip_if_server_version_lt("7.0.0") def test_shard_channel_subscribe_unsubscribe_cluster(self, r): - node_channels = defaultdict(int) + node_channels = defaultdict(int) # type: ignore[var-annotated] p = r.pubsub() keys = { "foo": r.get_node_from_key("foo"), @@ -632,7 +632,7 @@ def message_handler(self, message): @pytest.fixture() def r(self, request): - return _get_client(valkey.Valkey, request=request, decode_responses=True) + return _get_client(valkey.Valkey[str], request=request, decode_responses=True) def test_channel_subscribe_unsubscribe(self, r): p = r.pubsub() @@ -768,7 +768,7 @@ def test_context_manager(self, r): class TestPubSubValkeyDown: def test_channel_subscribe(self, r): - r = valkey.Valkey(host="localhost", port=6390) + r = valkey.Valkey[str](host="localhost", port=6390) p = r.pubsub() with pytest.raises(ConnectionError): p.subscribe("foo") @@ -845,7 +845,7 @@ def 
test_pubsub_numpat(self, r): @pytest.mark.onlycluster @skip_if_server_version_lt("7.0.0") - def test_pubsub_shardnumsub(self, r): + def test_pubsub_shardnumsub(self, r: valkey.ValkeyCluster[bytes]): channels = { b"foo": r.get_node_from_key("foo"), b"bar": r.get_node_from_key("bar"), @@ -866,8 +866,8 @@ def test_pubsub_shardnumsub(self, r): p3.ssubscribe("baz") assert wait_for_message(p3, node=channels[b"baz"])["type"] == "ssubscribe" - channels = [(b"foo", 1), (b"bar", 2), (b"baz", 3)] - assert r.pubsub_shardnumsub("foo", "bar", "baz", target_nodes="all") == channels + channels_names = [(b"foo", 1), (b"bar", 2), (b"baz", 3)] + assert r.pubsub_shardnumsub("foo", "bar", "baz", target_nodes="all") == channels_names # type: ignore[attr-defined] class TestPubSubPings: @@ -972,7 +972,7 @@ class TestPubSubDeadlock: @pytest.mark.timeout(30, method="thread") def test_pubsub_deadlock(self, master_host): pool = valkey.ConnectionPool(host=master_host[0], port=master_host[1]) - r = valkey.Valkey(connection_pool=pool) + r = valkey.Valkey[str](connection_pool=pool) for i in range(60): p = r.pubsub() @@ -985,7 +985,7 @@ def test_pubsub_deadlock(self, master_host): @pytest.mark.onlynoncluster class TestPubSubAutoReconnect: def mysetup(self, r, method): - self.messages = queue.Queue() + self.messages = queue.Queue() # type: ignore[var-annotated] self.pubsub = r.pubsub() self.state = 0 self.cond = threading.Condition() @@ -1026,7 +1026,7 @@ def mycleanup(self): self.cond.notify() self.thread.join() - def test_reconnect_socket_error(self, r: valkey.Valkey, method): + def test_reconnect_socket_error(self, r: valkey.Valkey[str], method): """ Test that a socket error will cause reconnect """ @@ -1048,7 +1048,7 @@ def test_reconnect_socket_error(self, r: valkey.Valkey, method): finally: self.mycleanup() - def test_reconnect_disconnect(self, r: valkey.Valkey, method): + def test_reconnect_disconnect(self, r: valkey.Valkey[str], method): """ Test that a manual disconnect() will cause 
reconnect """ @@ -1107,7 +1107,7 @@ def loop_step_listen(self): @pytest.mark.onlynoncluster class TestBaseException: - def test_base_exception(self, r: valkey.Valkey): + def test_base_exception(self, r: valkey.Valkey[str]): """ Manually trigger a BaseException inside the parser's .read_response method and verify that it isn't caught diff --git a/valkey/asyncio/client.pyi b/valkey/asyncio/client.pyi index 99d40f4e..7cb11b26 100644 --- a/valkey/asyncio/client.pyi +++ b/valkey/asyncio/client.pyi @@ -271,6 +271,7 @@ class PubSub: ) -> None: ... def __del__(self) -> None: ... async def reset(self) -> None: ... + async def aclose(self) -> None: ... def close(self) -> Awaitable[NoReturn]: ... async def on_connect(self, connection: Connection): ... @property @@ -324,6 +325,7 @@ class Pipeline(Valkey[_StrType]): def __len__(self) -> int: ... def __bool__(self) -> bool: ... async def reset(self) -> None: ... + async def aclose(self) -> None: ... # type: ignore[override] def multi(self) -> None: ... def execute_command(self, *args, **kwargs) -> Pipeline[_StrType] | Awaitable[Pipeline[_StrType]]: ... async def immediate_execute_command(self, *args, **options): ... 
From aa44eac87843e68abe5051650bfd0724bd4ae0d2 Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Wed, 11 Sep 2024 11:24:12 +0200 Subject: [PATCH 08/39] v6.0.2 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index a388c95a..500272c3 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ long_description_content_type="text/markdown", keywords=["Valkey", "key-value store", "database"], license="MIT", - version="6.0.1", + version="6.0.2", packages=find_packages( include=[ "valkey", From 99d20795683d6998c51dfce290ef497d7e9b05fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Tue, 17 Sep 2024 21:01:20 +0200 Subject: [PATCH 09/39] check typing on bloom and cache MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- .github/workflows/mypy.yml | 1 + .mypy.ini | 12 ++++++++ tests/test_asyncio/test_bloom.py | 52 ++++++++++++++++---------------- tests/test_cache.py | 6 ++-- 4 files changed, 43 insertions(+), 28 deletions(-) diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 4727b8be..e04b8a1f 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -27,6 +27,7 @@ jobs: run: | pip install mypy cryptography pyopenssl requests pip install types-setuptools + pip install types-cachetools pip install -r dev_requirements.txt pip install .[libvalkey] diff --git a/.mypy.ini b/.mypy.ini index f0436a8a..0f69ed5d 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -32,6 +32,18 @@ ignore_errors = False ignore_errors = False [mypy-tests.test_asyncio.test_pubsub] ignore_errors = False +[mypy-tests.test_cache] +ignore_errors = False +[mypy-tests.test_asyncio.test_cache] +ignore_errors = False +[mypy-tests.test_bloom] +ignore_errors = False 
+[mypy-tests.test_asyncio.test_bloom] +ignore_errors = False +#[mypy-tests.test_cluster] +#ignore_errors = False +#[mypy-tests.test_asyncio.test_cluster] +#ignore_errors = False [mypy-benchmarks.*] ignore_errors = True diff --git a/tests/test_asyncio/test_bloom.py b/tests/test_asyncio/test_bloom.py index 04528c1c..8dac9936 100644 --- a/tests/test_asyncio/test_bloom.py +++ b/tests/test_asyncio/test_bloom.py @@ -16,7 +16,7 @@ def intlist(obj): return [int(v) for v in obj] -async def test_create(decoded_r: valkey.Valkey): +async def test_create(decoded_r: valkey.Valkey[str]): """Test CREATE/RESERVE calls""" assert await decoded_r.bf().create("bloom", 0.01, 1000) assert await decoded_r.bf().create("bloom_e", 0.01, 1000, expansion=1) @@ -31,11 +31,11 @@ async def test_create(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_create(decoded_r: valkey.Valkey): +async def test_tdigest_create(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("tDigest", 100) -async def test_bf_add(decoded_r: valkey.Valkey): +async def test_bf_add(decoded_r: valkey.Valkey[str]): assert await decoded_r.bf().create("bloom", 0.01, 1000) assert 1 == await decoded_r.bf().add("bloom", "foo") assert 0 == await decoded_r.bf().add("bloom", "foo") @@ -47,7 +47,7 @@ async def test_bf_add(decoded_r: valkey.Valkey): assert [1, 0] == intlist(await decoded_r.bf().mexists("bloom", "foo", "noexist")) -async def test_bf_insert(decoded_r: valkey.Valkey): +async def test_bf_insert(decoded_r: valkey.Valkey[str]): assert await decoded_r.bf().create("bloom", 0.01, 1000) assert [1] == intlist(await decoded_r.bf().insert("bloom", ["foo"])) assert [0, 1] == intlist(await decoded_r.bf().insert("bloom", ["foo", "bar"])) @@ -77,7 +77,7 @@ async def test_bf_insert(decoded_r: valkey.Valkey): ) -async def test_bf_scandump_and_loadchunk(decoded_r: valkey.Valkey): +async def test_bf_scandump_and_loadchunk(decoded_r: valkey.Valkey[str]): # Store a filter await 
decoded_r.bf().create("myBloom", "0.0001", "1000") @@ -124,7 +124,7 @@ async def do_verify(): await decoded_r.bf().create("myBloom", "0.0001", "10000000") -async def test_bf_info(decoded_r: valkey.Valkey): +async def test_bf_info(decoded_r: valkey.Valkey[str]): expansion = 4 # Store a filter await decoded_r.bf().create("nonscaling", "0.0001", "1000", noScale=True) @@ -155,7 +155,7 @@ async def test_bf_info(decoded_r: valkey.Valkey): assert True -async def test_bf_card(decoded_r: valkey.Valkey): +async def test_bf_card(decoded_r: valkey.Valkey[str]): # return 0 if the key does not exist assert await decoded_r.bf().card("not_exist") == 0 @@ -169,7 +169,7 @@ async def test_bf_card(decoded_r: valkey.Valkey): await decoded_r.bf().card("setKey") -async def test_cf_add_and_insert(decoded_r: valkey.Valkey): +async def test_cf_add_and_insert(decoded_r: valkey.Valkey[str]): assert await decoded_r.cf().create("cuckoo", 1000) assert await decoded_r.cf().add("cuckoo", "filter") assert not await decoded_r.cf().addnx("cuckoo", "filter") @@ -194,7 +194,7 @@ async def test_cf_add_and_insert(decoded_r: valkey.Valkey): ) -async def test_cf_exists_and_del(decoded_r: valkey.Valkey): +async def test_cf_exists_and_del(decoded_r: valkey.Valkey[str]): assert await decoded_r.cf().create("cuckoo", 1000) assert await decoded_r.cf().add("cuckoo", "filter") assert await decoded_r.cf().exists("cuckoo", "filter") @@ -205,7 +205,7 @@ async def test_cf_exists_and_del(decoded_r: valkey.Valkey): assert 0 == await decoded_r.cf().count("cuckoo", "filter") -async def test_cms(decoded_r: valkey.Valkey): +async def test_cms(decoded_r: valkey.Valkey[str]): assert await decoded_r.cms().initbydim("dim", 1000, 5) assert await decoded_r.cms().initbyprob("prob", 0.01, 0.01) assert await decoded_r.cms().incrby("dim", ["foo"], [5]) @@ -221,7 +221,7 @@ async def test_cms(decoded_r: valkey.Valkey): @pytest.mark.onlynoncluster -async def test_cms_merge(decoded_r: valkey.Valkey): +async def test_cms_merge(decoded_r: 
valkey.Valkey[str]): assert await decoded_r.cms().initbydim("A", 1000, 5) assert await decoded_r.cms().initbydim("B", 1000, 5) assert await decoded_r.cms().initbydim("C", 1000, 5) @@ -237,7 +237,7 @@ async def test_cms_merge(decoded_r: valkey.Valkey): assert [16, 15, 21] == await decoded_r.cms().query("C", "foo", "bar", "baz") -async def test_topk(decoded_r: valkey.Valkey): +async def test_topk(decoded_r: valkey.Valkey[str]): # test list with empty buckets assert await decoded_r.topk().reserve("topk", 3, 50, 4, 0.9) assert [ @@ -317,7 +317,7 @@ async def test_topk(decoded_r: valkey.Valkey): assert 0.9 == round(float(info["decay"]), 1) -async def test_topk_incrby(decoded_r: valkey.Valkey): +async def test_topk_incrby(decoded_r: valkey.Valkey[str]): await decoded_r.flushdb() assert await decoded_r.topk().reserve("topk", 3, 10, 3, 1) assert [None, None, None] == await decoded_r.topk().incrby( @@ -332,7 +332,7 @@ async def test_topk_incrby(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_reset(decoded_r: valkey.Valkey): +async def test_tdigest_reset(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("tDigest", 10) # reset on empty histogram assert await decoded_r.tdigest().reset("tDigest") @@ -348,7 +348,7 @@ async def test_tdigest_reset(decoded_r: valkey.Valkey): @pytest.mark.onlynoncluster -async def test_tdigest_merge(decoded_r: valkey.Valkey): +async def test_tdigest_merge(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("to-tDigest", 10) assert await decoded_r.tdigest().create("from-tDigest", 10) # insert data-points into sketch @@ -375,7 +375,7 @@ async def test_tdigest_merge(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_min_and_max(decoded_r: valkey.Valkey): +async def test_tdigest_min_and_max(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("tDigest", 100) # insert data-points into sketch assert await decoded_r.tdigest().add("tDigest", 
[1, 2, 3]) @@ -385,8 +385,8 @@ async def test_tdigest_min_and_max(decoded_r: valkey.Valkey): @pytest.mark.experimental -@skip_ifmodversion_lt("2.4.0", "bf") -async def test_tdigest_quantile(decoded_r: valkey.Valkey): +@skip_ifmodversion_lt("2.4.0", "bf") # type: ignore[misc] +async def test_tdigest_quantile(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("tDigest", 500) # insert data-points into sketch assert await decoded_r.tdigest().add( @@ -413,7 +413,7 @@ async def test_tdigest_quantile(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_cdf(decoded_r: valkey.Valkey): +async def test_tdigest_cdf(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("tDigest", 100) # insert data-points into sketch assert await decoded_r.tdigest().add("tDigest", list(range(1, 10))) @@ -424,8 +424,8 @@ async def test_tdigest_cdf(decoded_r: valkey.Valkey): @pytest.mark.experimental -@skip_ifmodversion_lt("2.4.0", "bf") -async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey): +@skip_ifmodversion_lt("2.4.0", "bf") # type: ignore[misc] +async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("tDigest", 100) # insert data-points into sketch assert await decoded_r.tdigest().add("tDigest", list(range(1, 10))) @@ -434,7 +434,7 @@ async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_rank(decoded_r: valkey.Valkey): +async def test_tdigest_rank(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("t-digest", 500) assert await decoded_r.tdigest().add("t-digest", list(range(0, 20))) assert -1 == (await decoded_r.tdigest().rank("t-digest", -1))[0] @@ -444,7 +444,7 @@ async def test_tdigest_rank(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_revrank(decoded_r: valkey.Valkey): +async def test_tdigest_revrank(decoded_r: valkey.Valkey[str]): assert await 
decoded_r.tdigest().create("t-digest", 500) assert await decoded_r.tdigest().add("t-digest", list(range(0, 20))) assert -1 == (await decoded_r.tdigest().revrank("t-digest", 20))[0] @@ -453,7 +453,7 @@ async def test_tdigest_revrank(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_byrank(decoded_r: valkey.Valkey): +async def test_tdigest_byrank(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("t-digest", 500) assert await decoded_r.tdigest().add("t-digest", list(range(1, 11))) assert 1 == (await decoded_r.tdigest().byrank("t-digest", 0))[0] @@ -464,7 +464,7 @@ async def test_tdigest_byrank(decoded_r: valkey.Valkey): @pytest.mark.experimental -async def test_tdigest_byrevrank(decoded_r: valkey.Valkey): +async def test_tdigest_byrevrank(decoded_r: valkey.Valkey[str]): assert await decoded_r.tdigest().create("t-digest", 500) assert await decoded_r.tdigest().add("t-digest", list(range(1, 11))) assert 10 == (await decoded_r.tdigest().byrevrank("t-digest", 0))[0] @@ -474,7 +474,7 @@ async def test_tdigest_byrevrank(decoded_r: valkey.Valkey): (await decoded_r.tdigest().byrevrank("t-digest", -1))[0] -# # async def test_pipeline(decoded_r: valkey.Valkey): +# # async def test_pipeline(decoded_r: valkey.Valkey[str]): # pipeline = await decoded_r.bf().pipeline() # assert not await decoded_r.bf().execute_command("get pipeline") # diff --git a/tests/test_cache.py b/tests/test_cache.py index 63784101..25792fa5 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -8,7 +8,9 @@ from tests.conftest import _get_client from valkey import ValkeyError from valkey._cache import AbstractCache, EvictionPolicy, _LocalCache -from valkey.typing import KeyT, ResponseT + +# It is defined, just not in __all__ +from valkey.typing import KeyT, ResponseT # type: ignore[attr-defined] from valkey.utils import LIBVALKEY_AVAILABLE @@ -529,7 +531,7 @@ def test_cache_decode_response(self, local_cache, sentinel_setup, master): class 
TestCustomCache: class _CustomCache(AbstractCache): def __init__(self): - self.responses = cachetools.LRUCache(maxsize=1000) + self.responses = cachetools.LRUCache(maxsize=1000) # type: ignore[var-annotated] self.keys_to_commands = defaultdict(list) self.commands_to_keys = defaultdict(list) From 71265af2fc7d3cf507cc26785c16794af3e33e24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Tue, 17 Sep 2024 21:16:34 +0200 Subject: [PATCH 10/39] smembers returns a list, not a set. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- tests/test_asyncio/test_commands.py | 6 +++--- valkey/commands/core.pyi | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index 9ad75b4f..14d834f0 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -1434,7 +1434,7 @@ async def test_sdiffstore(self, r: valkey.asyncio.Valkey[bytes]): assert set(await r.smembers("c")) == {b"1", b"2", b"3"} await r.sadd("b", "2", "3") assert await r.sdiffstore("c", "a", "b") == 1 - assert await r.smembers("c") == {b"1", } + assert await r.smembers("c") == [b"1", ] @pytest.mark.onlynoncluster async def test_sinter(self, r: valkey.asyncio.Valkey[bytes]): @@ -1447,7 +1447,7 @@ async def test_sinter(self, r: valkey.asyncio.Valkey[bytes]): async def test_sinterstore(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "1", "2", "3") assert await r.sinterstore("c", "a", "b") == 0 - assert await r.smembers("c") == set() + assert await r.smembers("c") == list() await r.sadd("b", "2", "3") assert await r.sinterstore("c", "a", "b") == 2 assert set(await r.smembers("c")) == {b"2", b"3"} @@ -1468,7 +1468,7 @@ async def test_smove(self, r: valkey.asyncio.Valkey[bytes]): await r.sadd("a", "a1", "a2") await r.sadd("b", "b1", "b2") assert await r.smove("a", "b", "a1") - assert await r.smembers("a") == 
{b"a2", } + assert await r.smembers("a") == [b"a2", ] assert set(await r.smembers("b")) == {b"b1", b"b2", b"a1"} async def test_spop(self, r: valkey.asyncio.Valkey[bytes]): diff --git a/valkey/commands/core.pyi b/valkey/commands/core.pyi index 532612e0..d69b6710 100644 --- a/valkey/commands/core.pyi +++ b/valkey/commands/core.pyi @@ -841,7 +841,7 @@ class SetCommands(Generic[_StrType]): def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... def sismember(self, name: _Key, value: _Value) -> bool: ... - def smembers(self, name: _Key) -> builtins.set[_StrType]: ... + def smembers(self, name: _Key) -> builtins.list[_StrType]: ... def smismember(self, name, values, *args): ... def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ... @overload @@ -864,7 +864,7 @@ class AsyncSetCommands(Generic[_StrType]): async def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... async def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... async def sismember(self, name: _Key, value: _Value) -> bool: ... - async def smembers(self, name: _Key) -> builtins.set[_StrType]: ... + async def smembers(self, name: _Key) -> builtins.list[_StrType]: ... async def smismember(self, name, values, *args): ... async def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ... 
@overload From d22dbe38111823430da9419cb6f89d835009933b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Fri, 1 Nov 2024 14:16:06 +0100 Subject: [PATCH 11/39] add more tests on mypy MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- .mypy.ini | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index 0f69ed5d..358ee392 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -20,30 +20,38 @@ ignore_errors = True [mypy-tests.*] ignore_errors = True -[mypy-tests.test_commands] -ignore_errors = False -[mypy-tests.test_asyncio.test_commands] -ignore_errors = False -[mypy-tests.test_pipeline] -ignore_errors = False -[mypy-tests.test_asyncio.test_pipeline] -ignore_errors = False -[mypy-tests.test_pubsub] +[mypy-tests.test_bloom] ignore_errors = False -[mypy-tests.test_asyncio.test_pubsub] +[mypy-tests.test_asyncio.test_bloom] ignore_errors = False [mypy-tests.test_cache] ignore_errors = False [mypy-tests.test_asyncio.test_cache] ignore_errors = False -[mypy-tests.test_bloom] +[mypy-tests.test_commands] ignore_errors = False -[mypy-tests.test_asyncio.test_bloom] +[mypy-tests.test_asyncio.test_commands] ignore_errors = False #[mypy-tests.test_cluster] #ignore_errors = False #[mypy-tests.test_asyncio.test_cluster] #ignore_errors = False +#[mypy-tests.test_connection_pool] +#ignore_errors = False +#[mypy-tests.test_asyncio.test_connection_pool] +#ignore_errors = False +#[mypy-tests.test_connection] +#ignore_errors = False +#[mypy-tests.test_asyncio.test_connection] +#ignore_errors = False +[mypy-tests.test_pipeline] +ignore_errors = False +[mypy-tests.test_asyncio.test_pipeline] +ignore_errors = False +[mypy-tests.test_pubsub] +ignore_errors = False +[mypy-tests.test_asyncio.test_pubsub] +ignore_errors = False [mypy-benchmarks.*] ignore_errors = True From 0a490f233c2a5ec638e54badccc7fb120c33e574 Mon Sep 17 00:00:00 2001 From: 
Mikhail Koviazin Date: Fri, 20 Sep 2024 15:41:38 +0300 Subject: [PATCH 12/39] tests: added forgotten asserts in test_geosearch_member MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit These lines were missing the `assert` keyword and hence were meaningless. Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- tests/test_commands.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_commands.py b/tests/test_commands.py index 8b18b7e6..e86717de 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -3623,7 +3623,7 @@ def test_geosearch_member(self, r): ) # All but the coordinates are identical - geosearch_place2[:-1] == [ + assert geosearch_place2[:-1] == [ b"\x80place2", 3067.4157, 3471609625421029, @@ -3631,7 +3631,7 @@ def test_geosearch_member(self, r): assert_geo_is_close( geosearch_place2[-1], (2.187376320362091, 41.40634178640635) ) - geosearch_place1[:-1] == [ + assert geosearch_place1[:-1] == [ b"place1", 0.0, 3471609698139488, From 802ee261be786c995c7288681505af1f9c847396 Mon Sep 17 00:00:00 2001 From: Mikhail Koviazin Date: Fri, 20 Sep 2024 15:42:59 +0300 Subject: [PATCH 13/39] tests: added async geosearch tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit These tests were copied from the sync tests and adapted to use the async client. 
This commit fixes #63 Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- tests/test_asyncio/test_commands.py | 242 ++++++++++++++++++++++++++++ 1 file changed, 242 insertions(+) diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index 14d834f0..6d1426c1 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -2489,6 +2489,248 @@ async def test_geopos_no_value(self, r: valkey.asyncio.Valkey[str]): async def test_old_geopos_no_value(self, r: valkey.asyncio.Valkey[str]): assert await r.geopos("barcelona", "place1", "place2") == [] + @skip_if_server_version_lt("6.2.0") + async def test_geosearch(self, r: valkey.Valkey): + values = ( + (2.1909389952632, 41.433791470673, "place1") + + (2.1873744593677, 41.406342043777, b"\x80place2") + + (2.583333, 41.316667, "place3") + ) + await r.geoadd("barcelona", values) + assert await r.geosearch( + "barcelona", longitude=2.191, latitude=41.433, radius=1000 + ) == [b"place1"] + assert await r.geosearch( + "barcelona", longitude=2.187, latitude=41.406, radius=1000 + ) == [b"\x80place2"] + assert await r.geosearch( + "barcelona", longitude=2.191, latitude=41.433, height=1000, width=1000 + ) == [b"place1"] + assert await r.geosearch( + "barcelona", member="place3", radius=100, unit="km" + ) == [ + b"\x80place2", + b"place1", + b"place3", + ] + # test count + assert await r.geosearch( + "barcelona", member="place3", radius=100, unit="km", count=2 + ) == [b"place3", b"\x80place2"] + search_res = await r.geosearch( + "barcelona", member="place3", radius=100, unit="km", count=1, any=1 + ) + assert search_res[0] in [b"place1", b"place3", b"\x80place2"] + + @skip_unless_arch_bits(64) + @skip_if_server_version_lt("6.2.0") + async def test_geosearch_member(self, r: valkey.Valkey): + values = (2.1909389952632, 41.433791470673, "place1") + ( + 2.1873744593677, + 41.406342043777, + b"\x80place2", + ) + + await r.geoadd("barcelona", values) + assert await 
r.geosearch("barcelona", member="place1", radius=10) == [b"place1"] + + geosearch_place2, geosearch_place1 = await r.geosearch( + "barcelona", + member="place1", + radius=4000, + withdist=True, + withcoord=True, + withhash=True, + ) + + # All but the coordinates are identical + assert geosearch_place2[:-1] == [ + b"\x80place2", + 3067.4157, + 3471609625421029, + ] + assert_geo_is_close( + geosearch_place2[-1], (2.187376320362091, 41.40634178640635) + ) + assert geosearch_place1[:-1] == [ + b"place1", + 0.0, + 3471609698139488, + ] + assert_geo_is_close( + geosearch_place1[-1], (2.1909382939338684, 41.433790281840835) + ) + + @skip_if_server_version_lt("6.2.0") + async def test_geosearch_sort(self, r: valkey.Valkey): + values = (2.1909389952632, 41.433791470673, "place1") + ( + 2.1873744593677, + 41.406342043777, + "place2", + ) + await r.geoadd("barcelona", values) + assert await r.geosearch( + "barcelona", longitude=2.191, latitude=41.433, radius=3000, sort="ASC" + ) == [b"place1", b"place2"] + assert await r.geosearch( + "barcelona", longitude=2.191, latitude=41.433, radius=3000, sort="DESC" + ) == [b"place2", b"place1"] + + @skip_unless_arch_bits(64) + @skip_if_server_version_lt("6.2.0") + @pytest.mark.parametrize( + "geosearch_kwargs, expected_geosearch_result", + [ + ( + {"withdist": True, "withcoord": True, "withhash": True}, + [b"place1", 0.0881, 3471609698139488], + ), + ( + {"withdist": True, "withcoord": True}, + [b"place1", 0.0881], + ), + ( + {"withhash": True, "withcoord": True}, + [b"place1", 3471609698139488], + ), + ( + {"withdist": True, "withhash": True}, + [b"place1", 0.0881, 3471609698139488], + ), + ], + ) + async def test_geosearch_with( + self, + r: valkey.Valkey, + geosearch_kwargs: Dict[str, Any], + expected_geosearch_result: List[Any], + ): + values = (2.1909389952632, 41.433791470673, "place1") + ( + 2.1873744593677, + 41.406342043777, + "place2", + ) + await r.geoadd("barcelona", values) + + # test a bunch of combinations to test the 
parse response + # function. + geosearch_result = await r.geosearch( + "barcelona", + longitude=2.191, + latitude=41.433, + radius=1, + unit="km", + **geosearch_kwargs, + ) + assert len(geosearch_result) == 1 + if "withcoord" in geosearch_kwargs: + assert_geo_is_close( + geosearch_result[0][-1], (2.1909382939338684, 41.433790281840835) + ) + assert geosearch_result[0][:-1] == expected_geosearch_result + else: + assert geosearch_result == [expected_geosearch_result] + + assert ( + await r.geosearch( + "barcelona", + longitude=2, + latitude=1, + radius=1, + unit="km", + **geosearch_kwargs, + ) + == [] + ) + + @skip_if_server_version_lt("6.2.0") + async def test_geosearch_negative(self, r: valkey.Valkey): + # not specifying member nor longitude and latitude + with pytest.raises(exceptions.DataError): + assert await r.geosearch("barcelona") + # specifying member and longitude and latitude + with pytest.raises(exceptions.DataError): + assert await r.geosearch( + "barcelona", member="Paris", longitude=2, latitude=1 + ) + # specifying one of longitude and latitude + with pytest.raises(exceptions.DataError): + assert await r.geosearch("barcelona", longitude=2) + with pytest.raises(exceptions.DataError): + assert await r.geosearch("barcelona", latitude=2) + + # not specifying radius nor width and height + with pytest.raises(exceptions.DataError): + assert await r.geosearch("barcelona", member="Paris") + # specifying radius and width and height + with pytest.raises(exceptions.DataError): + assert await r.geosearch( + "barcelona", member="Paris", radius=3, width=2, height=1 + ) + # specifying one of width and height + with pytest.raises(exceptions.DataError): + assert await r.geosearch("barcelona", member="Paris", width=2) + with pytest.raises(exceptions.DataError): + assert await r.geosearch("barcelona", member="Paris", height=2) + + # invalid sort + with pytest.raises(exceptions.DataError): + assert await r.geosearch( + "barcelona", member="Paris", width=2, height=2, 
sort="wrong" + ) + + # invalid unit + with pytest.raises(exceptions.DataError): + assert await r.geosearch( + "barcelona", member="Paris", width=2, height=2, unit="miles" + ) + + # use any without count + with pytest.raises(exceptions.DataError): + assert await r.geosearch("barcelona", member="place3", radius=100, any=1) + + @pytest.mark.onlynoncluster + @skip_if_server_version_lt("6.2.0") + async def test_geosearchstore(self, r: valkey.Valkey): + values = (2.1909389952632, 41.433791470673, "place1") + ( + 2.1873744593677, + 41.406342043777, + "place2", + ) + + await r.geoadd("barcelona", values) + await r.geosearchstore( + "places_barcelona", + "barcelona", + longitude=2.191, + latitude=41.433, + radius=1000, + ) + assert await r.zrange("places_barcelona", 0, -1) == [b"place1"] + + @pytest.mark.onlynoncluster + @skip_unless_arch_bits(64) + @skip_if_server_version_lt("6.2.0") + async def test_geosearchstore_dist(self, r: valkey.Valkey): + values = (2.1909389952632, 41.433791470673, "place1") + ( + 2.1873744593677, + 41.406342043777, + "place2", + ) + + await r.geoadd("barcelona", values) + await r.geosearchstore( + "places_barcelona", + "barcelona", + longitude=2.191, + latitude=41.433, + radius=1000, + storedist=True, + ) + # instead of save the geo score, the distance is saved. 
+ score = await r.zscore("places_barcelona", "place1") + assert math.isclose(score, 88.05060698409301) + @skip_if_server_version_lt("3.2.0") async def test_georadius(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( From f1a8c3fcab0ac20c51650f10b189b797ba2df1c8 Mon Sep 17 00:00:00 2001 From: amirreza Date: Mon, 23 Sep 2024 18:26:31 +0330 Subject: [PATCH 14/39] make documentation link more obvious MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: amirreza Signed-off-by: Raphaël Vinot --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 64463b7d..c966ae5c 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ The Python interface to the Valkey key-value store. [![pre-release](https://img.shields.io/github/v/release/valkey-io/valkey-py?include_prereleases&label=latest-prerelease)](https://github.com/valkey-io/valkey-py/releases) [![codecov](https://codecov.io/gh/valkey-io/valkey-py/branch/main/graph/badge.svg?token=yenl5fzxxr)](https://codecov.io/gh/valkey-io/valkey-py) -[Installation](#installation) | [Usage](#usage) | [Advanced Topics](#advanced-topics) | [Contributing](https://github.com/valkey-io/valkey-py/blob/main/CONTRIBUTING.md) +[Installation](#installation) | [Usage](#usage) | [Documentation](#documentation) | [Advanced Topics](#advanced-topics) | [Contributing](https://github.com/valkey-io/valkey-py/blob/main/CONTRIBUTING.md) --------------------------------------------- @@ -85,6 +85,10 @@ Alternatively, you might want to look at [Async connections](https://valkey-py.r There is built-in support for all of the [out-of-the-box Valkey commands](https://valkey.io/commands). They are exposed using the raw Redis command names (`HSET`, `HGETALL`, etc.) except where a word (i.e. del) is reserved by the language. 
The complete set of commands can be found [here](https://github.com/valkey-io/valkey-py/tree/main/valkey/commands), or [the documentation](https://valkey-py.readthedocs.io/en/latest/commands.html). +## Documentation + +Check out the [documentation](https://valkey-py.readthedocs.io/en/latest/index.html) + ## Advanced Topics The [official Valkey command documentation](https://valkey.io/commands) From 6b5f6f07d0570f684bfd3f9f4c4cc9d034a1490b Mon Sep 17 00:00:00 2001 From: amirreza Date: Tue, 24 Sep 2024 16:42:16 +0330 Subject: [PATCH 15/39] Deleted outdated link MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: amirreza Signed-off-by: Raphaël Vinot --- docs/clustering.rst | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docs/clustering.rst b/docs/clustering.rst index c9cfe649..1642b060 100644 --- a/docs/clustering.rst +++ b/docs/clustering.rst @@ -197,11 +197,7 @@ Pattern subscribe and publish do not currently work properly due to key slots. If we hash a pattern like fo\* we will receive a keyslot for that string but there are endless possibilities for channel names based on this pattern - unknowable in advance. This feature is not disabled but -the commands are not currently recommended for use. See -`valkey-py-cluster -documentation `__ -for more. - +the commands are not currently recommended for use. .. code:: python >>> p1 = rc.pubsub() From 6e7db0b32f9ebbaabe3dc64dec0782fd628893ea Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Tue, 24 Sep 2024 16:24:54 +0200 Subject: [PATCH 16/39] doc: fix rst format MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- docs/clustering.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/clustering.rst b/docs/clustering.rst index 1642b060..19354776 100644 --- a/docs/clustering.rst +++ b/docs/clustering.rst @@ -198,6 +198,7 @@ slots. 
If we hash a pattern like fo\* we will receive a keyslot for that string but there are endless possibilities for channel names based on this pattern - unknowable in advance. This feature is not disabled but the commands are not currently recommended for use. + .. code:: python >>> p1 = rc.pubsub() From 1c8a665cea1fb56d79eb576d1d922dba74f788c7 Mon Sep 17 00:00:00 2001 From: ArtemIsmagilov Date: Mon, 30 Sep 2024 00:03:57 +0400 Subject: [PATCH 17/39] drop compose format and commands v1, use supported v2+ MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: ArtemIsmagilov Signed-off-by: Raphaël Vinot --- CONTRIBUTING.md | 2 +- docker-compose.yml => compose.yaml | 2 -- docs/examples/opentelemetry/README.md | 4 ++-- .../opentelemetry/{docker-compose.yml => compose.yaml} | 2 -- 4 files changed, 3 insertions(+), 7 deletions(-) rename docker-compose.yml => compose.yaml (99%) rename docs/examples/opentelemetry/{docker-compose.yml => compose.yaml} (99%) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 939b3da1..30880dec 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -96,7 +96,7 @@ Here's how to get started with your code contribution: c. pip install -r dev_requirements.txt c. pip install -r requirements.txt -4. If you need a development environment, run `invoke devenv`. Note: this relies on docker-compose to build environments, and assumes that you have a version supporting [docker profiles](https://docs.docker.com/compose/profiles/). +4. If you need a development environment, run `invoke devenv`. Note: this relies on docker compose to build environments, and assumes that you have a version supporting [docker profiles](https://docs.docker.com/compose/profiles/). 5. While developing, make sure the tests pass by running `invoke tests` 6. 
If you like the change and think the project could use it, send a pull request diff --git a/docker-compose.yml b/compose.yaml similarity index 99% rename from docker-compose.yml rename to compose.yaml index ec9a45e3..ade6e8e2 100644 --- a/docker-compose.yml +++ b/compose.yaml @@ -1,7 +1,5 @@ --- -version: "3.8" - services: valkey: diff --git a/docs/examples/opentelemetry/README.md b/docs/examples/opentelemetry/README.md index 4409924a..2c2c4757 100644 --- a/docs/examples/opentelemetry/README.md +++ b/docs/examples/opentelemetry/README.md @@ -30,8 +30,8 @@ pip install -r requirements.txt **Step 4**. Start the services using Docker and make sure Uptrace is running: ```shell -docker-compose up -d -docker-compose logs uptrace +docker compose up -d +docker compose logs uptrace ``` **Step 5**. Run the Valkey client example and follow the link from the CLI to view the trace: diff --git a/docs/examples/opentelemetry/docker-compose.yml b/docs/examples/opentelemetry/compose.yaml similarity index 99% rename from docs/examples/opentelemetry/docker-compose.yml rename to docs/examples/opentelemetry/compose.yaml index a0a4119b..97f61355 100644 --- a/docs/examples/opentelemetry/docker-compose.yml +++ b/docs/examples/opentelemetry/compose.yaml @@ -1,5 +1,3 @@ -version: "3" - services: clickhouse: image: clickhouse/clickhouse-server:22.7 From 68d58d171c679ffecee9575a1096ea82cf286cf1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 11:37:02 +0000 Subject: [PATCH 18/39] build(deps): bump actions/cache from 3 to 4 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 3 to 4. 
- [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/cache dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 207aa4ba..ffc80a4f 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -58,7 +58,7 @@ jobs: - uses: actions/checkout@v4 - name: Cache docker images id: custom-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ./custom-cache/ key: custom-cache @@ -94,7 +94,7 @@ jobs: - name: Cache docker images id: custom-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ./custom-cache/ fail-on-cache-miss: true @@ -150,7 +150,7 @@ jobs: - name: Cache docker images id: custom-cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ./custom-cache/ fail-on-cache-miss: true From b9d9ca04b6bff88cc7ce7bf47bbbae9e137dd758 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 12:10:15 +0000 Subject: [PATCH 19/39] build(deps): bump rojopolis/spellcheck-github-actions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [rojopolis/spellcheck-github-actions](https://github.com/rojopolis/spellcheck-github-actions) from 0.41.0 to 0.42.0. 
- [Release notes](https://github.com/rojopolis/spellcheck-github-actions/releases) - [Changelog](https://github.com/rojopolis/spellcheck-github-actions/blob/master/CHANGELOG.md) - [Commits](https://github.com/rojopolis/spellcheck-github-actions/compare/0.41.0...0.42.0) --- updated-dependencies: - dependency-name: rojopolis/spellcheck-github-actions dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: Raphaël Vinot --- .github/workflows/spellcheck.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 8beeff18..cdc9a754 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -8,7 +8,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Check Spelling - uses: rojopolis/spellcheck-github-actions@0.41.0 + uses: rojopolis/spellcheck-github-actions@0.42.0 with: config_path: .github/spellcheck-settings.yml task_name: Markdown From fde6bab8895c97e638134cc341112a7bfacbf552 Mon Sep 17 00:00:00 2001 From: ArtemIsmagilov Date: Sun, 29 Sep 2024 23:05:15 +0400 Subject: [PATCH 20/39] sort methods `acl_deluser` and `acl_dryrun` by alphabetically, checked doc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: ArtemIsmagilov Signed-off-by: Raphaël Vinot --- valkey/commands/core.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/valkey/commands/core.py b/valkey/commands/core.py index 014cb01d..75f45eec 100644 --- a/valkey/commands/core.py +++ b/valkey/commands/core.py @@ -69,14 +69,6 @@ def acl_cat(self, category: Union[str, None] = None, **kwargs) -> ResponseT: pieces: list[EncodableT] = [category] if category else [] return self.execute_command("ACL CAT", *pieces, **kwargs) - def acl_dryrun(self, username, *args, **kwargs): - """ - Simulate the execution of a given command by a given 
``username``. - - For more information see https://valkey.io/commands/acl-dryrun - """ - return self.execute_command("ACL DRYRUN", username, *args, **kwargs) - def acl_deluser(self, *username: str, **kwargs) -> ResponseT: """ Delete the ACL for the specified ``username``\\s @@ -85,6 +77,14 @@ def acl_deluser(self, *username: str, **kwargs) -> ResponseT: """ return self.execute_command("ACL DELUSER", *username, **kwargs) + def acl_dryrun(self, username, *args, **kwargs): + """ + Simulate the execution of a given command by a given ``username``. + + For more information see https://valkey.io/commands/acl-dryrun + """ + return self.execute_command("ACL DRYRUN", username, *args, **kwargs) + def acl_genpass(self, bits: Union[int, None] = None, **kwargs) -> ResponseT: """Generate a random password value. If ``bits`` is supplied then use this number of bits, rounded to From 782361886a4b455da319450266d58a4506728207 Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Tue, 15 Oct 2024 09:27:57 +0200 Subject: [PATCH 21/39] Temporarily fix https://github.com/actions/runner-images/issues/10781 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index ffc80a4f..4a73aead 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -27,7 +27,7 @@ jobs: dependency-audit: name: Dependency audit - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - uses: pypa/gh-action-pip-audit@v1.1.0 From 60fa28a7d4d366716b92514aa6fa8d6f9e6df6b7 Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Tue, 15 Oct 2024 09:16:17 +0200 Subject: [PATCH 22/39] parsers: resp3: be less verbose MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Close #111 Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- valkey/_parsers/resp3.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/valkey/_parsers/resp3.py b/valkey/_parsers/resp3.py index c90f65e9..274fe433 100644 --- a/valkey/_parsers/resp3.py +++ b/valkey/_parsers/resp3.py @@ -19,7 +19,7 @@ def __init__(self, socket_read_size): def handle_pubsub_push_response(self, response): logger = getLogger("push_response") - logger.info("Push response: " + str(response)) + logger.debug("Push response: " + str(response)) return response def read_response(self, disable_decoding=False, push_request=False): @@ -150,7 +150,7 @@ def __init__(self, socket_read_size): def handle_pubsub_push_response(self, response): logger = getLogger("push_response") - logger.info("Push response: " + str(response)) + logger.debug("Push response: " + str(response)) return response async def read_response( From 8ec92e85d954461d9230514714a94e80caeb0177 Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Wed, 30 Oct 2024 10:04:55 +0100 Subject: [PATCH 23/39] Revert "Temporarily fix https://github.com/actions/runner-images/issues/10781" MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 318019a1326ed69696627edd51db80d7a2840afd. 
Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 4a73aead..ffc80a4f 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -27,7 +27,7 @@ jobs: dependency-audit: name: Dependency audit - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: pypa/gh-action-pip-audit@v1.1.0 From 39616e98fe94ad4e1917461eee87fac702036a12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= Date: Fri, 1 Nov 2024 14:30:25 +0100 Subject: [PATCH 24/39] chg: Sync with upstream MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Raphaël Vinot --- tests/test_asyncio/test_commands.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index 6d1426c1..47e1fa5a 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -2490,7 +2490,7 @@ async def test_old_geopos_no_value(self, r: valkey.asyncio.Valkey[str]): assert await r.geopos("barcelona", "place1", "place2") == [] @skip_if_server_version_lt("6.2.0") - async def test_geosearch(self, r: valkey.Valkey): + async def test_geosearch(self, r: valkey.asyncio.Valkey[str]): values = ( (2.1909389952632, 41.433791470673, "place1") + (2.1873744593677, 41.406342043777, b"\x80place2") @@ -2518,13 +2518,13 @@ async def test_geosearch(self, r: valkey.Valkey): "barcelona", member="place3", radius=100, unit="km", count=2 ) == [b"place3", b"\x80place2"] search_res = await r.geosearch( - "barcelona", member="place3", radius=100, unit="km", count=1, any=1 + "barcelona", member="place3", radius=100, unit="km", count=1, any=True ) assert search_res[0] in [b"place1", b"place3", 
b"\x80place2"] @skip_unless_arch_bits(64) @skip_if_server_version_lt("6.2.0") - async def test_geosearch_member(self, r: valkey.Valkey): + async def test_geosearch_member(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2562,7 +2562,7 @@ async def test_geosearch_member(self, r: valkey.Valkey): ) @skip_if_server_version_lt("6.2.0") - async def test_geosearch_sort(self, r: valkey.Valkey): + async def test_geosearch_sort(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2601,9 +2601,9 @@ async def test_geosearch_sort(self, r: valkey.Valkey): ) async def test_geosearch_with( self, - r: valkey.Valkey, - geosearch_kwargs: Dict[str, Any], - expected_geosearch_result: List[Any], + r: valkey.asyncio.Valkey[str], + geosearch_kwargs: dict[str, Any], + expected_geosearch_result: list[Any], ): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, @@ -2644,7 +2644,7 @@ async def test_geosearch_with( ) @skip_if_server_version_lt("6.2.0") - async def test_geosearch_negative(self, r: valkey.Valkey): + async def test_geosearch_negative(self, r: valkey.asyncio.Valkey[str]): # not specifying member nor longitude and latitude with pytest.raises(exceptions.DataError): assert await r.geosearch("barcelona") @@ -2687,11 +2687,11 @@ async def test_geosearch_negative(self, r: valkey.Valkey): # use any without count with pytest.raises(exceptions.DataError): - assert await r.geosearch("barcelona", member="place3", radius=100, any=1) + assert await r.geosearch("barcelona", member="place3", radius=100, any=True) @pytest.mark.onlynoncluster @skip_if_server_version_lt("6.2.0") - async def test_geosearchstore(self, r: valkey.Valkey): + async def test_geosearchstore(self, r: valkey.asyncio.Valkey[bytes]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2711,7 
+2711,7 @@ async def test_geosearchstore(self, r: valkey.Valkey): @pytest.mark.onlynoncluster @skip_unless_arch_bits(64) @skip_if_server_version_lt("6.2.0") - async def test_geosearchstore_dist(self, r: valkey.Valkey): + async def test_geosearchstore_dist(self, r: valkey.asyncio.Valkey[str]): values = (2.1909389952632, 41.433791470673, "place1") + ( 2.1873744593677, 41.406342043777, @@ -2729,6 +2729,7 @@ async def test_geosearchstore_dist(self, r: valkey.Valkey): ) # instead of save the geo score, the distance is saved. score = await r.zscore("places_barcelona", "place1") + assert score is not None assert math.isclose(score, 88.05060698409301) @skip_if_server_version_lt("3.2.0") From 564231c1f0b6f3f8830e2269e7292f6b8af1eaec Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Tue, 15 Oct 2024 09:27:57 +0200 Subject: [PATCH 25/39] Temporarily fix https://github.com/actions/runner-images/issues/10781 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index ffc80a4f..4a73aead 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -27,7 +27,7 @@ jobs: dependency-audit: name: Dependency audit - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - uses: pypa/gh-action-pip-audit@v1.1.0 From fd67799b61b4d037322eb1c3a3f7a55a7503f50c Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Wed, 30 Oct 2024 10:04:55 +0100 Subject: [PATCH 26/39] Revert "Temporarily fix https://github.com/actions/runner-images/issues/10781" MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 318019a1326ed69696627edd51db80d7a2840afd. 
Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 4a73aead..ffc80a4f 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -27,7 +27,7 @@ jobs: dependency-audit: name: Dependency audit - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: pypa/gh-action-pip-audit@v1.1.0 From 9bca022b32999090217dc6c5582ef8f4f14d9879 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 11:31:53 +0000 Subject: [PATCH 27/39] build(deps): bump rojopolis/spellcheck-github-actions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [rojopolis/spellcheck-github-actions](https://github.com/rojopolis/spellcheck-github-actions) from 0.42.0 to 0.44.0. - [Release notes](https://github.com/rojopolis/spellcheck-github-actions/releases) - [Changelog](https://github.com/rojopolis/spellcheck-github-actions/blob/master/CHANGELOG.md) - [Commits](https://github.com/rojopolis/spellcheck-github-actions/compare/0.42.0...0.44.0) --- updated-dependencies: - dependency-name: rojopolis/spellcheck-github-actions dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: Raphaël Vinot --- .github/workflows/spellcheck.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index cdc9a754..83071568 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -8,7 +8,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Check Spelling - uses: rojopolis/spellcheck-github-actions@0.42.0 + uses: rojopolis/spellcheck-github-actions@0.44.0 with: config_path: .github/spellcheck-settings.yml task_name: Markdown From b26e9e95cbc98092d8c6559c679ea65a130cbd38 Mon Sep 17 00:00:00 2001 From: Saverio Proto Date: Thu, 7 Nov 2024 10:11:21 +0100 Subject: [PATCH 28/39] Set socket_timeout default value to 5 seconds MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes https://github.com/valkey-io/valkey-py/issues/119 Signed-off-by: Saverio Proto Signed-off-by: Raphaël Vinot --- valkey/asyncio/client.py | 2 +- valkey/asyncio/cluster.py | 2 +- valkey/asyncio/connection.py | 2 +- valkey/connection.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/valkey/asyncio/client.py b/valkey/asyncio/client.py index 2098d585..dc4f2021 100644 --- a/valkey/asyncio/client.py +++ b/valkey/asyncio/client.py @@ -205,7 +205,7 @@ def __init__( port: int = 6379, db: Union[str, int] = 0, password: Optional[str] = None, - socket_timeout: Optional[float] = None, + socket_timeout: Optional[float] = 5, socket_connect_timeout: Optional[float] = None, socket_keepalive: Optional[bool] = None, socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, diff --git a/valkey/asyncio/cluster.py b/valkey/asyncio/cluster.py index c496ae0c..3386eddc 100644 --- a/valkey/asyncio/cluster.py +++ b/valkey/asyncio/cluster.py @@ -259,7 +259,7 @@ def __init__( socket_connect_timeout: Optional[float] = None, socket_keepalive: bool = False, 
socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, - socket_timeout: Optional[float] = None, + socket_timeout: Optional[float] = 5, retry: Optional["Retry"] = None, retry_on_error: Optional[List[Type[Exception]]] = None, # SSL related kwargs diff --git a/valkey/asyncio/connection.py b/valkey/asyncio/connection.py index c7a18ad9..20b2a7f8 100644 --- a/valkey/asyncio/connection.py +++ b/valkey/asyncio/connection.py @@ -136,7 +136,7 @@ def __init__( *, db: Union[str, int] = 0, password: Optional[str] = None, - socket_timeout: Optional[float] = None, + socket_timeout: Optional[float] = 5, socket_connect_timeout: Optional[float] = None, retry_on_timeout: bool = False, retry_on_error: Union[list, _Sentinel] = SENTINEL, diff --git a/valkey/connection.py b/valkey/connection.py index a85b3db2..6e2861cf 100644 --- a/valkey/connection.py +++ b/valkey/connection.py @@ -137,7 +137,7 @@ def __init__( self, db: int = 0, password: Optional[str] = None, - socket_timeout: Optional[float] = None, + socket_timeout: Optional[float] = 5, socket_connect_timeout: Optional[float] = None, retry_on_timeout: bool = False, retry_on_error=SENTINEL, From 6e2d71d0e3ad0aeface2e56db3d164b63c2cc26b Mon Sep 17 00:00:00 2001 From: Mikhail Koviazin Date: Mon, 25 Nov 2024 10:57:13 +0100 Subject: [PATCH 29/39] tests: take into account changed behaviour of READONLY Behaviour of READONLY was changed in https://github.com/valkey-io/valkey/pull/325 which became a part of 8.0.0. This caused test_readonly_invalid_cluster_state to fail. This commit takes into account this change. 
Signed-off-by: Mikhail Koviazin --- tests/conftest.py | 5 +++++ tests/test_asyncio/test_commands.py | 14 ++++++++++---- tests/test_commands.py | 14 ++++++++++---- 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9c6e1015..80700cfc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -456,6 +456,11 @@ def master_host(request): return parts.hostname, (parts.port or 6379) +@pytest.fixture() +def valkey_version(): + return Version(VALKEY_INFO["version"]) + + def wait_for_command(client, monitor, command, key=None): # issue a command with a key name that's local to this process. # if we find a command with our key before the command we're waiting diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index 47e1fa5a..28e8dbca 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -16,6 +16,7 @@ import pytest import pytest_asyncio import valkey +from packaging.version import Version from tests.conftest import ( assert_geo_is_close, assert_resp_response, @@ -2382,13 +2383,18 @@ async def test_readwrite(self, r: valkey.asyncio.Valkey[str]): @skip_if_server_version_lt("3.0.0") @pytest.mark.onlynoncluster - async def test_readonly_invalid_cluster_state(self, r: valkey.asyncio.Valkey[str]): - with pytest.raises(exceptions.ValkeyError): - await r.readonly() + async def test_readonly(self, r: valkey.asyncio.Valkey[str], valkey_version: Version): + # NOTE: Valkey 8.0.0 changes the behaviour of READONLY + # See https://github.com/valkey-io/valkey/pull/325 + if valkey_version < Version("8.0.0"): + with pytest.raises(exceptions.ValkeyError): + await r.readonly() + else: + assert await r.readonly() is True @skip_if_server_version_lt("3.0.0") @pytest.mark.onlynoncluster - async def test_readonly(self, mock_cluster_resp_ok): + async def test_mock_readonly(self, mock_cluster_resp_ok): assert await mock_cluster_resp_ok.readonly() is True # GEO COMMANDS 
diff --git a/tests/test_commands.py b/tests/test_commands.py index e86717de..e5de5bdf 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -14,6 +14,7 @@ import pytest import valkey +from packaging.version import Version from valkey import exceptions from valkey._parsers.helpers import ( _ValkeyCallbacks, @@ -3428,13 +3429,18 @@ def test_readwrite(self, r): @pytest.mark.onlynoncluster @skip_if_server_version_lt("3.0.0") - def test_readonly_invalid_cluster_state(self, r): - with pytest.raises(exceptions.ValkeyError): - r.readonly() + def test_readonly(self, r, valkey_version): + # NOTE: Valkey 8.0.0 changes the behaviour of READONLY + # See https://github.com/valkey-io/valkey/pull/325 + if valkey_version < Version("8.0.0"): + with pytest.raises(exceptions.ValkeyError): + r.readonly() + else: + assert r.readonly() is True @pytest.mark.onlynoncluster @skip_if_server_version_lt("3.0.0") - def test_readonly(self, mock_cluster_resp_ok): + def test_readonly_mock(self, mock_cluster_resp_ok): assert mock_cluster_resp_ok.readonly() is True # GEO COMMANDS From 4325ae20ace519b3149e4a8c969d32bcb3aeaf70 Mon Sep 17 00:00:00 2001 From: Mikhail Koviazin Date: Mon, 25 Nov 2024 10:59:21 +0100 Subject: [PATCH 30/39] tests: mark test_hash for skipping MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Hash commands are part of proprietary module that Valkey does not implement for now. Mark them for skipping just like JSON and search. 
Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- tests/test_asyncio/test_hash.py | 3 +++ tests/test_hash.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/tests/test_asyncio/test_hash.py b/tests/test_asyncio/test_hash.py index d4f18053..13549c75 100644 --- a/tests/test_asyncio/test_hash.py +++ b/tests/test_asyncio/test_hash.py @@ -1,8 +1,11 @@ import asyncio from datetime import datetime, timedelta +import pytest from tests.conftest import skip_if_server_version_lt +pytestmark = pytest.mark.skip + @skip_if_server_version_lt("7.3.240") async def test_hexpire_basic(r): diff --git a/tests/test_hash.py b/tests/test_hash.py index 7145b10a..9519e0a6 100644 --- a/tests/test_hash.py +++ b/tests/test_hash.py @@ -4,6 +4,8 @@ import pytest from tests.conftest import skip_if_server_version_lt +pytestmark = pytest.mark.skip + @skip_if_server_version_lt("7.3.240") def test_hexpire_basic(r): From 93b1e6aadf8d3f05e4ef8231ee663fb17787ac19 Mon Sep 17 00:00:00 2001 From: Mikhail Koviazin Date: Mon, 25 Nov 2024 14:30:56 +0100 Subject: [PATCH 31/39] tests: make test_client_kill_filter_by_maxage more robust MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously the test relied on the amount of clients that are currently connected to the server. This is not robust as this can be affected by the outside or a bad timing. This caused quite a lot of test case failures. The proper way to handle this is: * Create a client and assign a distinguishable name to it * Verify it's in `CLIENT LIST` * Sleep for enough time for it to be killed by maxage we provide * Verify it's not anymore in `CLIENT LIST` This commit does exactly that. 
Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- tests/test_commands.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/test_commands.py b/tests/test_commands.py index e5de5bdf..593f2bb9 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -702,11 +702,15 @@ def test_client_kill_filter_by_user(self, r, request): @skip_if_server_version_lt("7.3.240") @pytest.mark.onlynoncluster def test_client_kill_filter_by_maxage(self, r, request): - _get_client(valkey.Valkey, request, flushdb=False) + client = _get_client(valkey.Valkey, request, flushdb=False) + client_name = "test-kill-by-maxage" + client.client_setname(client_name) time.sleep(4) - assert len(r.client_list()) >= 2 + clients = r.client_list() + assert client_name in [c["name"] for c in clients] r.client_kill_filter(maxage=2) - assert len(r.client_list()) == 1 + clients = r.client_list() + assert client_name not in [c["name"] for c in clients] @pytest.mark.onlynoncluster @skip_if_server_version_lt("2.9.50") From a87c4b712ba9a74745710f3c152e4fb65cb48474 Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Wed, 30 Oct 2024 10:40:13 +0100 Subject: [PATCH 32/39] Bump minimum libvalkey version to 4.0.1 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Salvatore Mesoraca Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index ffc80a4f..bcb88d7f 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -108,7 +108,7 @@ jobs: pip install -r requirements.txt pip install -r dev_requirements.txt if [ "${{matrix.connection-type}}" == "libvalkey" ]; then - pip install "libvalkey>=4.0.0" + pip install "libvalkey>=4.0.1" fi invoke devenv if [[ "${{matrix.test-type}}" == 
"standalone" ]]; then diff --git a/setup.py b/setup.py index 500272c3..aff5f60f 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ "Programming Language :: Python :: Implementation :: PyPy", ], extras_require={ - "libvalkey": ["libvalkey>=4.0.0"], + "libvalkey": ["libvalkey>=4.0.1"], "ocsp": ["cryptography>=36.0.1", "pyopenssl==23.2.1", "requests>=2.31.0"], }, ) From 92137ef0e2c7d7cf5f923605e1c77b0a70ddfb58 Mon Sep 17 00:00:00 2001 From: Mikhail Koviazin Date: Wed, 6 Nov 2024 11:00:28 +0100 Subject: [PATCH 33/39] Update SSL certificates to include key usage MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In Python 3.13, `ssl.create_default_context()` added `VERIFY_X509_STRICT` to the flags by default which caused the tests to fail due to missing key usage. This commit adds it to the certificate configuration and replaces the certificates with reconfigured ones. Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- dockers/stunnel/create_certs.sh | 32 +++++++++++------ dockers/stunnel/keys/ca-cert.pem | 34 +++++++++--------- dockers/stunnel/keys/ca-key.pem | 52 ++++++++++++++-------------- dockers/stunnel/keys/client-cert.pem | 32 +++++++++-------- dockers/stunnel/keys/client-key.pem | 52 ++++++++++++++-------------- dockers/stunnel/keys/client-req.pem | 27 ++++++++------- dockers/stunnel/keys/server-cert.pem | 32 +++++++++-------- dockers/stunnel/keys/server-key.pem | 52 ++++++++++++++-------------- dockers/stunnel/keys/server-req.pem | 27 ++++++++------- dockers/stunnel/openssl.cnf | 15 ++++++++ 10 files changed, 194 insertions(+), 161 deletions(-) create mode 100644 dockers/stunnel/openssl.cnf diff --git a/dockers/stunnel/create_certs.sh b/dockers/stunnel/create_certs.sh index fa3e22d1..e64bab74 100755 --- a/dockers/stunnel/create_certs.sh +++ b/dockers/stunnel/create_certs.sh @@ -2,14 +2,16 @@ set -e -DESTDIR=`dirname "$0"`/keys +CONFIG_FILE=$(realpath "$(dirname "$0")")/openssl.cnf + +DESTDIR=$(dirname 
"$0")/keys test -d ${DESTDIR} || mkdir ${DESTDIR} cd ${DESTDIR} which openssl &>/dev/null if [ $? -ne 0 ]; then - echo "No openssl binary present, exiting." - exit 1 + echo "No openssl binary present, exiting." + exit 1 fi openssl genrsa -out ca-key.pem 2048 &>/dev/null @@ -17,29 +19,39 @@ openssl genrsa -out ca-key.pem 2048 &>/dev/null openssl req -new -x509 -nodes -days 365000 \ -key ca-key.pem \ -out ca-cert.pem \ - -subj "/CN=valkey-py-ca" &>/dev/null + -config "$CONFIG_FILE" \ + -extensions v3_ca \ + -subj "/CN=valkey-py-ca" -openssl req -newkey rsa:2048 -nodes -days 365000 \ +openssl req -newkey rsa:2048 -nodes \ -keyout server-key.pem \ -out server-req.pem \ - -subj "/CN=valkey-py-server" &>/dev/null + -config "$CONFIG_FILE" \ + -extensions v3_req \ + -subj "/CN=valkey-py-server" openssl x509 -req -days 365000 -set_serial 01 \ -in server-req.pem \ -out server-cert.pem \ -CA ca-cert.pem \ - -CAkey ca-key.pem &>/dev/null + -CAkey ca-key.pem \ + -extfile "$CONFIG_FILE" \ + -extensions v3_req -openssl req -newkey rsa:2048 -nodes -days 365000 \ +openssl req -newkey rsa:2048 -nodes \ -keyout client-key.pem \ -out client-req.pem \ - -subj "/CN=valkey-py-client" &>/dev/null + -config "$CONFIG_FILE" \ + -extensions v3_req \ + -subj "/CN=valkey-py-client" openssl x509 -req -days 365000 -set_serial 01 \ -in client-req.pem \ -out client-cert.pem \ -CA ca-cert.pem \ - -CAkey ca-key.pem &>/dev/null + -CAkey ca-key.pem \ + -extfile "$CONFIG_FILE" \ + -extensions v3_req echo "Keys generated in ${DESTDIR}:" ls diff --git a/dockers/stunnel/keys/ca-cert.pem b/dockers/stunnel/keys/ca-cert.pem index 291cf8e2..a0371e07 100644 --- a/dockers/stunnel/keys/ca-cert.pem +++ b/dockers/stunnel/keys/ca-cert.pem @@ -1,19 +1,19 @@ -----BEGIN CERTIFICATE----- -MIIDDzCCAfegAwIBAgIUZWdrJiIH/w7FJkNbLTYldxOFEpswDQYJKoZIhvcNAQEL -BQAwFjEUMBIGA1UEAwwLcmVkaXMtcHktY2EwIBcNMjQwNTA5MDcyMDE4WhgPMzAy -MzA5MTAwNzIwMThaMBYxFDASBgNVBAMMC3JlZGlzLXB5LWNhMIIBIjANBgkqhkiG 
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0N9BXLRx3Hxb+ZGuKi5hZabcDWDMEeUGunJG -F1ijxO9XbNWXxYiR127Le2dMkS3TefU3CNiiYJa7eRxMPAS/wGUp6Bb7LrCoeC3F -1bfJSYnzC6SwhMq66m51VhqctjAbJxBBAPYqyNBFB2w2BQZOIkKDNPgPJTDNmF/7 -G/5jmAaOPlhm1GITnT+sSTyfr/JcoRRbV9VTVc9VUaTjk6ytHsW+K2sK+uWrjdig -qdzZDng0gtasTn907QkTDDyR4E/UY9N47aD2Jy5F3XHesy9kEfuppq+A1WYOs8/H -bXgEL53ncayqDNAgjnid5kHvKJ9wTAPSMDqmupHG0l5ADisahwIDAQABo1MwUTAd -BgNVHQ4EFgQUWg70hcbq4zibHXAFlZd8mHVEWzowHwYDVR0jBBgwFoAUWg70hcbq -4zibHXAFlZd8mHVEWzowDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC -AQEAe1qupf8GoqCgtzTwFCwmcDygLibX4vI/EfCMOLVZHMgDacDwQbmYPlM+goJT -Pz8WCklopFcMJ6MSdUGy3g4hjKmJpKttTSuhEd3uZWPZYjhRj2SY8531/aAajg9/ -oezyvlgN/DWXAREG31XWyXLzPU7VLbg99mYB+2+lo2cAciAOCBdIOu6WzqnQax82 -aDSqXIHiTGc/5QYZ6ZIzdVRYiVdddKSxTNKZn9x0hu3L8r2e9ryGLLVKJmZfNZDS -tXYwiY3fE0EwYViIPiPlmBEXiBhHlC2kAQMFK8Qd4LgX6rGki4luL15GYxxKPQbF -EtDS9EqM4EdRWZq3SDjOA1zODA== +MIIC/TCCAeWgAwIBAgIUL0/OSD+P0ZISmuNtnbVNjymQn3wwDQYJKoZIhvcNAQEL +BQAwFzEVMBMGA1UEAwwMdmFsa2V5LXB5LWNhMCAXDTI0MTEwMTExNTEwMFoYDzMw +MjQwMzA0MTE1MTAwWjAXMRUwEwYDVQQDDAx2YWxrZXktcHktY2EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaENi99I937j1QW4QOM7YSPHzymMHJpnRO +ZP9JYDxOO7XjKpRwhyU4hM3QfxeNJi04VKv+FZe8QswCSqyp6OeNFPAuQ2M3Shcl +neUymoSVsQqyqzrJ8G4qW3sAMdvG32rA8sRsOewSVABnsi0wUZS+0+4EMR+L372O +WDd9ZV88uePwsY6MTfqvxoyh0S+5E3xdyep956+LGotr+maDZ/MrEP2Kl1StWv4W +mS0Gd7bzJaGsCazGXfc22JLwztBG/JgZdjI6T3e1ION0VpaQ82uMqvFmajmPxWUU +8lbjAzeHSGOJq+BZmPVh6NFp6Pn1xdH8OOHW1CW8UMaAjQre37bHAgMBAAGjPzA9 +MA8GA1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQCiVq1mATQ +GX/9xPxG9l0soukgFzANBgkqhkiG9w0BAQsFAAOCAQEALUxF0RNlfpj55H2ku7r6 +aYcKsElzmCdgICxc0jrhvlMT7yv03nt0EOxgx4yWeoCNNKcAhAy9rHh+3pfyXwS7 +RAkwvwTxbqfdXB/mviolrPus0fn8dfC0ZpVSS8DYxS54ziFU0BkZi+odlkBA5PBE +p6p7kWwx6hc1h+F6abrNEivLe7G5V1Z8sIBNkj9Xj36muDXwNJjCOTq2FyeRRV4H +C9ztHK4iVhlw2UYHZ8dQjyI/MSPrAyMVbmbglhIdGGoE+JGAixWkB02kjySQ6lxh +Yt7b7icD4hmHxnXoxoN31wNF4YMePMZmQsuQEjjndSg5Nt+Vbk1Bk/jK88p297vi +gQ== -----END CERTIFICATE----- diff --git 
a/dockers/stunnel/keys/ca-key.pem b/dockers/stunnel/keys/ca-key.pem index 25989d08..715b86c9 100644 --- a/dockers/stunnel/keys/ca-key.pem +++ b/dockers/stunnel/keys/ca-key.pem @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDQ30FctHHcfFv5 -ka4qLmFlptwNYMwR5Qa6ckYXWKPE71ds1ZfFiJHXbst7Z0yRLdN59TcI2KJglrt5 -HEw8BL/AZSnoFvsusKh4LcXVt8lJifMLpLCEyrrqbnVWGpy2MBsnEEEA9irI0EUH -bDYFBk4iQoM0+A8lMM2YX/sb/mOYBo4+WGbUYhOdP6xJPJ+v8lyhFFtX1VNVz1VR -pOOTrK0exb4rawr65auN2KCp3NkOeDSC1qxOf3TtCRMMPJHgT9Rj03jtoPYnLkXd -cd6zL2QR+6mmr4DVZg6zz8dteAQvnedxrKoM0CCOeJ3mQe8on3BMA9IwOqa6kcbS -XkAOKxqHAgMBAAECggEAB16eh28qcUrF/VPsNDrMtEcjOSmdfv14s6K34bepQkKQ -8BsdLsVhzUXF0jB+iBojfbMZjQCvwf6vgKzEl9LcZ8+/Sca9zWjtmMfsqgdrsmI2 -psYvIDr9m1XoYpsFGnyEs2fPE1dG19eusn4D7et0svVr0bZK5SyypFoGmcyWUP/M -kA990HAP7enGzPfpvcpr++Iu3EwWlTY3rjYgh9a7AiFhtj9zDzb9Sg0+4Xl9+8TZ -dsOvyVsiLu09MZ3vScGg5l+46w+rai+R0IxpgI9QM0sMxAS3AYFY666akrJqn6NU -S0Q5Q9gZ5V9hHxU7IHfo3weygPQuBW07nbwtX6+JCQKBgQDp7+smBlstRD+1/ZHJ -KO4Xhi+yrhtkKzViC+gF2vXpZ1GQ+3plRJFzRMFu+LkBgn1jPfg479Tm7CM4W4vM -cTZo45+hhnpwmLGnltTf3Vw23yXzLdUMenaE2u66PWh3DFPkPHwNqb30QGnx131Q -Mjnp+2EsBdiZ1d8TFF815ucG7QKBgQDkkiz7I4JgGGCbd51AseFryHgUepsrgeaA -DIWKEKBOoxOnfWH7JOxtm0oXcpWHLciQ4M6FaTFNv2vNA9Hrz5yApXFwIkKgXVU9 -+zsok4eWdEYmwxZFwjCNYvzsIDGBBwa1PQeps6C5L+nciOE8IZHYW7egAR96prV3 -E4ZQ6aWkwwKBgQCL/nJXIAiiLyx9SVBb9C1/UGLs57ommKDqmrtv/ZeZ5KVwQL3/ -KihstaGYOinkmGVW5XfNAuECjB+Lk2U2pC1uWYFm1SYiiY4O/3lGup57i9CXFT9g -p0yTtryUITmJvIvbksKeHo05RO7hthYczuHPfwqooJr9fHpxXYiYpiRtBQKBgCp0 -kFBRhyzsOj2GWTokEDfh85PyNhI9vZ+5M7CyZ+RTXBo3KtToRdYSCxAR435JXcCz -UQjswhCr5o0dEYfYdzxZ/pkSdAevbl7l5FYkGQI0NLeMcv2gFT6dzVban/dUY8WU -QXEfAVKEeM7SyetOXPWwC4p3yu4QOxKUGNW8oFzbAoGBAK3WKV51jhmMz3dtCkGW -UZxcDp5q/3uV29/UUF3/CNEhLcVuQLtNOPYRG+S9zMvwo0SNsz4mZJH1nFDSWSNL -xGXg/Ret9Li4JQTWD47kcheBCVLoTtX1bc66D2LlXDKzN5DRBACxKkAJPUjouhMB -mPDd05msnfgzPBMHMwsNjg5W +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDaENi99I937j1Q 
+W4QOM7YSPHzymMHJpnROZP9JYDxOO7XjKpRwhyU4hM3QfxeNJi04VKv+FZe8QswC +Sqyp6OeNFPAuQ2M3ShclneUymoSVsQqyqzrJ8G4qW3sAMdvG32rA8sRsOewSVABn +si0wUZS+0+4EMR+L372OWDd9ZV88uePwsY6MTfqvxoyh0S+5E3xdyep956+LGotr ++maDZ/MrEP2Kl1StWv4WmS0Gd7bzJaGsCazGXfc22JLwztBG/JgZdjI6T3e1ION0 +VpaQ82uMqvFmajmPxWUU8lbjAzeHSGOJq+BZmPVh6NFp6Pn1xdH8OOHW1CW8UMaA +jQre37bHAgMBAAECggEAUbk4kVADKI4nemMhxXTJymHS7dQj5B+2vN6K8gPX9fXY +v67ofJeZcmoK/BV1TRe+oLrSzmFnQU3DSSSVOwQnKy9qp9vnZgQlUpqvF9zizXrR +KI6VdLLfho5MNZF57Tkzt+YDiQ/YEjJbCIG0/8PDPBUOwZFrYi9SyfLzsNH59DaB +Nf64J6KpMLMEP8BzDf9MkDWjg/uZZ5rJ2VDkl11QZCmyAAPMXps1nH4WJojVEwB7 +ul/VK8wrqiiyZqzDesw/jcET7DrCHtix35An8NJZAtWPILgHHnAlLPmG7a0uyy3Y +XaeqZRppUkuSv/OKf3Q0l/2IzjcNb3tjbktVSCXgGQKBgQDyxozi0V09Bc9w7yI3 +DaREFSs0h134ByzvsoObJMZTc9Qkis8VZB+IhMO4RaP2DNstJVqkl0pQWCr/C5ln +d6tYUtueeQ/9SYusnLIxu+HtsySPzBKthLrWArPQ6U1q70irxNovcSxOWimuSUIA +ftzWV6mCdBUsCImGZaiKl7GqDQKBgQDl8blf5iVRArHA/8vTBwdBNf3tkuctFcE2 +Pqmg5KQmGEvIO0S/DB2zAY+4JF4E4VrdJL47xXTnf+XN2ptQUf5kjwLflEaimupv +knwtNG+fq6hcWMeN+hnf0+A81b03Klo3H2JsuQ3EZ8kXOrpF8t/PanXz9UuV8Bkl +IjDwBLCTIwKBgQC9cIdRGjPaQSVsp30YXnG2mpobJCIEP30mETM2pYyIZBK+7P3I +YFdmzMp4iQb3IXMJmGNRmahoZ1QtrhxnK28tvYIX97mtWG1AJQm7WzNhqu81sfVF +JxQvmO49bz902QDo3/OtH2+GOD7b+9gf0N579u2TmQdIU+UUVVEdzF7bJQKBgFQX +TWKryNPSd20MXt7iwB1yAFYEljRfs1QCIIitdPZVhklIm4B+jtHq7UM7UYLZYyBi +kotLT9BlboYUvx3ljnH59uQK1rYaj0eUO4NQnM24ug5jjT73ysSXOHcm91aYT3u/ +J4B5QHamOd0b5gk0o/K3jUFVYHoJ3zg8Q8dS/7wfAoGBAMTue1Uq2GZOklxHWGUf +AedLR0aeNrV01hvl/R+sVb0h/lPqSeg5jQeLUHvkgG4SIq93dNhCnzMI6Dhch+Yc +o337l8S4ZcmJblp0uDz2gg2BLpt3PUPDWYQy8oFAjGK4JVwNgxPzDchMXGPDHuhQ +8r+9yBZlU1k64S3EIYuK4m7a -----END PRIVATE KEY----- diff --git a/dockers/stunnel/keys/client-cert.pem b/dockers/stunnel/keys/client-cert.pem index 4db466a4..b761d806 100644 --- a/dockers/stunnel/keys/client-cert.pem +++ b/dockers/stunnel/keys/client-cert.pem @@ -1,17 +1,19 @@ -----BEGIN CERTIFICATE----- -MIICpjCCAY4CAQEwDQYJKoZIhvcNAQELBQAwFjEUMBIGA1UEAwwLcmVkaXMtcHkt 
-Y2EwIBcNMjQwNTA5MDcyMDE5WhgPMzAyMzA5MTAwNzIwMTlaMBoxGDAWBgNVBAMM -D3JlZGlzLXB5LWNsaWVudDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -ALOL3znn2vpX8+VHOlETymeFpw8wsCeOfr9fNhK2o5APIG1NhrGjlu+T7ri/DfrM -ZmjF+uDSuuUs044o5SFOECNi7yOwpdC9YVWSPQQ5VrsMENqyjIYyq2BC7fLHztAt -VF1jg0D0zijfFg/4meG2tAOnXLa0O9WUcmwsNlxEgyFzcLvCoTaXpUJbLYJZ2IxW -BoKgJ85acLlIFQIex053CqmgG/odM8Ib8s1YO+IXI4JsJlJFd9we+zYgZ2TRSZ8L -v8A8gXM+WTBZpZXNXYv020dW22X7gu+VH4LHcg/6eF0GtkdrFdlQjCEjwGIoVFTu -fNSp3NvSSYrK/qeJtSNaSw0CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAdA1QqJn/ -d4rcSO8z2L64d3SdO4wLf78Qznh3vTrIlQ/i0sESRQppw1U57PHSyYtAJzc1MV39 -zgn8KvuQToPQl9UoRWD6mVK8L//xplTPxWJB4BqD/kUc+lA9akBNU8Yhx7KbI5zX -z4OgTIeWAtY9R5CH1xbQlVCqAAk+SdDk2raOebNQMpzJrMUdeDTrgoDaBFnHgDbb -XHQCOF9/LrbBlrTlNJh6PHY8YztrJKdDDhSxJ9Tudz7ynUA+NcZ8dF5o/Co+QD5b -gkVdz/nV8LoDeO8QjJXsgsHFD/B+ljWYeEGc5flFe6jWLGOCtgQB5JhImg9lsWFh -X9i921F9Cqox3Q== +MIIDDDCCAfSgAwIBAgIBATANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQDDAx2YWxr +ZXktcHktY2EwIBcNMjQxMTAxMTE1MTAwWhgPMzAyNDAzMDQxMTUxMDBaMBsxGTAX +BgNVBAMMEHZhbGtleS1weS1jbGllbnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw +ggEKAoIBAQCywTxWqxdElzelLCtjgek1JX5H3D2FwtnlNzqHJ8Ork4zjiBkouPoC +PAHWRV2VH6Y9vUkFzoJYCtjNBs7dawXDyl4Jj2PsbMIFpD5gRjztqX0yFVupWrVz +dovQheEzDC5pOie8vhgD3s5ej0pus1C1dQJ3KhAa49Ci88+cAO8kO7/MnjoY97SJ +vsk0Ui2zcBPGzXHZnHcDkyjegDBztuNKuhnrP90zLMWylwVH+h6QpAi7JnnPfpUr +bDgraBets8tco//Lr745M1vcV7jCxFk+9eyBhkAlbq/Z+FsA/i47vLqqwy93yt9S +61XkElyFUbcYbRS2xF58WS//18Dv2js5AgMBAAGjXTBbMAwGA1UdEwEB/wQCMAAw +CwYDVR0PBAQDAgWgMB0GA1UdDgQWBBRKmRR43Y3FSUupi7emhzJIqyn2ZzAfBgNV +HSMEGDAWgBQCiVq1mATQGX/9xPxG9l0soukgFzANBgkqhkiG9w0BAQsFAAOCAQEA +PhwfN23MKSKIOgg+heNiz9HWuNxacjlHp4sbgM/vHvah1x7nctdEsXPm1NO6J3uu +iTGIEV8u4I3Pry2TRsP5UZKX5VMTfB9TeonxYbu51P+lAIu+fB5fwQ3qHaycq6su +yKHIzDHP7+oOSd3lHTUiyIa04h1EevMjoWmihsFOgHQCNRaU3ifdyzcPa4Exd4dL +MyuXq9ccbuqHe+UZyj8ftt8zYtIILcAnLJlhosIl+VsSWyD1e0WRfR36/tLR8ACf ++nz6aEXaCk07BgryllC0+YvoIVzfXddfD/p6e8/CO2Vxw+df6OT3Z6sZrRnFVK+m +6PAcuS8VDO7k2y457d8w0A== -----END CERTIFICATE----- diff --git 
a/dockers/stunnel/keys/client-key.pem b/dockers/stunnel/keys/client-key.pem index a53cbce0..89a1c670 100644 --- a/dockers/stunnel/keys/client-key.pem +++ b/dockers/stunnel/keys/client-key.pem @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCzi98559r6V/Pl -RzpRE8pnhacPMLAnjn6/XzYStqOQDyBtTYaxo5bvk+64vw36zGZoxfrg0rrlLNOO -KOUhThAjYu8jsKXQvWFVkj0EOVa7DBDasoyGMqtgQu3yx87QLVRdY4NA9M4o3xYP -+JnhtrQDp1y2tDvVlHJsLDZcRIMhc3C7wqE2l6VCWy2CWdiMVgaCoCfOWnC5SBUC -HsdOdwqpoBv6HTPCG/LNWDviFyOCbCZSRXfcHvs2IGdk0UmfC7/APIFzPlkwWaWV -zV2L9NtHVttl+4LvlR+Cx3IP+nhdBrZHaxXZUIwhI8BiKFRU7nzUqdzb0kmKyv6n -ibUjWksNAgMBAAECggEAEelgSZyRwevITxU+AhyhUpaIxgErcabLijfrYw6JXrPD -nmPfjhUt15TAefnFYUHG7ajikE81ietg54u44AuznHQgO0VCJYLfFPRT1foKZvqb -K9YoIrMnWaETr+azAR2kjvSAgZhqgLVQtCMu5s+dQcgOfcOZPINkrtnySl4jXtDE -SOTaj65VjSIkura17rj7nJNUPmDGFwsxwKpeEcXZTfa//ypT/hHVREkRmbSFk5Kw -rf3T3O1pMVF8+SeacK/oyDUf3ISc8wn9Xmwgpv8I74xWtDy3kAs315tfWPMOHe4b -CYk7GD1fu2rVRhtDCvkljiw2NejfeMzKt5+2wLXRmQKBgQD0KeCv8vdw6JBLH6PI -72yE/GRkjAn4KfhmHK+1GZN6m49DV4XAYaA7T6u2Q3gn9gNsVsHC2FCsCHy63BpA -I6ZJfdm2rcJkqgeKKRQpLBRedDMpQLY1WyXjugpV46KmA0ThtgtZeVKilJWvamHs -t/TwSbf/humg0cIcamEnkKVawwKBgQC8QBS1pfMqlSodylbPG0VaJqgdF/yAthp6 -gunVqpgbTMqGLTCpKUfSgPMpzu8znaCNeZN0EK1p7qZ7VE1VHpVoyQHC9Eu8d6PF -HAENaOUcUoCQNtXLoaN4waSjt7i6vYRldT/qrYB1YdpkkVKdj39w2N+uaxtZzDXu -hHu0eixF7wKBgCR3TLN6mjImycYuh4uvFooWF/hcYfDKc+rsReHKXBhnu1HXdIZz -DjdNgtvJ39w4BfLcUjwDiqjm65oM3W7O5Dr9rNJ3yRy3uECOOhCcIL6qpCl5HL2D -S3ljg7+oK9aXjmYXhkJquEjH4EM+pDlykAaDPBPR1nrKWS9dQ/1gwRF5AoGAd+Uo -S3jiIqDWLhsMpuNrjDtKnx0DyMYynwx5+YepUNnbsxFdCKAuCjfupxYQ6wLdmr1v -2GA20l0Y0zuh9TCBYDeFU7Fb+zEHsSZg1TWVljBFiZQjHopYHzTVsx/0G5tQk33V -s5XFVv13ps2XnJokRK8b5254AP067Cqczxlw0SkCgYEA0ito+l4TOa1/DnsbP1Q0 -kgeTb/9wPHpHVJ0Hz6vIXabaDlvvYwgRh151+9xzMmrs/0QCbI2+SHucAzu4RTjM -MAiytSBQtXA+L9deNNU9QqPKsy6/Xq6SsKLRkL9kiUasiUE0v7c/T7L9D81nTFuS -8htCfXw1/Tf8tLb+Rtvvwtw= +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCywTxWqxdElzel 
+LCtjgek1JX5H3D2FwtnlNzqHJ8Ork4zjiBkouPoCPAHWRV2VH6Y9vUkFzoJYCtjN +Bs7dawXDyl4Jj2PsbMIFpD5gRjztqX0yFVupWrVzdovQheEzDC5pOie8vhgD3s5e +j0pus1C1dQJ3KhAa49Ci88+cAO8kO7/MnjoY97SJvsk0Ui2zcBPGzXHZnHcDkyje +gDBztuNKuhnrP90zLMWylwVH+h6QpAi7JnnPfpUrbDgraBets8tco//Lr745M1vc +V7jCxFk+9eyBhkAlbq/Z+FsA/i47vLqqwy93yt9S61XkElyFUbcYbRS2xF58WS// +18Dv2js5AgMBAAECggEAIajCSF2SFY/V4gvFpcieFaxYMYbWrNvKdN9n7XA+541y +n5uOhT0Dkq0i+Wp5Wy2o+4IrgGTo5VQxi7XG+SmAXeQ6vdkayzeVd0N8nVtMeMIL ++YTNDEAw36uIWz0CcT7PdHAHcIJo+j2XpXWc4ehw/6InUzH/81hHfo+jXbBNV4h6 +B9+lx2SjO985i9ubBsHrvf4CbjIElnOD0fHgdTstvDi03U3U2J75ASiYri6ei5Ob +jOUMqhGVAPHlonCk23uWqOvqgG0Y/XqnzCWfoJxRI4IU689/jGNsYOw8ZYhG4A/u +nlbsQ4NTnJ3jCimtdWAsApYoNvXJ82cwKJcyB2LPaQKBgQDgPekDTt1UXEUDTjoW +UGBIhXfFkliV4Bxfj5TKvTfX5xP3dlU+IogaqVL7wIfdlEkLtNB8tnBxIas/8XpH +bF+/w3TqBRfC7NW6qOs0mO7rgiWWDJX5nYW3dgViMOVCZiRTijiOcXVrhHFfXoLT +7F7xMZQYEFdMxXaP9QsRIMEXnQKBgQDMEicpWJb54qPYUYHMYyf4z/hSyrcMVGNl +EhozCqzpZrB0C59ohzadZ3nKQyitlIkPSlhneWjF20mnovF4AS0qsckbDv2Z7nOS +ZKxnfUfJ/i0BenVVv96U/tD5oHFzf2ezbk1bfWVpry7dKQjoh7zmxDCrJEi58Igq +pwqTevtVTQKBgQDIpJyp6RcBNM5LduNis+hy+3l/vsKk2DKLDt4Dyer9tDWZZrg/ +MIa31Gn7+PmYueXiI5eo/1T85TNls5vF7KJ/41PpUUVBlMhojFxoY67j6z/WUsye +3OOYlHGcukNodhxq43JXgg2edpM60kYdeZI6HjJ0laqHdufvR0LvwG8FwQKBgAyn +k4Yc2D/mrgJcC5CBFZl4TA3WREOfeApsdPN1VgOjOo33qorw15IrOIIyZ/NboqQw +GAtSnAyo7IhYsmCesg5TuATViSRihQgu9gH04t7DxEazMVN/8m2K36qbKG3hGK0n +yeRCgmdrVZyhTswcnrowsFPsjBX7tHXwpdc/aRaBAoGAKeYeOxGwx3L25g6/VzqU +8d/Uu2t39crLz/8cElqnjoN2Lis0m6FezUiIYCKHgfQtFtypdrFI6UjWk+G4mS5M +zS2j3B+66bfbBLgZrbav30lLz8YoKAuX1OIPsq19e2YIqb2sA3J4DqjaX73fFndW +ekKsHsxJCHDmI2QXsu5B9ZM= -----END PRIVATE KEY----- diff --git a/dockers/stunnel/keys/client-req.pem b/dockers/stunnel/keys/client-req.pem index 62828e19..85cedd4e 100644 --- a/dockers/stunnel/keys/client-req.pem +++ b/dockers/stunnel/keys/client-req.pem @@ -1,15 +1,16 @@ -----BEGIN CERTIFICATE REQUEST----- -MIICXzCCAUcCAQAwGjEYMBYGA1UEAwwPcmVkaXMtcHktY2xpZW50MIIBIjANBgkq 
-hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAs4vfOefa+lfz5Uc6URPKZ4WnDzCwJ45+ -v182ErajkA8gbU2GsaOW75PuuL8N+sxmaMX64NK65SzTjijlIU4QI2LvI7Cl0L1h -VZI9BDlWuwwQ2rKMhjKrYELt8sfO0C1UXWODQPTOKN8WD/iZ4ba0A6dctrQ71ZRy -bCw2XESDIXNwu8KhNpelQlstglnYjFYGgqAnzlpwuUgVAh7HTncKqaAb+h0zwhvy -zVg74hcjgmwmUkV33B77NiBnZNFJnwu/wDyBcz5ZMFmllc1di/TbR1bbZfuC75Uf -gsdyD/p4XQa2R2sV2VCMISPAYihUVO581Knc29JJisr+p4m1I1pLDQIDAQABoAAw -DQYJKoZIhvcNAQELBQADggEBAD3H8McA7SmTrswSp0lw1C1UFmtazhKbFYY3/+Ld -ntZimzTy4Y5Ai1UW/blgwVLZxWWzazfkfWPMsRXtWcttuW/pxFGkLlyzFm4OsUQA -hpxtUNlmEwzcYZAin3qNnCA9bQfGL/z+zUcuMuf6HGplAUhtPhTUnvGZ2B7rJ+aC -syyt+/T/JJdnnnY0o4s4OzQa9ow6P7mC6egefHgLrtFbbuB4L/L/NdVj5NBzkXso -kmHLTUwkEtKOiG4gFLRDXsgXCy+sfEEqqWapeFhOQdagENYg+LXSN0jpxGWeR1J/ -vZHMSJT4GK4SgyNpZFu5To2lf7ucw6ywCFfg6jH2EWQeCjk= +MIICjDCCAXQCAQAwGzEZMBcGA1UEAwwQdmFsa2V5LXB5LWNsaWVudDCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALLBPFarF0SXN6UsK2OB6TUlfkfcPYXC +2eU3Oocnw6uTjOOIGSi4+gI8AdZFXZUfpj29SQXOglgK2M0Gzt1rBcPKXgmPY+xs +wgWkPmBGPO2pfTIVW6latXN2i9CF4TMMLmk6J7y+GAPezl6PSm6zULV1AncqEBrj +0KLzz5wA7yQ7v8yeOhj3tIm+yTRSLbNwE8bNcdmcdwOTKN6AMHO240q6Ges/3TMs +xbKXBUf6HpCkCLsmec9+lStsOCtoF62zy1yj/8uvvjkzW9xXuMLEWT717IGGQCVu +r9n4WwD+Lju8uqrDL3fK31LrVeQSXIVRtxhtFLbEXnxZL//XwO/aOzkCAwEAAaAs +MCoGCSqGSIb3DQEJDjEdMBswDAYDVR0TAQH/BAIwADALBgNVHQ8EBAMCBaAwDQYJ +KoZIhvcNAQELBQADggEBAHtRC+8mMMebz3cbeZa8ORnk7zqhkKvKjKXND01LVwlj +spDPLYks4ySd5pehpsopxtF0DQw4EDnGq4f7MnwJvArSc1uqoul1seHKffesDKmY +zIbumivBfHUaIrqlxIcyXB75aM0rV7XD+DTTVX+39XCavckXpYHhDLI2slR6P+71 +OLhCV3GEmhJchyNjr/tMidtO/5NkcIFjcanZYf0wYWHo+lVBEmkwQBHL132TJge3 +XCTSfoL5m1smokq+zrJDaJjtsYfR2kUzU6MMY8H2omI7DMwEISJEpYK5FumxTWxx +djEFXUcRybmtRcnwHNJXFpSNANfWaSx0oCxi51BN808= -----END CERTIFICATE REQUEST----- diff --git a/dockers/stunnel/keys/server-cert.pem b/dockers/stunnel/keys/server-cert.pem index c17bf9ca..dc41bb48 100644 --- a/dockers/stunnel/keys/server-cert.pem +++ b/dockers/stunnel/keys/server-cert.pem @@ -1,17 +1,19 @@ -----BEGIN CERTIFICATE----- 
-MIICpjCCAY4CAQEwDQYJKoZIhvcNAQELBQAwFjEUMBIGA1UEAwwLcmVkaXMtcHkt -Y2EwIBcNMjQwNTA5MDcyMDE5WhgPMzAyMzA5MTAwNzIwMTlaMBoxGDAWBgNVBAMM -D3JlZGlzLXB5LXNlcnZlcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AOg14yTsgmakeSFuqtvy4fV1rcSgLiGdGKzOBsoytmCZzV++5Jljj7utSpJiYMYk -HOTZtyqAVwmF/0yyZ25lbEHR/N3S3Jj/al4EG9u+K7O3eNZrTQkg4+ifwcT+V1Xo -s6f+L6BRld4y78QVZwdEsTy4SIeSAwGygACymEWYZ6NZBgM2xgp8SInHYxHP3gXh -02wioB79B62DExFVUKwUXjbUhPooyvGf9MMpUrmdFmQFfcosW/urCQF9YI6ZcPnr -ybXJ6kiplmNKeVD4dEyQLYNp09alnT6q+pcJa+NwW6O0eyqEsHQxCJyo9ZA3IW5I -SH+oftVxnZJIIPcsXABuH10CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAdWY0UeR4 -/9hpK3Mhl8VVz0zQwwEfnxCmI/TxpqNw+5lvpit/WvriIAEP9MToWHwYvG24zRrp -zv/LDHNh8UtnX3GILGs0CY/oFDevAEU1tixbmFJPceuMwKsrMtkp/6NyWF4p62o2 -fiQK68l1HSGgaH7kJ6BKYgV4JQK3Fgk9J4KrejwmYXzCFKcEvNtKMG7i0WN+AmK2 -vnxxZ3xx4HPH3OJ5ss6T2gGlvjFnOS7Z0kHtbkzPzxaC9ZVqMySwPRggf84tUUdk -vCwDHiJcbk5BMLug3yI9xTfSG3lMnwgZAWXMOqm/w6c1IIM8R/nKwNfwbG+4eUK0 -t2F8EBCShzAJGg== +MIIDDDCCAfSgAwIBAgIBATANBgkqhkiG9w0BAQsFADAXMRUwEwYDVQQDDAx2YWxr +ZXktcHktY2EwIBcNMjQxMTAxMTE1MTAwWhgPMzAyNDAzMDQxMTUxMDBaMBsxGTAX +BgNVBAMMEHZhbGtleS1weS1zZXJ2ZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw +ggEKAoIBAQCzjC1+i1/DoFk1wNOQENIC15+kL/FKk5r1JHCB6h3pOgKre1eAwVIC +OQ2EgD76OarvQNK4ECydOacY09uCjR+BIWn65fDpC8jGlfZmgGr3IiLoIyKkoqXv +8ZUxU/lTcFs4TKjCioXiYXwcCnVW7mG95I37IDtul/bUh+aySE7T2b+6tdwUKsQG +HF1PULH3Tfk/jwOOMpPt4J7CGbNxxKFi+/qiCxLNCSFXF2+UMwJ9UbKZA+vIMEw9 +Ecgkpy53KtZ9Xds0o6IGyJjHR6UQV848c/Miawikz0Cc11b+aq3gvWMhuhGXVnIp +zjyzJxgkk9FYdL8KtTYASIOo7nsieUdPAgMBAAGjXTBbMAwGA1UdEwEB/wQCMAAw +CwYDVR0PBAQDAgWgMB0GA1UdDgQWBBQX5KXbc5jNpdtKb/6mGOGVa4L6VTAfBgNV +HSMEGDAWgBQCiVq1mATQGX/9xPxG9l0soukgFzANBgkqhkiG9w0BAQsFAAOCAQEA +q47hqIOjO+005XUBiekSuHi0QA0B79p4tKbCSFtXA0kmmW22Cg4HTZWR9oIzB3my +DukHHcpn/53xeTZXVbDiptorGX3jpaBjDlD/ELl7YFYNNlenwkXa1IRlSlbmYhx9 +O2PsRnz73R6ebybqN4fpNUHy0cHqe8KNkhRI5YPhSWfIo5dbVyiD9jsOy5vhT+am +Bt5Adk+gMFm3hok3aO500exAIscteflwDWyb1w6jShyoRX1YahJI5QU+MICIL+5k +3rKO4FK2Vo6wI6dk8ReMGRrZCBzfUxwCBsS+kQ5jwYym4XOw/62oealELP/Gm/Pp 
+bWhwbV/AcUIgSZC76ZSoJQ== -----END CERTIFICATE----- diff --git a/dockers/stunnel/keys/server-key.pem b/dockers/stunnel/keys/server-key.pem index 8dd9a1e2..2df2718d 100644 --- a/dockers/stunnel/keys/server-key.pem +++ b/dockers/stunnel/keys/server-key.pem @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDoNeMk7IJmpHkh -bqrb8uH1da3EoC4hnRiszgbKMrZgmc1fvuSZY4+7rUqSYmDGJBzk2bcqgFcJhf9M -smduZWxB0fzd0tyY/2peBBvbviuzt3jWa00JIOPon8HE/ldV6LOn/i+gUZXeMu/E -FWcHRLE8uEiHkgMBsoAAsphFmGejWQYDNsYKfEiJx2MRz94F4dNsIqAe/QetgxMR -VVCsFF421IT6KMrxn/TDKVK5nRZkBX3KLFv7qwkBfWCOmXD568m1yepIqZZjSnlQ -+HRMkC2DadPWpZ0+qvqXCWvjcFujtHsqhLB0MQicqPWQNyFuSEh/qH7VcZ2SSCD3 -LFwAbh9dAgMBAAECggEAI0llDgxeuIhP2/O8RRZAnhNW56VLvVHpGQFp6LoSGtXk -bqNMi76kbemkhmAqwpFkTqaC/hNoporVQ+tsaktBSzNE0NSlLx7JJCZNsXPRokrE -Mxk1KKj12TjFslDQJr7o5iNrS1p6gryM0OhLssAOiuKaKvfWOyDL8M8y8oh5X0ny -1M6IAJMkbpwiWU2OHIH7irkS8fYyCeOz0JMovCwMPwYkovHD7uHKbV4qGKzdOKN1 -QD8qMWAF1lCv/57juuwpzulGY3sSyU7yRZMMxJQ7nbIRj5iuj6+e2m6JhVghIiYG -IObIkGyubCr9QH315byiSS9ma1xzml3EqyM3XQkEhQKBgQDyxGY+60/dkUW9vAAm -g20eVZnflhE8+ooEpX9VPIliL7ADy3HU2poV2oXif8pVauMvRaYla8BHIOPV2qGI -tHTYNvubs6lxEq2Z7gM+8c5qOElXjup8Ch9/XCHXZavW8caWEcA9Z84Z4dCxbaku -EhEL0SduCn7j1tU1+Z9jBs08ewKBgQD03i29kCUeCnW+zEo+aO2Spn6HpdyZkuzG -2az5XurHGUFAgWYLOpShatjD4BY1GONvJTlD/gH2vqEkfY2OGgZ2pbjCFSfhIma/ -cnMuhsO2IlcuETqzlod1HGHcn6gGRM5LvYP343UIdv9nmJaT31nckueWv+yBd8HO -kAx1W2boBwKBgBtM7tqgh8i474jYvYOXQAwrQDSeoa2j1yWSnvEs7541Eqw6ksCH -HNDcVDYWfOCCNq44POj0ZxkYn8aK4aOH96Pg+waVe7aVjSREWeUYOEhFsCnCjqgI -U2Z1K/EXI+32Hoj90gqVw92xQVDSrjXaHkSf7rk3QPHKVQvO2JfAShBFAoGAW5ic -nZNE/ybEgrmicBQKAlh7bjxx95SJM50LYkDKK+3bhcihpkOkg3kXWrYBOJ11vga7 -lB55F5aZaq/4epZroog9Q4RsZX/b1XN3eIj6vq+70sSpI7KEOx+Bz+h9DtNAI/7h -VaHlDmSNB3CBqxDaaXMeZDqouolUmvMxZdjp9pMCgYEA1Y7vhCvMZA62OJwFJ4X8 -9Xg7bPE3jZQ6Tj3FbVCMy+RQdlo8GzYeoNZDjhpSxjJe/1lyk//EBwVNs3E4rRNl -+GcaEOo0X/J7SkPFqM6aFITypIIGeJpFyz/S99i/5tkfsNt9BQtiTS+x1Kj1iREV -bXIoNJRac5m/LLZKtDtHv18= 
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCzjC1+i1/DoFk1 +wNOQENIC15+kL/FKk5r1JHCB6h3pOgKre1eAwVICOQ2EgD76OarvQNK4ECydOacY +09uCjR+BIWn65fDpC8jGlfZmgGr3IiLoIyKkoqXv8ZUxU/lTcFs4TKjCioXiYXwc +CnVW7mG95I37IDtul/bUh+aySE7T2b+6tdwUKsQGHF1PULH3Tfk/jwOOMpPt4J7C +GbNxxKFi+/qiCxLNCSFXF2+UMwJ9UbKZA+vIMEw9Ecgkpy53KtZ9Xds0o6IGyJjH +R6UQV848c/Miawikz0Cc11b+aq3gvWMhuhGXVnIpzjyzJxgkk9FYdL8KtTYASIOo +7nsieUdPAgMBAAECggEAEPLiFoh8lUBtO2xE7FwSHw+Qs9SMv//4CD0U28aoZSxD +NUHS7EYTgj81ffUHPOK1tpkVayentn3LPsY8+fFtcGihkvwixjUFEm30kQ99SW/x +AJ3Udtsds+1HqpzlM9Gu4r0lzxt5cPnH1/PKyNZ+5oiNOI/93D4/ICfmCJ1Xx5ql +W/4QzYzo+D2L5fDHBB9RrHWmb4eKUsDvXFqUlz50dbKtwdIT307j+27HjpaX0UKx +Jf0LDn9KsygzHgsSQwJ2pvZrrpTS4pmSlz397+WINgYQd+XkcasHbdsJrcSXAOg3 +J6eyE5kEEGYMHoZSpYQ2Jhpb7QB/RiR1vhJuSQ2IqQKBgQDr2Qd/VBzkj+ppNgp8 +KTMHLdZDexuTUbfm0lqmbHHbaYXgc1iYAKLvGDHvrxEZsnDUSyf+lAeJuaVY5KdV +T4ogCrI6Zdm4gux/8LsqyKMd3JH6DnaHYDmChQudXdkbNgdgsoo3G59zmbDMvfrL +mxmL49lkQ8jSE/3owO3KaXgd2QKBgQDC46EL2L/eteuZ3ocIzttffPyuO2/PlSz/ +I+ij9pG+9EMqMlhy1mG3FYvunSUex4FoUjIz8SETWiaX/+n8nU3sqMsCXK6U78ga +Nhe8bmshfFICwzT7cOzYJY5gkFtqV3xie9BrrM8SM5VwmJuAdJYdDiC+Qub/2/+g +57SzeaNNZwKBgGy7I8+58ZAWIVXcCj1vqQzYPv3hVbc3Z3dM52nueRdUsNnnk6KQ +OI3OM8dyiIm2UHovJAMkL814/xfaYqLcBqv7AmwV5KhCA9KAI2n4EeuEcvA7lr2W +ySy5Nb+ZMqxu3jvgVARQAdUDuBTMSUFxAfgSVXj6Hy1q9hZGS9qTgUMRAoGBAIqE +J064O4cbXdz7IJbOD3WK7D0Z2Zp8uIKPDyaadXR3P9WZ+uuEG+d41QA/iMabngp7 +gVsRoySSCqQ2LCRz2ZK/VarUHPGWi261y6EOCe6+4bs860dbN7tY1h0j/RVUIQAO +aFBffr29FBX3IW7nblowVG1mN7DauJGwneqCJeM5AoGBAOc6vMhl355zR/5VK2Sb +PtKrrbpHmJSeqW7wLBNWXNPkFInrf5G8m6oMQISVjqJ+dlP/AKZE2dOqt6+XyK7T +QWhln1l4+Gbx+o9ig6/nrisSYPIoZXDUht0+GYbCEBK1p/R+8k4CM1FrVLIaufQj +1wx+hdkof1ICK9gnjBDc/DwL -----END PRIVATE KEY----- diff --git a/dockers/stunnel/keys/server-req.pem b/dockers/stunnel/keys/server-req.pem index 6d853693..3d082431 100644 --- a/dockers/stunnel/keys/server-req.pem +++ b/dockers/stunnel/keys/server-req.pem @@ -1,15 +1,16 @@ -----BEGIN CERTIFICATE REQUEST----- 
-MIICXzCCAUcCAQAwGjEYMBYGA1UEAwwPcmVkaXMtcHktc2VydmVyMIIBIjANBgkq -hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6DXjJOyCZqR5IW6q2/Lh9XWtxKAuIZ0Y -rM4GyjK2YJnNX77kmWOPu61KkmJgxiQc5Nm3KoBXCYX/TLJnbmVsQdH83dLcmP9q -XgQb274rs7d41mtNCSDj6J/BxP5XVeizp/4voFGV3jLvxBVnB0SxPLhIh5IDAbKA -ALKYRZhno1kGAzbGCnxIicdjEc/eBeHTbCKgHv0HrYMTEVVQrBReNtSE+ijK8Z/0 -wylSuZ0WZAV9yixb+6sJAX1gjplw+evJtcnqSKmWY0p5UPh0TJAtg2nT1qWdPqr6 -lwlr43Bbo7R7KoSwdDEInKj1kDchbkhIf6h+1XGdkkgg9yxcAG4fXQIDAQABoAAw -DQYJKoZIhvcNAQELBQADggEBAGMLI6jfG95L1Kqny8+Fl9sVnJ4ynb5905Hk9vXJ -V/BVc3P6JS6c4qYSeFd6wihHC7/j2EC3wt55Sj6JzYKy93AEjBfDfBb2ZuB6VpPy -iGKXzSGO71ziI2uzz92ltJhptNc6TNUUxwaBhOZiq2sxnLpnIcPZ/txDC75fGYEm -9iSbeeHNNZTSqQyQOzKW0OL6ss+GHhlfJPzx6mSH5dvb6bpKB2SCG1aZaDuOQTl3 -8aDIo1Z/ug6BrqoDMCyRAZTDnTohhC96bbKLRMdm0g3wwDeoWuQy1q9s1/AUYfBm -305LUYORBdFy08n41lFWo1JA4errzBhVTpHNKZ6DyQfMOxA= +MIICjDCCAXQCAQAwGzEZMBcGA1UEAwwQdmFsa2V5LXB5LXNlcnZlcjCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALOMLX6LX8OgWTXA05AQ0gLXn6Qv8UqT +mvUkcIHqHek6Aqt7V4DBUgI5DYSAPvo5qu9A0rgQLJ05pxjT24KNH4Ehafrl8OkL +yMaV9maAavciIugjIqSipe/xlTFT+VNwWzhMqMKKheJhfBwKdVbuYb3kjfsgO26X +9tSH5rJITtPZv7q13BQqxAYcXU9QsfdN+T+PA44yk+3gnsIZs3HEoWL7+qILEs0J +IVcXb5QzAn1RspkD68gwTD0RyCSnLncq1n1d2zSjogbImMdHpRBXzjxz8yJrCKTP +QJzXVv5qreC9YyG6EZdWcinOPLMnGCST0Vh0vwq1NgBIg6jueyJ5R08CAwEAAaAs +MCoGCSqGSIb3DQEJDjEdMBswDAYDVR0TAQH/BAIwADALBgNVHQ8EBAMCBaAwDQYJ +KoZIhvcNAQELBQADggEBAEJabnLktbb21WSoTTaC2mpLwSIGLGFXVQEFepdwmoKm +RKqhqEz/Tw9LwC2EDPndjV1pzQf/yf1AYosMG/OHC+TyqsDyNdBXi+qbGLJrUVEc +leE9swf52prrK6fauxZgsDJPRtHqzu40yrNkNz+wPRm7OvDiePdbqW5LhzepdBJV +wqK6AEXfIJDn9zkMhVxYYCff9QaqOBUYiTaptAAH2K6wGzL7CXp7SnDnDLMWAheD +JIoRYZVXCV/3u/p67pUcndKZA2CvuI+fslmMVaoRMzcaXZ2dxXbr9OrJAFJNeqCg +nbOlMZvgLjvAiOqMIoYtGasJnlolb0Fg2yurPIqaisM= -----END CERTIFICATE REQUEST----- diff --git a/dockers/stunnel/openssl.cnf b/dockers/stunnel/openssl.cnf new file mode 100644 index 00000000..1119d485 --- /dev/null +++ b/dockers/stunnel/openssl.cnf @@ -0,0 +1,15 @@ +[ req ] +distinguished_name = 
req_distinguished_name +x509_extensions = v3_ca + +[ req_distinguished_name ] +commonName = valkey.io +commonName_max = 64 + +[ v3_ca ] +basicConstraints = critical, CA:TRUE +keyUsage = keyCertSign, cRLSign + +[ v3_req ] +basicConstraints = critical, CA:FALSE +keyUsage = digitalSignature, keyEncipherment From dd7c56d50dbddbb946267ba0117b1b6ca7a4825f Mon Sep 17 00:00:00 2001 From: Mikhail Koviazin Date: Mon, 25 Nov 2024 16:45:12 +0100 Subject: [PATCH 34/39] tests: fix TLS tests with Python 3.13 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds support for minimum_ssl_version and maximum_ssl_version to `_ValkeyTCPServer` in tests. Previously it was written with `ssl.wrap_socket` in mind which only supported `ssl_version`. `SSLContext` OTOH supports passing both minimum and maximum supported TLS versions. This commit utilizes that. Additionally, TLS version in test_tcp_ssl_version_mismatch was fixed. It was broken since 7783e0b. This change was added there by mistake and in fact didn't change anything for Python 3.12. Instead, it seems to have hidden a bug that revealed itself with Python 3.13. 
Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- tests/test_asyncio/test_connect.py | 10 +++++----- tests/test_connect.py | 12 +++++++----- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/tests/test_asyncio/test_connect.py b/tests/test_asyncio/test_connect.py index ac0465dd..dc92b2f1 100644 --- a/tests/test_asyncio/test_connect.py +++ b/tests/test_asyncio/test_connect.py @@ -125,7 +125,7 @@ async def test_tcp_ssl_version_mismatch(tcp_address): tcp_address, certfile=certfile, keyfile=keyfile, - ssl_version=ssl.TLSVersion.TLSv1_2, + maximum_ssl_version=ssl.TLSVersion.TLSv1_2, ) await conn.disconnect() @@ -135,7 +135,8 @@ async def _assert_connect( server_address, certfile=None, keyfile=None, - ssl_version=None, + minimum_ssl_version=ssl.TLSVersion.TLSv1_2, + maximum_ssl_version=ssl.TLSVersion.TLSv1_3, ): stop_event = asyncio.Event() finished = asyncio.Event() @@ -153,9 +154,8 @@ async def _handler(reader, writer): elif certfile: host, port = server_address context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - if ssl_version is not None: - context.minimum_version = ssl_version - context.maximum_version = ssl_version + context.minimum_version = minimum_ssl_version + context.maximum_version = maximum_ssl_version context.load_cert_chain(certfile=certfile, keyfile=keyfile) server = await asyncio.start_server(_handler, host=host, port=port, ssl=context) else: diff --git a/tests/test_connect.py b/tests/test_connect.py index ac91f5a0..cc580008 100644 --- a/tests/test_connect.py +++ b/tests/test_connect.py @@ -100,7 +100,6 @@ def test_tcp_ssl_tls12_custom_ciphers(tcp_address, ssl_ciphers): tcp_address, certfile=certfile, keyfile=keyfile, - ssl_version=ssl.TLSVersion.TLSv1_2, ) @@ -141,7 +140,7 @@ def test_tcp_ssl_version_mismatch(tcp_address): tcp_address, certfile=certfile, keyfile=keyfile, - ssl_version=ssl.TLSVersion.TLSv1_3, + maximum_ssl_version=ssl.TLSVersion.TLSv1_2, ) @@ -170,14 +169,16 @@ def __init__( *args, certfile=None, 
keyfile=None, - ssl_version=ssl.TLSVersion.TLSv1, + minimum_ssl_version=ssl.TLSVersion.TLSv1_2, + maximum_ssl_version=ssl.TLSVersion.TLSv1_3, **kw, ) -> None: self._ready_event = threading.Event() self._stop_requested = False self._certfile = certfile self._keyfile = keyfile - self._ssl_version = ssl_version + self._minimum_ssl_version = minimum_ssl_version + self._maximum_ssl_version = maximum_ssl_version super().__init__(*args, **kw) def service_actions(self): @@ -199,7 +200,8 @@ def get_request(self): newsocket, fromaddr = self.socket.accept() sslctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) sslctx.load_cert_chain(self._certfile, self._keyfile) - sslctx.minimum_version = self._ssl_version + sslctx.minimum_version = self._minimum_ssl_version + sslctx.maximum_version = self._maximum_ssl_version connstream = sslctx.wrap_socket( newsocket, server_side=True, From 70491aef450fa53552c5de3282402fcecc184a59 Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Wed, 30 Oct 2024 09:58:23 +0100 Subject: [PATCH 35/39] Add support for Python 3.13 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 4 ++-- setup.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index bcb88d7f..1aa9a29c 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -78,7 +78,7 @@ jobs: max-parallel: 15 fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.11.1', '3.12', 'pypy-3.9', 'pypy-3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.11.1', '3.12', '3.13', 'pypy-3.9', 'pypy-3.10'] test-type: ['standalone', 'cluster'] connection-type: ['libvalkey', 'plain'] protocol-version: ['2','3'] @@ -168,7 +168,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.8', '3.9', '3.10', 
'3.11', '3.11.1', '3.12', 'pypy-3.9', 'pypy-3.10'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.11.1', '3.12', '3.13', 'pypy-3.9', 'pypy-3.10'] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 diff --git a/setup.py b/setup.py index aff5f60f..2df4059d 100644 --- a/setup.py +++ b/setup.py @@ -52,6 +52,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], From 32108c2400da82d8146a756b9b10fe186436e867 Mon Sep 17 00:00:00 2001 From: Salvatore Mesoraca Date: Wed, 27 Nov 2024 16:19:41 +0100 Subject: [PATCH 36/39] Remove expiration/TTL commands that are not supported by Valkey MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Support for this commands may come in the future[1]. But it will take some time, so for now it's better to drop them. This is a breaking change for 6.1. 
Close #78 [1]: https://github.com/valkey-io/valkey/issues/640 Signed-off-by: Salvatore Mesoraca Signed-off-by: Raphaël Vinot --- tests/test_asyncio/test_hash.py | 303 ------------------------ tests/test_hash.py | 371 ----------------------------- valkey/commands/core.py | 404 -------------------------------- 3 files changed, 1078 deletions(-) delete mode 100644 tests/test_asyncio/test_hash.py delete mode 100644 tests/test_hash.py diff --git a/tests/test_asyncio/test_hash.py b/tests/test_asyncio/test_hash.py deleted file mode 100644 index 13549c75..00000000 --- a/tests/test_asyncio/test_hash.py +++ /dev/null @@ -1,303 +0,0 @@ -import asyncio -from datetime import datetime, timedelta - -import pytest -from tests.conftest import skip_if_server_version_lt - -pytestmark = pytest.mark.skip - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpire_basic(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert await r.hexpire("test:hash", 1, "field1") == [1] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpire_with_timedelta(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert await r.hexpire("test:hash", timedelta(seconds=1), "field1") == [1] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpire_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1"}) - assert await r.hexpire("test:hash", 2, "field1", xx=True) == [0] - assert await r.hexpire("test:hash", 2, "field1", nx=True) == [1] - assert await r.hexpire("test:hash", 1, "field1", xx=True) == [1] - assert await r.hexpire("test:hash", 
2, "field1", nx=True) == [0] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - await r.hset("test:hash", "field1", "value1") - await r.hexpire("test:hash", 2, "field1") - assert await r.hexpire("test:hash", 1, "field1", gt=True) == [0] - assert await r.hexpire("test:hash", 1, "field1", lt=True) == [1] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpire_nonexistent_key_or_field(r): - await r.delete("test:hash") - assert await r.hexpire("test:hash", 1, "field1") == [] - await r.hset("test:hash", "field1", "value1") - assert await r.hexpire("test:hash", 1, "nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpire_multiple_fields(r): - await r.delete("test:hash") - await r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - assert await r.hexpire("test:hash", 1, "field1", "field2") == [1, 1] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is False - assert await r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpire_basic(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert await r.hpexpire("test:hash", 500, "field1") == [1] - await asyncio.sleep(0.6) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpire_with_timedelta(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert await r.hpexpire("test:hash", timedelta(milliseconds=500), "field1") == [1] - await asyncio.sleep(0.6) - assert await r.hexists("test:hash", "field1") is False - assert await 
r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpire_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1"}) - assert await r.hpexpire("test:hash", 1500, "field1", xx=True) == [0] - assert await r.hpexpire("test:hash", 1500, "field1", nx=True) == [1] - assert await r.hpexpire("test:hash", 500, "field1", xx=True) == [1] - assert await r.hpexpire("test:hash", 1500, "field1", nx=True) == [0] - await asyncio.sleep(0.6) - assert await r.hexists("test:hash", "field1") is False - await r.hset("test:hash", "field1", "value1") - await r.hpexpire("test:hash", 1000, "field1") - assert await r.hpexpire("test:hash", 500, "field1", gt=True) == [0] - assert await r.hpexpire("test:hash", 500, "field1", lt=True) == [1] - await asyncio.sleep(0.6) - assert await r.hexists("test:hash", "field1") is False - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpire_nonexistent_key_or_field(r): - await r.delete("test:hash") - assert await r.hpexpire("test:hash", 500, "field1") == [] - await r.hset("test:hash", "field1", "value1") - assert await r.hpexpire("test:hash", 500, "nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpire_multiple_fields(r): - await r.delete("test:hash") - await r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - assert await r.hpexpire("test:hash", 500, "field1", "field2") == [1, 1] - await asyncio.sleep(0.6) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is False - assert await r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpireat_basic(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = int((datetime.now() + timedelta(seconds=1)).timestamp()) - assert await r.hexpireat("test:hash", 
exp_time, "field1") == [1] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpireat_with_datetime(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = datetime.now() + timedelta(seconds=1) - assert await r.hexpireat("test:hash", exp_time, "field1") == [1] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpireat_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1"}) - future_exp_time = int((datetime.now() + timedelta(seconds=2)).timestamp()) - past_exp_time = int((datetime.now() - timedelta(seconds=1)).timestamp()) - assert await r.hexpireat("test:hash", future_exp_time, "field1", xx=True) == [0] - assert await r.hexpireat("test:hash", future_exp_time, "field1", nx=True) == [1] - assert await r.hexpireat("test:hash", past_exp_time, "field1", gt=True) == [0] - assert await r.hexpireat("test:hash", past_exp_time, "field1", lt=True) == [2] - assert await r.hexists("test:hash", "field1") is False - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpireat_nonexistent_key_or_field(r): - await r.delete("test:hash") - future_exp_time = int((datetime.now() + timedelta(seconds=1)).timestamp()) - assert await r.hexpireat("test:hash", future_exp_time, "field1") == [] - await r.hset("test:hash", "field1", "value1") - assert await r.hexpireat("test:hash", future_exp_time, "nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpireat_multiple_fields(r): - await r.delete("test:hash") - await r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - exp_time = int((datetime.now() 
+ timedelta(seconds=1)).timestamp()) - assert await r.hexpireat("test:hash", exp_time, "field1", "field2") == [1, 1] - await asyncio.sleep(1.1) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is False - assert await r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpireat_basic(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = int((datetime.now() + timedelta(milliseconds=400)).timestamp() * 1000) - assert await r.hpexpireat("test:hash", exp_time, "field1") == [1] - await asyncio.sleep(0.5) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpireat_with_datetime(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = datetime.now() + timedelta(milliseconds=400) - assert await r.hpexpireat("test:hash", exp_time, "field1") == [1] - await asyncio.sleep(0.5) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpireat_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1"}) - future_exp_time = int( - (datetime.now() + timedelta(milliseconds=500)).timestamp() * 1000 - ) - past_exp_time = int( - (datetime.now() - timedelta(milliseconds=500)).timestamp() * 1000 - ) - assert await r.hpexpireat("test:hash", future_exp_time, "field1", xx=True) == [0] - assert await r.hpexpireat("test:hash", future_exp_time, "field1", nx=True) == [1] - assert await r.hpexpireat("test:hash", past_exp_time, "field1", gt=True) == [0] - assert await r.hpexpireat("test:hash", past_exp_time, "field1", lt=True) == [2] - assert await r.hexists("test:hash", 
"field1") is False - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpireat_nonexistent_key_or_field(r): - await r.delete("test:hash") - future_exp_time = int( - (datetime.now() + timedelta(milliseconds=500)).timestamp() * 1000 - ) - assert await r.hpexpireat("test:hash", future_exp_time, "field1") == [] - await r.hset("test:hash", "field1", "value1") - assert await r.hpexpireat("test:hash", future_exp_time, "nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -async def test_hpexpireat_multiple_fields(r): - await r.delete("test:hash") - await r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - exp_time = int((datetime.now() + timedelta(milliseconds=400)).timestamp() * 1000) - assert await r.hpexpireat("test:hash", exp_time, "field1", "field2") == [1, 1] - await asyncio.sleep(0.5) - assert await r.hexists("test:hash", "field1") is False - assert await r.hexists("test:hash", "field2") is False - assert await r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -async def test_hpersist_multiple_fields_mixed_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - await r.hexpire("test:hash", 5000, "field1") - assert await r.hpersist("test:hash", "field1", "field2", "field3") == [1, -1, -2] - - -@skip_if_server_version_lt("7.3.240") -async def test_hexpiretime_multiple_fields_mixed_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - await r.hexpireat("test:hash", future_time, "field1") - result = await r.hexpiretime("test:hash", "field1", "field2", "field3") - assert future_time - 10 < result[0] <= future_time - assert result[1:] == [-1, -2] - - -@skip_if_server_version_lt("7.3.240") -async def 
test_hpexpiretime_multiple_fields_mixed_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - await r.hexpireat("test:hash", future_time, "field1") - result = await r.hpexpiretime("test:hash", "field1", "field2", "field3") - assert future_time * 1000 - 10000 < result[0] <= future_time * 1000 - assert result[1:] == [-1, -2] - - -@skip_if_server_version_lt("7.3.240") -async def test_ttl_multiple_fields_mixed_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - await r.hexpireat("test:hash", future_time, "field1") - result = await r.httl("test:hash", "field1", "field2", "field3") - assert 30 * 60 - 10 < result[0] <= 30 * 60 - assert result[1:] == [-1, -2] - - -@skip_if_server_version_lt("7.3.240") -async def test_pttl_multiple_fields_mixed_conditions(r): - await r.delete("test:hash") - await r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - await r.hexpireat("test:hash", future_time, "field1") - result = await r.hpttl("test:hash", "field1", "field2", "field3") - assert 30 * 60000 - 10000 < result[0] <= 30 * 60000 - assert result[1:] == [-1, -2] diff --git a/tests/test_hash.py b/tests/test_hash.py deleted file mode 100644 index 9519e0a6..00000000 --- a/tests/test_hash.py +++ /dev/null @@ -1,371 +0,0 @@ -import time -from datetime import datetime, timedelta - -import pytest -from tests.conftest import skip_if_server_version_lt - -pytestmark = pytest.mark.skip - - -@skip_if_server_version_lt("7.3.240") -def test_hexpire_basic(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert r.hexpire("test:hash", 1, "field1") == [1] - 
time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hexpire_with_timedelta(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert r.hexpire("test:hash", timedelta(seconds=1), "field1") == [1] - time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hexpire_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - assert r.hexpire("test:hash", 2, "field1", xx=True) == [0] - assert r.hexpire("test:hash", 2, "field1", nx=True) == [1] - assert r.hexpire("test:hash", 1, "field1", xx=True) == [1] - assert r.hexpire("test:hash", 2, "field1", nx=True) == [0] - time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - r.hset("test:hash", "field1", "value1") - r.hexpire("test:hash", 2, "field1") - assert r.hexpire("test:hash", 1, "field1", gt=True) == [0] - assert r.hexpire("test:hash", 1, "field1", lt=True) == [1] - time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - - -@skip_if_server_version_lt("7.3.240") -def test_hexpire_nonexistent_key_or_field(r): - r.delete("test:hash") - assert r.hexpire("test:hash", 1, "field1") == [] - r.hset("test:hash", "field1", "value1") - assert r.hexpire("test:hash", 1, "nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -def test_hexpire_multiple_fields(r): - r.delete("test:hash") - r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - assert r.hexpire("test:hash", 1, "field1", "field2") == [1, 1] - time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is False - assert r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -def 
test_hexpire_multiple_condition_flags_error(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - with pytest.raises(ValueError) as e: - r.hexpire("test:hash", 1, "field1", nx=True, xx=True) - assert "Only one of" in str(e) - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpire_basic(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert r.hpexpire("test:hash", 500, "field1") == [1] - time.sleep(0.6) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpire_with_timedelta(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - assert r.hpexpire("test:hash", timedelta(milliseconds=500), "field1") == [1] - time.sleep(0.6) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpire_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - assert r.hpexpire("test:hash", 1500, "field1", xx=True) == [0] - assert r.hpexpire("test:hash", 1500, "field1", nx=True) == [1] - assert r.hpexpire("test:hash", 500, "field1", xx=True) == [1] - assert r.hpexpire("test:hash", 1500, "field1", nx=True) == [0] - time.sleep(0.6) - assert r.hexists("test:hash", "field1") is False - r.hset("test:hash", "field1", "value1") - r.hpexpire("test:hash", 1000, "field1") - assert r.hpexpire("test:hash", 500, "field1", gt=True) == [0] - assert r.hpexpire("test:hash", 500, "field1", lt=True) == [1] - time.sleep(0.6) - assert r.hexists("test:hash", "field1") is False - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpire_nonexistent_key_or_field(r): - r.delete("test:hash") - assert r.hpexpire("test:hash", 500, "field1") == [] - r.hset("test:hash", "field1", "value1") - assert r.hpexpire("test:hash", 500, 
"nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpire_multiple_fields(r): - r.delete("test:hash") - r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - assert r.hpexpire("test:hash", 500, "field1", "field2") == [1, 1] - time.sleep(0.6) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is False - assert r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpire_multiple_condition_flags_error(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - with pytest.raises(ValueError) as e: - r.hpexpire("test:hash", 500, "field1", nx=True, xx=True) - assert "Only one of" in str(e) - - -@skip_if_server_version_lt("7.3.240") -def test_hexpireat_basic(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = int((datetime.now() + timedelta(seconds=1)).timestamp()) - assert r.hexpireat("test:hash", exp_time, "field1") == [1] - time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hexpireat_with_datetime(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = datetime.now() + timedelta(seconds=1) - assert r.hexpireat("test:hash", exp_time, "field1") == [1] - time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hexpireat_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - future_exp_time = int((datetime.now() + timedelta(seconds=2)).timestamp()) - past_exp_time = int((datetime.now() - timedelta(seconds=1)).timestamp()) - assert r.hexpireat("test:hash", future_exp_time, "field1", xx=True) == [0] - assert 
r.hexpireat("test:hash", future_exp_time, "field1", nx=True) == [1] - assert r.hexpireat("test:hash", past_exp_time, "field1", gt=True) == [0] - assert r.hexpireat("test:hash", past_exp_time, "field1", lt=True) == [2] - assert r.hexists("test:hash", "field1") is False - - -@skip_if_server_version_lt("7.3.240") -def test_hexpireat_nonexistent_key_or_field(r): - r.delete("test:hash") - future_exp_time = int((datetime.now() + timedelta(seconds=1)).timestamp()) - assert r.hexpireat("test:hash", future_exp_time, "field1") == [] - r.hset("test:hash", "field1", "value1") - assert r.hexpireat("test:hash", future_exp_time, "nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -def test_hexpireat_multiple_fields(r): - r.delete("test:hash") - r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - exp_time = int((datetime.now() + timedelta(seconds=1)).timestamp()) - assert r.hexpireat("test:hash", exp_time, "field1", "field2") == [1, 1] - time.sleep(1.1) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is False - assert r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hexpireat_multiple_condition_flags_error(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - exp_time = int((datetime.now() + timedelta(seconds=1)).timestamp()) - with pytest.raises(ValueError) as e: - r.hexpireat("test:hash", exp_time, "field1", nx=True, xx=True) - assert "Only one of" in str(e) - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpireat_basic(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = int((datetime.now() + timedelta(milliseconds=400)).timestamp() * 1000) - assert r.hpexpireat("test:hash", exp_time, "field1") == [1] - time.sleep(0.5) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - 
-@skip_if_server_version_lt("7.3.240") -def test_hpexpireat_with_datetime(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - exp_time = datetime.now() + timedelta(milliseconds=400) - assert r.hpexpireat("test:hash", exp_time, "field1") == [1] - time.sleep(0.5) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", "field2") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpireat_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - future_exp_time = int( - (datetime.now() + timedelta(milliseconds=500)).timestamp() * 1000 - ) - past_exp_time = int( - (datetime.now() - timedelta(milliseconds=500)).timestamp() * 1000 - ) - assert r.hpexpireat("test:hash", future_exp_time, "field1", xx=True) == [0] - assert r.hpexpireat("test:hash", future_exp_time, "field1", nx=True) == [1] - assert r.hpexpireat("test:hash", past_exp_time, "field1", gt=True) == [0] - assert r.hpexpireat("test:hash", past_exp_time, "field1", lt=True) == [2] - assert r.hexists("test:hash", "field1") is False - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpireat_nonexistent_key_or_field(r): - r.delete("test:hash") - future_exp_time = int( - (datetime.now() + timedelta(milliseconds=500)).timestamp() * 1000 - ) - assert r.hpexpireat("test:hash", future_exp_time, "field1") == [] - r.hset("test:hash", "field1", "value1") - assert r.hpexpireat("test:hash", future_exp_time, "nonexistent_field") == [-2] - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpireat_multiple_fields(r): - r.delete("test:hash") - r.hset( - "test:hash", - mapping={"field1": "value1", "field2": "value2", "field3": "value3"}, - ) - exp_time = int((datetime.now() + timedelta(milliseconds=400)).timestamp() * 1000) - assert r.hpexpireat("test:hash", exp_time, "field1", "field2") == [1, 1] - time.sleep(0.5) - assert r.hexists("test:hash", "field1") is False - assert r.hexists("test:hash", 
"field2") is False - assert r.hexists("test:hash", "field3") is True - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpireat_multiple_condition_flags_error(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1"}) - exp_time = int((datetime.now() + timedelta(milliseconds=500)).timestamp()) - with pytest.raises(ValueError) as e: - r.hpexpireat("test:hash", exp_time, "field1", nx=True, xx=True) - assert "Only one of" in str(e) - - -@skip_if_server_version_lt("7.3.240") -def test_hpersist_multiple_fields(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - r.hexpire("test:hash", 5000, "field1") - assert r.hpersist("test:hash", "field1", "field2", "field3") == [1, -1, -2] - - -@skip_if_server_version_lt("7.3.240") -def test_hpersist_nonexistent_key(r): - r.delete("test:hash") - assert r.hpersist("test:hash", "field1", "field2", "field3") == [] - - -@skip_if_server_version_lt("7.3.240") -def test_hexpiretime_multiple_fields_mixed_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - r.hexpireat("test:hash", future_time, "field1") - result = r.hexpiretime("test:hash", "field1", "field2", "field3") - assert future_time - 10 < result[0] <= future_time - assert result[1:] == [-1, -2] - - -@skip_if_server_version_lt("7.3.240") -def test_hexpiretime_nonexistent_key(r): - r.delete("test:hash") - assert r.hexpiretime("test:hash", "field1", "field2", "field3") == [] - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpiretime_multiple_fields_mixed_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - r.hexpireat("test:hash", future_time, "field1") - result = r.hpexpiretime("test:hash", "field1", "field2", "field3") - assert future_time 
* 1000 - 10000 < result[0] <= future_time * 1000 - assert result[1:] == [-1, -2] - - -@skip_if_server_version_lt("7.3.240") -def test_hpexpiretime_nonexistent_key(r): - r.delete("test:hash") - assert r.hpexpiretime("test:hash", "field1", "field2", "field3") == [] - - -@skip_if_server_version_lt("7.3.240") -def test_httl_multiple_fields_mixed_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - r.hexpireat("test:hash", future_time, "field1") - result = r.httl("test:hash", "field1", "field2", "field3") - assert 30 * 60 - 10 < result[0] <= 30 * 60 - assert result[1:] == [-1, -2] - - -@skip_if_server_version_lt("7.3.240") -def test_httl_nonexistent_key(r): - r.delete("test:hash") - assert r.httl("test:hash", "field1", "field2", "field3") == [] - - -@skip_if_server_version_lt("7.3.240") -def test_hpttl_multiple_fields_mixed_conditions(r): - r.delete("test:hash") - r.hset("test:hash", mapping={"field1": "value1", "field2": "value2"}) - future_time = int((datetime.now() + timedelta(minutes=30)).timestamp()) - r.hexpireat("test:hash", future_time, "field1") - result = r.hpttl("test:hash", "field1", "field2", "field3") - assert 30 * 60000 - 10000 < result[0] <= 30 * 60000 - assert result[1:] == [-1, -2] - - -@skip_if_server_version_lt("7.3.240") -def test_hpttl_nonexistent_key(r): - r.delete("test:hash") - assert r.hpttl("test:hash", "field1", "field2", "field3") == [] diff --git a/valkey/commands/core.py b/valkey/commands/core.py index 75f45eec..90dfb3b8 100644 --- a/valkey/commands/core.py +++ b/valkey/commands/core.py @@ -5092,410 +5092,6 @@ def hstrlen(self, name: str, key: str) -> Union[Awaitable[int], int]: """ return self.execute_command("HSTRLEN", name, key, keys=[name]) - def hexpire( - self, - name: KeyT, - seconds: ExpiryT, - *fields: str, - nx: bool = False, - xx: bool = False, - gt: bool = False, - lt: bool = False, - ) -> 
ResponseT: - """ - :meta private: - - Command not available in Valkey - - Sets or updates the expiration time for fields within a hash key, using relative - time in seconds. - - If a field already has an expiration time, the behavior of the update can be - controlled using the `nx`, `xx`, `gt`, and `lt` parameters. - - The return value provides detailed information about the outcome for each field. - - For more information, see https://redis.io/commands/hexpire - - Args: - name: The name of the hash key. - seconds: Expiration time in seconds, relative. Can be an integer, or a - Python `timedelta` object. - fields: List of fields within the hash to apply the expiration time to. - nx: Set expiry only when the field has no expiry. - xx: Set expiry only when the field has an existing expiry. - gt: Set expiry only when the new expiry is greater than the current one. - lt: Set expiry only when the new expiry is less than the current one. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `0` if the specified NX | XX | GT | LT condition was not met. - - `1` if the expiration time was set or updated. - - `2` if the field was deleted because the specified expiration time is - in the past. 
- """ - conditions = [nx, xx, gt, lt] - if sum(conditions) > 1: - raise ValueError("Only one of 'nx', 'xx', 'gt', 'lt' can be specified.") - - if isinstance(seconds, datetime.timedelta): - seconds = int(seconds.total_seconds()) - - options = [] - if nx: - options.append("NX") - if xx: - options.append("XX") - if gt: - options.append("GT") - if lt: - options.append("LT") - - return self.execute_command( - "HEXPIRE", name, seconds, *options, "FIELDS", len(fields), *fields - ) - - def hpexpire( - self, - name: KeyT, - milliseconds: ExpiryT, - *fields: str, - nx: bool = False, - xx: bool = False, - gt: bool = False, - lt: bool = False, - ) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Sets or updates the expiration time for fields within a hash key, using relative - time in milliseconds. - - If a field already has an expiration time, the behavior of the update can be - controlled using the `nx`, `xx`, `gt`, and `lt` parameters. - - The return value provides detailed information about the outcome for each field. - - For more information, see https://redis.io/commands/hpexpire - - Args: - name: The name of the hash key. - milliseconds: Expiration time in milliseconds, relative. Can be an integer, - or a Python `timedelta` object. - fields: List of fields within the hash to apply the expiration time to. - nx: Set expiry only when the field has no expiry. - xx: Set expiry only when the field has an existing expiry. - gt: Set expiry only when the new expiry is greater than the current one. - lt: Set expiry only when the new expiry is less than the current one. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `0` if the specified NX | XX | GT | LT condition was not met. - - `1` if the expiration time was set or updated. - - `2` if the field was deleted because the specified expiration time is - in the past. 
- """ - conditions = [nx, xx, gt, lt] - if sum(conditions) > 1: - raise ValueError("Only one of 'nx', 'xx', 'gt', 'lt' can be specified.") - - if isinstance(milliseconds, datetime.timedelta): - milliseconds = int(milliseconds.total_seconds() * 1000) - - options = [] - if nx: - options.append("NX") - if xx: - options.append("XX") - if gt: - options.append("GT") - if lt: - options.append("LT") - - return self.execute_command( - "HPEXPIRE", name, milliseconds, *options, "FIELDS", len(fields), *fields - ) - - def hexpireat( - self, - name: KeyT, - unix_time_seconds: AbsExpiryT, - *fields: str, - nx: bool = False, - xx: bool = False, - gt: bool = False, - lt: bool = False, - ) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Sets or updates the expiration time for fields within a hash key, using an - absolute Unix timestamp in seconds. - - If a field already has an expiration time, the behavior of the update can be - controlled using the `nx`, `xx`, `gt`, and `lt` parameters. - - The return value provides detailed information about the outcome for each field. - - For more information, see https://redis.io/commands/hexpireat - - Args: - name: The name of the hash key. - unix_time_seconds: Expiration time as Unix timestamp in seconds. Can be an - integer or a Python `datetime` object. - fields: List of fields within the hash to apply the expiration time to. - nx: Set expiry only when the field has no expiry. - xx: Set expiry only when the field has an existing expiration time. - gt: Set expiry only when the new expiry is greater than the current one. - lt: Set expiry only when the new expiry is less than the current one. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `0` if the specified NX | XX | GT | LT condition was not met. - - `1` if the expiration time was set or updated. 
- - `2` if the field was deleted because the specified expiration time is - in the past. - """ - conditions = [nx, xx, gt, lt] - if sum(conditions) > 1: - raise ValueError("Only one of 'nx', 'xx', 'gt', 'lt' can be specified.") - - if isinstance(unix_time_seconds, datetime.datetime): - unix_time_seconds = int(unix_time_seconds.timestamp()) - - options = [] - if nx: - options.append("NX") - if xx: - options.append("XX") - if gt: - options.append("GT") - if lt: - options.append("LT") - - return self.execute_command( - "HEXPIREAT", - name, - unix_time_seconds, - *options, - "FIELDS", - len(fields), - *fields, - ) - - def hpexpireat( - self, - name: KeyT, - unix_time_milliseconds: AbsExpiryT, - *fields: str, - nx: bool = False, - xx: bool = False, - gt: bool = False, - lt: bool = False, - ) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Sets or updates the expiration time for fields within a hash key, using an - absolute Unix timestamp in milliseconds. - - If a field already has an expiration time, the behavior of the update can be - controlled using the `nx`, `xx`, `gt`, and `lt` parameters. - - The return value provides detailed information about the outcome for each field. - - For more information, see https://redis.io/commands/hpexpireat - - Args: - name: The name of the hash key. - unix_time_milliseconds: Expiration time as Unix timestamp in milliseconds. - Can be an integer or a Python `datetime` object. - fields: List of fields within the hash to apply the expiry. - nx: Set expiry only when the field has no expiry. - xx: Set expiry only when the field has an existing expiry. - gt: Set expiry only when the new expiry is greater than the current one. - lt: Set expiry only when the new expiry is less than the current one. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. 
- - `0` if the specified NX | XX | GT | LT condition was not met. - - `1` if the expiration time was set or updated. - - `2` if the field was deleted because the specified expiration time is - in the past. - """ - conditions = [nx, xx, gt, lt] - if sum(conditions) > 1: - raise ValueError("Only one of 'nx', 'xx', 'gt', 'lt' can be specified.") - - if isinstance(unix_time_milliseconds, datetime.datetime): - unix_time_milliseconds = int(unix_time_milliseconds.timestamp() * 1000) - - options = [] - if nx: - options.append("NX") - if xx: - options.append("XX") - if gt: - options.append("GT") - if lt: - options.append("LT") - - return self.execute_command( - "HPEXPIREAT", - name, - unix_time_milliseconds, - *options, - "FIELDS", - len(fields), - *fields, - ) - - def hpersist(self, name: KeyT, *fields: str) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Removes the expiration time for each specified field in a hash. - - For more information, see https://redis.io/commands/hpersist - - Args: - name: The name of the hash key. - fields: A list of fields within the hash from which to remove the - expiration time. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `-1` if the field exists but has no associated expiration time. - - `1` if the expiration time was successfully removed from the field. - """ - return self.execute_command("HPERSIST", name, "FIELDS", len(fields), *fields) - - def hexpiretime(self, key: KeyT, *fields: str) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Returns the expiration times of hash fields as Unix timestamps in seconds. - - For more information, see https://redis.io/commands/hexpiretime - - Args: - key: The hash key. - fields: A list of fields within the hash for which to get the expiration - time. 
- - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `-1` if the field exists but has no associated expire time. - - A positive integer representing the expiration Unix timestamp in - seconds, if the field has an associated expiration time. - """ - return self.execute_command( - "HEXPIRETIME", key, "FIELDS", len(fields), *fields, keys=[key] - ) - - def hpexpiretime(self, key: KeyT, *fields: str) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Returns the expiration times of hash fields as Unix timestamps in milliseconds. - - For more information, see https://redis.io/commands/hpexpiretime - - Args: - key: The hash key. - fields: A list of fields within the hash for which to get the expiration - time. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `-1` if the field exists but has no associated expire time. - - A positive integer representing the expiration Unix timestamp in - milliseconds, if the field has an associated expiration time. - """ - return self.execute_command( - "HPEXPIRETIME", key, "FIELDS", len(fields), *fields, keys=[key] - ) - - def httl(self, key: KeyT, *fields: str) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Returns the TTL (Time To Live) in seconds for each specified field within a hash - key. - - For more information, see https://redis.io/commands/httl - - Args: - key: The hash key. - fields: A list of fields within the hash for which to get the TTL. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `-1` if the field exists but has no associated expire time. 
- - A positive integer representing the TTL in seconds if the field has - an associated expiration time. - """ - return self.execute_command( - "HTTL", key, "FIELDS", len(fields), *fields, keys=[key] - ) - - def hpttl(self, key: KeyT, *fields: str) -> ResponseT: - """ - :meta private: - - Command not available in Valkey - - Returns the TTL (Time To Live) in milliseconds for each specified field within a - hash key. - - For more information, see https://redis.io/commands/hpttl - - Args: - key: The hash key. - fields: A list of fields within the hash for which to get the TTL. - - Returns: - If the key does not exist, returns an empty list. If the key exists, returns - a list which contains for each field in the request: - - `-2` if the field does not exist. - - `-1` if the field exists but has no associated expire time. - - A positive integer representing the TTL in milliseconds if the field - has an associated expiration time. - """ - return self.execute_command( - "HPTTL", key, "FIELDS", len(fields), *fields, keys=[key] - ) - AsyncHashCommands = HashCommands From 0d5658417c120a4e02e86713548e1fda615d18ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 11:26:45 +0000 Subject: [PATCH 37/39] build(deps): bump codecov/codecov-action from 4 to 5 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4 to 5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v4...v5) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Signed-off-by: Raphaël Vinot --- .github/workflows/integration.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 1aa9a29c..f1ff9f0e 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -128,7 +128,7 @@ jobs: path: '${{matrix.test-type}}*results.xml' - name: Upload codecov coverage - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: fail_ci_if_error: false token: ${{ secrets.CODECOV_TOKEN }} From c1479118e6bffbdaa6af7463fddcc5ab38d35f75 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 11:26:47 +0000 Subject: [PATCH 38/39] build(deps): bump rojopolis/spellcheck-github-actions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [rojopolis/spellcheck-github-actions](https://github.com/rojopolis/spellcheck-github-actions) from 0.44.0 to 0.45.0. - [Release notes](https://github.com/rojopolis/spellcheck-github-actions/releases) - [Changelog](https://github.com/rojopolis/spellcheck-github-actions/blob/master/CHANGELOG.md) - [Commits](https://github.com/rojopolis/spellcheck-github-actions/compare/0.44.0...0.45.0) --- updated-dependencies: - dependency-name: rojopolis/spellcheck-github-actions dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Signed-off-by: Raphaël Vinot --- .github/workflows/spellcheck.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 83071568..977f8c5c 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -8,7 +8,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Check Spelling - uses: rojopolis/spellcheck-github-actions@0.44.0 + uses: rojopolis/spellcheck-github-actions@0.45.0 with: config_path: .github/spellcheck-settings.yml task_name: Markdown From 121ce37d38a3a81a5137efd68f5e68999ec2bcb7 Mon Sep 17 00:00:00 2001 From: Mikhail Koviazin Date: Mon, 2 Dec 2024 11:09:36 +0100 Subject: [PATCH 39/39] v6.1.0b1 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Mikhail Koviazin Signed-off-by: Raphaël Vinot --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2df4059d..e155d98e 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ long_description_content_type="text/markdown", keywords=["Valkey", "key-value store", "database"], license="MIT", - version="6.0.2", + version="6.1.0b1", packages=find_packages( include=[ "valkey",