From a2d19cd6061745fd86cb9fc1592b6b38cac5bd88 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 03:20:24 +0000 Subject: [PATCH 01/90] Base version update to what is pushed as latest to enable PR for base refactor branch Signed-off-by: Mark Kurtz --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 009cf362..623bad28 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ from setuptools import setup from setuptools_git_versioning import count_since, get_branch, get_sha, get_tags -LAST_RELEASE_VERSION = Version("0.0.0") +LAST_RELEASE_VERSION = Version("0.3.0") TAG_VERSION_PATTERN = re.compile(r"^v(\d+\.\d+\.\d+)$") From cd5a92df7752fd10494fd687c2de97c092a8d3b3 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 03:20:24 +0000 Subject: [PATCH 02/90] Base version update to what is pushed as latest to enable PR for base refactor branch Signed-off-by: Mark Kurtz --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 009cf362..623bad28 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ from setuptools import setup from setuptools_git_versioning import count_since, get_branch, get_sha, get_tags -LAST_RELEASE_VERSION = Version("0.0.0") +LAST_RELEASE_VERSION = Version("0.3.0") TAG_VERSION_PATTERN = re.compile(r"^v(\d+\.\d+\.\d+)$") From 8d6e19ad01768d2b432063c40a8c6caee71542a5 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 03:42:15 +0000 Subject: [PATCH 03/90] core changes for refactor including pyproject.toml updates and renaming config.py to settings.py due to later config additions and potential conflicts in naming Signed-off-by: Mark Kurtz --- pyproject.toml | 141 +++++++++--------- src/guidellm/__init__.py | 4 +- src/guidellm/__main__.py | 2 +- src/guidellm/backend/backend.py | 2 +- src/guidellm/backend/openai.py | 2 +- src/guidellm/backend/response.py | 2 +- src/guidellm/benchmark/aggregator.py | 2 +- src/guidellm/benchmark/output.py | 2 +- src/guidellm/benchmark/profile.py | 2 +- src/guidellm/logger.py | 4 +- src/guidellm/presentation/injector.py | 2 +- src/guidellm/request/loader.py | 2 +- src/guidellm/scheduler/scheduler.py | 2 +- src/guidellm/scheduler/strategy.py | 2 +- src/guidellm/{config.py => settings.py} | 0 src/guidellm/utils/text.py | 2 +- tests/unit/backend/test_openai_backend.py | 2 +- .../test_openai_backend_custom_configs.py | 2 +- tests/unit/presentation/test_injector.py | 2 +- tests/unit/test_config.py | 2 +- tests/unit/test_logger.py | 2 +- 21 files changed, 93 insertions(+), 90 deletions(-) rename src/guidellm/{config.py => settings.py} (100%) diff --git a/pyproject.toml b/pyproject.toml index 984f44ae..964cefd6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,9 +10,6 @@ include = ["*"] [tool.setuptools.package-data] "guidellm.data" = ["*.gz"] -[tool.pdm] -distribution = true - # ************************************************ # ********** Project Metadata ********** @@ -24,9 +21,8 @@ name = "guidellm" description = "Guidance platform for deploying and managing large language models." 
readme = { file = "README.md", content-type = "text/markdown" } requires-python = ">=3.9.0,<4.0" -license = "Apache-2.0" -license-files = ["LICENSE"] -authors = [ { name = "Red Hat" } ] +license = {text = "Apache-2.0"} +authors = [{ name = "Red Hat" }] keywords = [ "ai", "benchmarking", @@ -47,25 +43,28 @@ keywords = [ ] dependencies = [ "click>=8.0.0,<8.2.0", + "culsans~=0.9.0", "datasets", + "eval_type_backport", + "faker", "ftfy>=6.0.0", "httpx[http2]<1.0.0", "loguru", + "msgpack", "numpy", "pillow", "protobuf", "pydantic>=2.11.7", "pydantic-settings>=2.0.0", + "pyhumps>=3.8.0", "pyyaml>=6.0.0", "rich", + "sanic", "transformers", + "uvloop>=0.18", ] [project.optional-dependencies] -recommended = [ - "tiktoken>=0.11.0", # For OpenAI tokenizer - "blobfile>=3.1.0", # For OpenAI tokenizer -] dev = [ # build "build>=1.0.0", @@ -81,7 +80,7 @@ dev = [ # testing "lorem~=0.1.1", "pytest~=8.2.2", - "pytest-asyncio~=0.23.8", + "pytest-asyncio~=1.1.0", "pytest-cov~=5.0.0", "pytest-mock~=3.14.0", "pytest-rerunfailures~=14.0", @@ -106,9 +105,6 @@ dev = [ "mkdocs-linkcheck~=1.0.6", ] -[dependency-groups] -dev = [ "guidellm[dev]" ] - [project.urls] homepage = "https://github.com/vllm-project/guidellm" source = "https://github.com/vllm-project/guidellm" @@ -143,11 +139,17 @@ exclude = ["venv", ".tox"] follow_imports = 'silent' [[tool.mypy.overrides]] -module = ["datasets.*", "transformers.*", "setuptools.*", "setuptools_git_versioning.*"] -ignore_missing_imports=true +module = [ + "datasets.*", + "transformers.*", + "setuptools.*", + "setuptools_git_versioning.*", +] +ignore_missing_imports = true [tool.ruff] +target-version = "py39" line-length = 88 indent-width = 4 exclude = ["build", "dist", "env", ".venv"] @@ -158,64 +160,65 @@ indent-style = "space" [tool.ruff.lint] ignore = [ - "PLR0913", - "TC001", - "COM812", - "ISC001", - "TC002", + "COM812", # ignore trailing comma errors due to older Python versions + "PD011", # ignore .values usage since ruff assumes it's a Pandas DataFrame + "PLR0913", # ignore too many arguments in function definitions "PLW1514", # allow Path.open without encoding - "RET505", # allow `else` blocks - "RET506", # allow `else` blocks - "PD011", # ignore .values usage since ruff assumes it's a Pandas DataFrame + "RET505", # allow `else` blocks + "RET506", # allow `else` blocks + "S311", # allow standard pseudo-random generators + "TC001", # ignore imports used only for type checking + "TC002", # ignore imports used only for type checking + "TC003", # ignore imports used only for type checking ] select = [ # Rules reference: https://docs.astral.sh/ruff/rules/ # Code Style / Formatting - "E", # pycodestyle: checks adherence to PEP 8 conventions including spacing, indentation, and line length - "W", # pycodestyle: checks adherence to PEP 8 conventions including spacing, indentation, and line length - "A", # flake8-builtins: prevents shadowing of Python built-in names - "C", # Convention: ensures code adheres to specific style and formatting conventions - "COM", # flake8-commas: enforces the correct use of trailing commas - "ERA", # eradicate: detects commented-out code that should be removed - "I", # isort: ensures imports are sorted in a consistent manner - "ICN", # flake8-import-conventions: enforces import conventions for better readability - "N", # pep8-naming: enforces PEP 8 naming conventions for classes, functions, and variables - "NPY", # NumPy: enforces best practices for using the NumPy library - "PD", # pandas-vet: enforces best practices for using the pandas library - 
"PT", # flake8-pytest-style: enforces best practices and style conventions for pytest tests - "PTH", # flake8-use-pathlib: encourages the use of pathlib over os.path for file system operations - "Q", # flake8-quotes: enforces consistent use of single or double quotes - "TCH", # flake8-type-checking: enforces type checking practices and standards - "TID", # flake8-tidy-imports: enforces tidy and well-organized imports + "E", # pycodestyle: checks adherence to PEP 8 conventions including spacing, indentation, and line length + "W", # pycodestyle: checks adherence to PEP 8 conventions including spacing, indentation, and line length + "A", # flake8-builtins: prevents shadowing of Python built-in names + "C", # Convention: ensures code adheres to specific style and formatting conventions + "COM", # flake8-commas: enforces the correct use of trailing commas + "ERA", # eradicate: detects commented-out code that should be removed + "I", # isort: ensures imports are sorted in a consistent manner + "ICN", # flake8-import-conventions: enforces import conventions for better readability + "N", # pep8-naming: enforces PEP 8 naming conventions for classes, functions, and variables + "NPY", # NumPy: enforces best practices for using the NumPy library + "PD", # pandas-vet: enforces best practices for using the pandas library + "PT", # flake8-pytest-style: enforces best practices and style conventions for pytest tests + "PTH", # flake8-use-pathlib: encourages the use of pathlib over os.path for file system operations + "Q", # flake8-quotes: enforces consistent use of single or double quotes + "TCH", # flake8-type-checking: enforces type checking practices and standards + "TID", # flake8-tidy-imports: enforces tidy and well-organized imports "RUF022", # flake8-ruff: enforce sorting of __all__ in modules # Code Structure / Complexity - "C4", # flake8-comprehensions: improves readability and performance of list, set, and dict comprehensions + "C4", # flake8-comprehensions: improves readability and performance of list, set, and dict comprehensions "C90", # mccabe: checks for overly complex code using cyclomatic complexity "ISC", # flake8-implicit-str-concat: prevents implicit string concatenation "PIE", # flake8-pie: identifies and corrects common code inefficiencies and mistakes - "R", # Refactor: suggests improvements to code structure and readability + "R", # Refactor: suggests improvements to code structure and readability "SIM", # flake8-simplify: simplifies complex expressions and improves code readability # Code Security / Bug Prevention - "ARG", # flake8-unused-arguments: detects unused function and method arguments + "ARG", # flake8-unused-arguments: detects unused function and method arguments "ASYNC", # flake8-async: identifies incorrect or inefficient usage patterns in asynchronous code - "B", # flake8-bugbear: detects common programming mistakes and potential bugs - "BLE", # flake8-blind-except: prevents blind exceptions that catch all exceptions without handling - "E", # Error: detects and reports errors in the code - "F", # Pyflakes: detects unused imports, shadowed imports, undefined variables, and various formatting errors in string operations - "INP", # flake8-no-pep420: prevents implicit namespace packages by requiring __init__.py - "PGH", # pygrep-hooks: detects deprecated and dangerous code patterns - "PL", # Pylint: comprehensive source code analyzer for enforcing coding standards and detecting errors - "RSE", # flake8-raise: ensures exceptions are raised correctly - "S", # flake8-bandit: 
detects security issues and vulnerabilities in the code - "SLF", # flake8-self: prevents incorrect usage of the self argument in class methods - "T10", # flake8-debugger: detects the presence of debugging tools such as pdb - "T20", # flake8-print: detects print statements left in the code - "UP", # pyupgrade: automatically upgrades syntax for newer versions of Python - "W", # Warning: provides warnings about potential issues in the code - "YTT", # flake8-2020: identifies code that will break with future Python releases + "B", # flake8-bugbear: detects common programming mistakes and potential bugs + "BLE", # flake8-blind-except: prevents blind exceptions that catch all exceptions without handling + "E", # Error: detects and reports errors in the code + "F", # Pyflakes: detects unused imports, shadowed imports, undefined variables, and various formatting errors in string operations + "INP", # flake8-no-pep420: prevents implicit namespace packages by requiring __init__.py + "PGH", # pygrep-hooks: detects deprecated and dangerous code patterns + "PL", # Pylint: comprehensive source code analyzer for enforcing coding standards and detecting errors + "RSE", # flake8-raise: ensures exceptions are raised correctly + "S", # flake8-bandit: detects security issues and vulnerabilities in the code + "SLF", # flake8-self: prevents incorrect usage of the self argument in class methods + "T10", # flake8-debugger: detects the presence of debugging tools such as pdb + "T20", # flake8-print: detects print statements left in the code + "UP", # pyupgrade: automatically upgrades syntax for newer versions of Python + "W", # Warning: provides warnings about potential issues in the code + "YTT", # flake8-2020: identifies code that will break with future Python releases # Code Documentation "FIX", # flake8-fixme: detects FIXMEs and other temporary comments that should be resolved @@ -223,17 +226,17 @@ select = [ [tool.ruff.lint.extend-per-file-ignores] "tests/**/*.py" = [ - "S101", # asserts allowed in tests - "ARG", # Unused function args allowed in tests + "S101", # asserts allowed in tests + "ARG", # Unused function args allowed in tests "PLR2004", # Magic value used in comparison - "TCH002", # No import only type checking in tests - "SLF001", # enable private member access in tests - "S105", # allow hardcoded passwords in tests - "S311", # allow standard pseudo-random generators in tests - "PT011", # allow generic exceptions in tests - "N806", # allow uppercase variable names in tests - "PGH003", # allow general ignores in tests - "S106", # allow hardcoded passwords in tests + "TCH002", # No import only type checking in tests + "SLF001", # enable private member access in tests + "S105", # allow hardcoded passwords in tests + "S311", # allow standard pseudo-random generators in tests + "PT011", # allow generic exceptions in tests + "N806", # allow uppercase variable names in tests + "PGH003", # allow general ignores in tests + "S106", # allow hardcoded passwords in tests "PLR0915", # allow complext statements in tests ] @@ -246,5 +249,5 @@ addopts = '-s -vvv --cache-clear' markers = [ "smoke: quick tests to check basic functionality", "sanity: detailed tests to ensure major functions work correctly", - "regression: tests to ensure that new changes do not break existing functionality" + "regression: tests to ensure that new changes do not break existing functionality", ] diff --git a/src/guidellm/__init__.py b/src/guidellm/__init__.py index 9333860e..f2206e94 100644 --- a/src/guidellm/__init__.py +++ 
b/src/guidellm/__init__.py @@ -20,7 +20,8 @@ hf_logging.set_verbosity_error() logging.getLogger("transformers").setLevel(logging.ERROR) -from .config import ( +from .logger import configure_logger, logger +from .settings import ( DatasetSettings, Environment, LoggingSettings, @@ -30,7 +31,6 @@ reload_settings, settings, ) -from .logger import configure_logger, logger __all__ = [ "DatasetSettings", diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 7cba6a7c..f82c19cf 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -13,9 +13,9 @@ ) from guidellm.benchmark.entrypoints import benchmark_with_scenario from guidellm.benchmark.scenario import GenerativeTextScenario, get_builtin_scenarios -from guidellm.config import print_config from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset from guidellm.scheduler import StrategyType +from guidellm.settings import print_config from guidellm.utils import DefaultGroupHandler from guidellm.utils import cli as cli_tools diff --git a/src/guidellm/backend/backend.py b/src/guidellm/backend/backend.py index bf2788a7..ceffdc77 100644 --- a/src/guidellm/backend/backend.py +++ b/src/guidellm/backend/backend.py @@ -7,7 +7,7 @@ from PIL import Image from guidellm.backend.response import ResponseSummary, StreamingTextResponse -from guidellm.config import settings +from guidellm.settings import settings __all__ = [ "Backend", diff --git a/src/guidellm/backend/openai.py b/src/guidellm/backend/openai.py index 680578cc..e1fcdf89 100644 --- a/src/guidellm/backend/openai.py +++ b/src/guidellm/backend/openai.py @@ -16,7 +16,7 @@ ResponseSummary, StreamingTextResponse, ) -from guidellm.config import settings +from guidellm.settings import settings __all__ = [ "CHAT_COMPLETIONS", diff --git a/src/guidellm/backend/response.py b/src/guidellm/backend/response.py index ee2101d7..f2272a73 100644 --- a/src/guidellm/backend/response.py +++ b/src/guidellm/backend/response.py @@ -2,8 +2,8 @@ from pydantic import computed_field -from guidellm.config import settings from guidellm.objects.pydantic import StandardBaseModel +from guidellm.settings import settings __all__ = [ "RequestArgs", diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index f10eb5ed..9e6ffd68 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -21,7 +21,6 @@ GenerativeTextErrorStats, GenerativeTextResponseStats, ) -from guidellm.config import settings from guidellm.objects import ( RunningStats, StandardBaseModel, @@ -40,6 +39,7 @@ SchedulerRequestResult, WorkerDescription, ) +from guidellm.settings import settings from guidellm.utils import check_load_processor __all__ = [ diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index da868106..6759f16f 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -20,7 +20,6 @@ SweepProfile, ThroughputProfile, ) -from guidellm.config import settings from guidellm.objects import ( DistributionSummary, StandardBaseModel, @@ -29,6 +28,7 @@ from guidellm.presentation import UIDataBuilder from guidellm.presentation.injector import create_report from guidellm.scheduler import strategy_display_str +from guidellm.settings import settings from guidellm.utils import Colors, split_text_list_by_length from guidellm.utils.dict import recursive_key_update from guidellm.utils.text import camelize_str diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index 
642cb7a8..ca25fc24 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -4,7 +4,6 @@ import numpy as np from pydantic import Field, computed_field -from guidellm.config import settings from guidellm.objects import StandardBaseModel from guidellm.scheduler import ( AsyncConstantStrategy, @@ -15,6 +14,7 @@ SynchronousStrategy, ThroughputStrategy, ) +from guidellm.settings import settings __all__ = [ "AsyncProfile", diff --git a/src/guidellm/logger.py b/src/guidellm/logger.py index 527d66ff..da3464f9 100644 --- a/src/guidellm/logger.py +++ b/src/guidellm/logger.py @@ -41,7 +41,7 @@ from loguru import logger -from guidellm.config import LoggingSettings, settings +from guidellm.settings import LoggingSettings, settings __all__ = ["configure_logger", "logger"] @@ -72,7 +72,7 @@ def configure_logger(config: LoggingSettings = settings.logging): sys.stdout, level=config.console_log_level.upper(), format="{time:YY-MM-DD HH:mm:ss}|{level: <8} \ - |{name}:{function}:{line} - {message}" + |{name}:{function}:{line} - {message}", ) if config.log_file or config.log_file_level: diff --git a/src/guidellm/presentation/injector.py b/src/guidellm/presentation/injector.py index 02d53b1d..bb1fd684 100644 --- a/src/guidellm/presentation/injector.py +++ b/src/guidellm/presentation/injector.py @@ -4,7 +4,7 @@ from loguru import logger -from guidellm.config import settings +from guidellm.settings import settings from guidellm.utils.text import load_text diff --git a/src/guidellm/request/loader.py b/src/guidellm/request/loader.py index 48566976..1c875046 100644 --- a/src/guidellm/request/loader.py +++ b/src/guidellm/request/loader.py @@ -11,10 +11,10 @@ from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from transformers import PreTrainedTokenizerBase # type: ignore[import] -from guidellm.config import settings from guidellm.dataset import ColumnInputTypes, load_dataset from guidellm.objects import StandardBaseModel from guidellm.request.request import GenerationRequest +from guidellm.settings import settings __all__ = [ "GenerativeRequestLoader", diff --git a/src/guidellm/scheduler/scheduler.py b/src/guidellm/scheduler/scheduler.py index 83a611ec..11e1102a 100644 --- a/src/guidellm/scheduler/scheduler.py +++ b/src/guidellm/scheduler/scheduler.py @@ -14,7 +14,6 @@ from loguru import logger -from guidellm.config import settings from guidellm.request.types import ( RequestT, ResponseT, @@ -31,6 +30,7 @@ from guidellm.scheduler.worker import ( RequestsWorker, ) +from guidellm.settings import settings __all__ = ["Scheduler"] diff --git a/src/guidellm/scheduler/strategy.py b/src/guidellm/scheduler/strategy.py index 74d19266..81ff6558 100644 --- a/src/guidellm/scheduler/strategy.py +++ b/src/guidellm/scheduler/strategy.py @@ -10,8 +10,8 @@ from pydantic import Field -from guidellm.config import settings from guidellm.objects import StandardBaseModel +from guidellm.settings import settings __all__ = [ "AsyncConstantStrategy", diff --git a/src/guidellm/config.py b/src/guidellm/settings.py similarity index 100% rename from src/guidellm/config.py rename to src/guidellm/settings.py diff --git a/src/guidellm/utils/text.py b/src/guidellm/utils/text.py index 539ea8a0..3b9a2e26 100644 --- a/src/guidellm/utils/text.py +++ b/src/guidellm/utils/text.py @@ -10,7 +10,7 @@ from loguru import logger from guidellm import data as package_data -from guidellm.config import settings +from guidellm.settings import settings __all__ = [ "EndlessTextCreator", diff --git 
a/tests/unit/backend/test_openai_backend.py b/tests/unit/backend/test_openai_backend.py index 0a4c2c38..7123c590 100644 --- a/tests/unit/backend/test_openai_backend.py +++ b/tests/unit/backend/test_openai_backend.py @@ -3,7 +3,7 @@ import pytest from guidellm.backend import OpenAIHTTPBackend, ResponseSummary, StreamingTextResponse -from guidellm.config import settings +from guidellm.settings import settings @pytest.mark.smoke diff --git a/tests/unit/backend/test_openai_backend_custom_configs.py b/tests/unit/backend/test_openai_backend_custom_configs.py index 7f6706ad..5855152d 100644 --- a/tests/unit/backend/test_openai_backend_custom_configs.py +++ b/tests/unit/backend/test_openai_backend_custom_configs.py @@ -1,7 +1,7 @@ import pytest from guidellm.backend import OpenAIHTTPBackend -from guidellm.config import settings +from guidellm.settings import settings @pytest.mark.smoke diff --git a/tests/unit/presentation/test_injector.py b/tests/unit/presentation/test_injector.py index b2ff7116..da269815 100644 --- a/tests/unit/presentation/test_injector.py +++ b/tests/unit/presentation/test_injector.py @@ -3,8 +3,8 @@ import pytest from pydantic import BaseModel -from guidellm.config import settings from guidellm.presentation.injector import create_report, inject_data +from guidellm.settings import settings class ExampleModel(BaseModel): diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index f5d9415c..42c8901d 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -1,6 +1,6 @@ import pytest -from guidellm.config import ( +from guidellm.settings import ( DatasetSettings, Environment, LoggingSettings, diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 53e8b664..792c9770 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -3,7 +3,7 @@ import pytest from guidellm import configure_logger, logger -from guidellm.config import LoggingSettings +from guidellm.settings import LoggingSettings @pytest.fixture(autouse=True) From 669848d9a78b39ab3859734dba7e6069b0f061fc Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 03:49:54 +0000 Subject: [PATCH 04/90] remove improper readdition of pyhumps Signed-off-by: Mark Kurtz --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 964cefd6..966a032b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ name = "guidellm" description = "Guidance platform for deploying and managing large language models." 
readme = { file = "README.md", content-type = "text/markdown" } requires-python = ">=3.9.0,<4.0" -license = {text = "Apache-2.0"} +license = { text = "Apache-2.0" } authors = [{ name = "Red Hat" }] keywords = [ "ai", @@ -56,7 +56,6 @@ dependencies = [ "protobuf", "pydantic>=2.11.7", "pydantic-settings>=2.0.0", - "pyhumps>=3.8.0", "pyyaml>=6.0.0", "rich", "sanic", From 6b6ed9886da8fad98917711652682bf247ea06f6 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 03:55:12 +0000 Subject: [PATCH 05/90] refactors for the utility modules Signed-off-by: Mark Kurtz --- src/guidellm/utils/__init__.py | 96 ++- src/guidellm/utils/auto_importer.py | 98 +++ src/guidellm/utils/cli.py | 2 +- src/guidellm/utils/console.py | 183 ++++ src/guidellm/utils/dict.py | 23 - src/guidellm/utils/encoding.py | 787 +++++++++++++++++ src/guidellm/utils/functions.py | 133 +++ src/guidellm/utils/messaging.py | 1029 +++++++++++++++++++++++ src/guidellm/utils/mixins.py | 115 +++ src/guidellm/utils/pydantic_utils.py | 401 +++++++++ src/guidellm/utils/registry.py | 214 +++++ src/guidellm/utils/singleton.py | 130 +++ src/guidellm/utils/statistics.py | 990 ++++++++++++++++++++++ src/guidellm/utils/synchronous.py | 161 ++++ src/guidellm/utils/text.py | 199 ++++- tests/unit/utils/dict.py | 71 -- tests/unit/utils/test_auto_importer.py | 269 ++++++ tests/unit/utils/test_encoding.py | 556 ++++++++++++ tests/unit/utils/test_functions.py | 222 +++++ tests/unit/utils/test_messaging.py | 974 +++++++++++++++++++++ tests/unit/utils/test_mixins.py | 245 ++++++ tests/unit/utils/test_pydantic_utils.py | 1002 ++++++++++++++++++++++ tests/unit/utils/test_registry.py | 593 +++++++++++++ tests/unit/utils/test_singleton.py | 371 ++++++++ tests/unit/utils/test_synchronous.py | 238 ++++++ tests/unit/utils/test_text.py | 531 ++++++++++++ tests/unit/utils/text.py | 13 - 27 files changed, 9492 insertions(+), 154 deletions(-) create mode 100644 src/guidellm/utils/auto_importer.py create mode 100644 src/guidellm/utils/console.py delete mode 100644 src/guidellm/utils/dict.py create mode 100644 src/guidellm/utils/encoding.py create mode 100644 src/guidellm/utils/functions.py create mode 100644 src/guidellm/utils/messaging.py create mode 100644 src/guidellm/utils/mixins.py create mode 100644 src/guidellm/utils/pydantic_utils.py create mode 100644 src/guidellm/utils/registry.py create mode 100644 src/guidellm/utils/singleton.py create mode 100644 src/guidellm/utils/statistics.py create mode 100644 src/guidellm/utils/synchronous.py delete mode 100644 tests/unit/utils/dict.py create mode 100644 tests/unit/utils/test_auto_importer.py create mode 100644 tests/unit/utils/test_encoding.py create mode 100644 tests/unit/utils/test_functions.py create mode 100644 tests/unit/utils/test_messaging.py create mode 100644 tests/unit/utils/test_mixins.py create mode 100644 tests/unit/utils/test_pydantic_utils.py create mode 100644 tests/unit/utils/test_registry.py create mode 100644 tests/unit/utils/test_singleton.py create mode 100644 tests/unit/utils/test_synchronous.py create mode 100644 tests/unit/utils/test_text.py delete mode 100644 tests/unit/utils/text.py diff --git a/src/guidellm/utils/__init__.py b/src/guidellm/utils/__init__.py index 02f2427f..83a276b2 100644 --- a/src/guidellm/utils/__init__.py +++ b/src/guidellm/utils/__init__.py @@ -1,6 +1,21 @@ -from .colors import Colors +from .auto_importer import AutoImporterMixin +from .console import Colors, Console, ConsoleUpdateStep, StatusIcons, StatusStyles from .default_group import DefaultGroupHandler 
-from .dict import recursive_key_update +from .encoding import ( + Encoder, + EncodingTypesAlias, + MessageEncoding, + SerializationTypesAlias, + Serializer, +) +from .functions import ( + all_defined, + safe_add, + safe_divide, + safe_format_timestamp, + safe_getattr, + safe_multiply, +) from .hf_datasets import ( SUPPORTED_TYPES, save_dataset_to_file, @@ -8,32 +23,103 @@ from .hf_transformers import ( check_load_processor, ) +from .messaging import ( + InterProcessMessaging, + InterProcessMessagingManagerQueue, + InterProcessMessagingPipe, + InterProcessMessagingQueue, + SendMessageT, +) +from .mixins import InfoMixin +from .pydantic_utils import ( + PydanticClassRegistryMixin, + ReloadableBaseModel, + StandardBaseDict, + StandardBaseModel, + StatusBreakdown, +) from .random import IntegerRangeSampler +from .registry import RegistryMixin, RegistryObjT +from .singleton import SingletonMixin, ThreadSafeSingletonMixin +from .statistics import ( + DistributionSummary, + Percentiles, + RunningStats, + StatusDistributionSummary, + TimeRunningStats, +) +from .synchronous import ( + wait_for_sync_barrier, + wait_for_sync_event, + wait_for_sync_objects, +) from .text import ( EndlessTextCreator, - camelize_str, clean_text, filter_text, + format_value_display, is_puncutation, load_text, split_text, split_text_list_by_length, ) +from .typing import get_literal_vals __all__ = [ "SUPPORTED_TYPES", + "AutoImporterMixin", + "Colors", "Colors", + "Console", + "ConsoleUpdateStep", "DefaultGroupHandler", + "DistributionSummary", + "Encoder", + "EncodingTypesAlias", "EndlessTextCreator", + "InfoMixin", "IntegerRangeSampler", - "camelize_str", + "InterProcessMessaging", + "InterProcessMessagingManagerQueue", + "InterProcessMessagingPipe", + "InterProcessMessagingQueue", + "MessageEncoding", + "MessageEncoding", + "Percentiles", + "PydanticClassRegistryMixin", + "RegistryMixin", + "RegistryObjT", + "ReloadableBaseModel", + "RunningStats", + "SendMessageT", + "SerializationTypesAlias", + "Serializer", + "SingletonMixin", + "StandardBaseDict", + "StandardBaseModel", + "StatusBreakdown", + "StatusDistributionSummary", + "StatusIcons", + "StatusStyles", + "ThreadSafeSingletonMixin", + "TimeRunningStats", + "all_defined", "check_load_processor", "clean_text", "filter_text", + "format_value_display", + "get_literal_vals", "is_puncutation", "load_text", - "recursive_key_update", + "safe_add", + "safe_divide", + "safe_format_timestamp", + "safe_getattr", + "safe_multiply", "save_dataset_to_file", "split_text", "split_text_list_by_length", + "wait_for_sync_barrier", + "wait_for_sync_event", + "wait_for_sync_objects", ] diff --git a/src/guidellm/utils/auto_importer.py b/src/guidellm/utils/auto_importer.py new file mode 100644 index 00000000..5b939014 --- /dev/null +++ b/src/guidellm/utils/auto_importer.py @@ -0,0 +1,98 @@ +""" +Automatic module importing utilities for dynamic class discovery. + +This module provides a mixin class for automatic module importing within a package, +enabling dynamic discovery of classes and implementations without explicit imports. +It is particularly useful for auto-registering classes in a registry pattern where +subclasses need to be discoverable at runtime. + +The AutoImporterMixin can be combined with registration mechanisms to create +extensible systems where new implementations are automatically discovered and +registered when they are placed in the correct package structure. 
+""" + +from __future__ import annotations + +import importlib +import pkgutil +import sys +from typing import ClassVar + +__all__ = ["AutoImporterMixin"] + + +class AutoImporterMixin: + """ + Mixin class for automatic module importing within packages. + + This mixin enables dynamic discovery of classes and implementations without + explicit imports by automatically importing all modules within specified + packages. It is designed for use with class registration mechanisms to enable + automatic discovery and registration of classes when they are placed in the + correct package structure. + + Example: + :: + from guidellm.utils import AutoImporterMixin + + class MyRegistry(AutoImporterMixin): + auto_package = "my_package.implementations" + + MyRegistry.auto_import_package_modules() + + :cvar auto_package: Package name or tuple of package names to import modules from + :cvar auto_ignore_modules: Module names to ignore during import + :cvar auto_imported_modules: List tracking which modules have been imported + """ + + auto_package: ClassVar[str | tuple[str, ...] | None] = None + auto_ignore_modules: ClassVar[tuple[str, ...] | None] = None + auto_imported_modules: ClassVar[list[str] | None] = None + + @classmethod + def auto_import_package_modules(cls) -> None: + """ + Automatically import all modules within the specified package(s). + + Scans the package(s) defined in the `auto_package` class variable and imports + all modules found, tracking them in `auto_imported_modules`. Skips packages + (directories) and any modules listed in `auto_ignore_modules`. + + :raises ValueError: If the `auto_package` class variable is not set + """ + if cls.auto_package is None: + raise ValueError( + "The class variable 'auto_package' must be set to the package name to " + "import modules from." + ) + + cls.auto_imported_modules = [] + packages = ( + cls.auto_package + if isinstance(cls.auto_package, tuple) + else (cls.auto_package,) + ) + + for package_name in packages: + package = importlib.import_module(package_name) + + for _, module_name, is_pkg in pkgutil.walk_packages( + package.__path__, package.__name__ + "." 
+ ): + if ( + is_pkg + or ( + cls.auto_ignore_modules is not None + and module_name in cls.auto_ignore_modules + ) + or module_name in cls.auto_imported_modules + ): + # Skip packages and ignored modules + continue + + if module_name in sys.modules: + # Avoid circular imports + cls.auto_imported_modules.append(module_name) + else: + importlib.import_module(module_name) + cls.auto_imported_modules.append(module_name) diff --git a/src/guidellm/utils/cli.py b/src/guidellm/utils/cli.py index 69cf15d3..4d83526a 100644 --- a/src/guidellm/utils/cli.py +++ b/src/guidellm/utils/cli.py @@ -35,7 +35,7 @@ def __init__(self, *types: click.ParamType): self.types = types self.name = "".join(t.name for t in types) - def convert(self, value, param, ctx): # noqa: RET503 + def convert(self, value, param, ctx): fails = [] for t in self.types: try: diff --git a/src/guidellm/utils/console.py b/src/guidellm/utils/console.py new file mode 100644 index 00000000..c8cd6825 --- /dev/null +++ b/src/guidellm/utils/console.py @@ -0,0 +1,183 @@ +from __future__ import annotations + +from collections.abc import Mapping +from dataclasses import dataclass +from typing import Any, Literal + +from rich.console import Console as RichConsole +from rich.padding import Padding +from rich.status import Status +from rich.text import Text + +__all__ = [ + "Colors", + "Console", + "ConsoleUpdateStep", + "StatusIcons", + "StatusStyles", +] + + +class Colors: + # Core states + info: str = "light_steel_blue" + progress: str = "dark_slate_gray1" + success: str = "chartreuse1" + warning: str = "#FDB516" + error: str = "orange_red1" + + # Branding + primary: str = "#30A2FF" + secondary: str = "#FDB516" + tertiary: str = "#008080" + + +StatusIcons: Mapping[str, str] = { + "debug": "…", + "info": "ℹ", + "warning": "⚠", + "error": "✖", + "critical": "‼", + "notset": "⟳", + "success": "✔", +} + +StatusStyles: Mapping[str, str] = { + "debug": "dim", + "info": f"bold {Colors.info}", + "warning": f"bold {Colors.warning}", + "error": f"bold {Colors.error}", + "critical": "bold red reverse", + "notset": f"bold {Colors.progress}", + "success": f"bold {Colors.success}", +} + + +@dataclass +class ConsoleUpdateStep: + console: Console + title: str + details: Any | None = None + status_level: Literal[ + "debug", + "info", + "warning", + "error", + "critical", + "notset", + "success", + ] = "info" + spinner: str = "dots" + _status: Status | None = None + + def __enter__(self): + if self.console.quiet: + return self + + self._status = self.console.status( + f"[{StatusStyles.get(self.status_level, 'bold')}]{self.title}[/]", + spinner=self.spinner, + ) + self._status.__enter__() + return self + + def update( + self, + title: str, + status_level: Literal[ + "debug", + "info", + "warning", + "error", + "critical", + "notset", + "success", + ] + | None = None, + ): + self.title = title + if status_level is not None: + self.status_level = status_level + if self._status: + self._status.update( + status=f"[{StatusStyles.get(self.status_level, 'bold')}]{title}[/]" + ) + + def finish( + self, + title: str, + details: Any | None = None, + status_level: Literal[ + "debug", + "info", + "warning", + "error", + "critical", + "notset", + "success", + ] = "info", + ): + self.title = title + self.status_level = status_level + if self._status: + self._status.stop() + self.console.print_update(title, details, status_level) + + def __exit__(self, exc_type, exc_val, exc_tb): + if self._status: + return self._status.__exit__(exc_type, exc_val, exc_tb) + return False + + +class 
Console(RichConsole): + def print_update( + self, + title: str, + details: str | None = None, + status: Literal[ + "debug", + "info", + "warning", + "error", + "critical", + "notset", + "success", + ] = "info", + ) -> None: + icon = StatusIcons.get(status, "•") + style = StatusStyles.get(status, "bold") + line = Text.assemble(f"{icon} ", (title, style)) + self.print(line) + self.print_update_details(details) + + def print_update_details(self, details: Any | None): + if details: + block = Padding( + Text.from_markup(str(details)), + (0, 0, 0, 2), + style=StatusStyles.get("debug"), + ) + self.print(block) + + def print_update_step( + self, + title: str, + status: Literal[ + "debug", + "info", + "warning", + "error", + "critical", + "notset", + "success", + ] = "info", + details: Any | None = None, + spinner: str = "dots", + ) -> ConsoleUpdateStep: + return ConsoleUpdateStep( + console=self, + title=title, + details=details, + status_level=status, + spinner=spinner, + ) diff --git a/src/guidellm/utils/dict.py b/src/guidellm/utils/dict.py deleted file mode 100644 index 5b4579c9..00000000 --- a/src/guidellm/utils/dict.py +++ /dev/null @@ -1,23 +0,0 @@ -def recursive_key_update(d, key_update_func): - if not isinstance(d, dict) and not isinstance(d, list): - return d - - if isinstance(d, list): - for item in d: - recursive_key_update(item, key_update_func) - return d - - updated_key_pairs = [] - for key, _ in d.items(): - updated_key = key_update_func(key) - if key != updated_key: - updated_key_pairs.append((key, updated_key)) - - for key_pair in updated_key_pairs: - old_key, updated_key = key_pair - d[updated_key] = d[old_key] - del d[old_key] - - for _, value in d.items(): - recursive_key_update(value, key_update_func) - return d diff --git a/src/guidellm/utils/encoding.py b/src/guidellm/utils/encoding.py new file mode 100644 index 00000000..ccd26982 --- /dev/null +++ b/src/guidellm/utils/encoding.py @@ -0,0 +1,787 @@ +""" +Message encoding utilities for multiprocess communication with Pydantic model support. + +Provides binary serialization and deserialization of Python objects using various +serialization formats and encoding packages to enable performance configurations +for distributed scheduler operations. Supports configurable two-stage processing +pipeline: object serialization (to dict/sequence) followed by binary encoding +(msgpack/msgspec) with specialized Pydantic model handling for type preservation. 
+""" + +from __future__ import annotations + +import json +from collections.abc import Mapping +from typing import Annotated, Any, ClassVar, Generic, Literal, Optional, TypeVar + +try: + import msgpack + from msgpack import Packer, Unpacker + + HAS_MSGPACK = True +except ImportError: + msgpack = Packer = Unpacker = None + HAS_MSGPACK = False + +try: + from msgspec.msgpack import Decoder as MsgspecDecoder + from msgspec.msgpack import Encoder as MsgspecEncoder + + HAS_MSGSPEC = True +except ImportError: + MsgspecDecoder = MsgspecEncoder = None + HAS_MSGSPEC = False + +try: + import orjson + + HAS_ORJSON = True +except ImportError: + orjson = None + HAS_ORJSON = False + +from pydantic import BaseModel +from typing_extensions import TypeAlias + +__all__ = [ + "Encoder", + "EncodingTypesAlias", + "MessageEncoding", + "MsgT", + "ObjT", + "SerializationTypesAlias", + "Serializer", +] + +ObjT = TypeVar("ObjT") +MsgT = TypeVar("MsgT") + +SerializationTypesAlias: TypeAlias = Annotated[ + Optional[Literal["dict", "sequence"]], + "Type alias for available serialization strategies", +] +EncodingTypesAlias: TypeAlias = Annotated[ + Optional[Literal["msgpack", "msgspec"]], + "Type alias for available binary encoding formats", +] + + +class MessageEncoding(Generic[ObjT, MsgT]): + """ + High-performance message encoding and decoding for multiprocessing communication. + + Supports configurable object serialization and binary encoding with specialized + handling for Pydantic models. Provides a two-stage pipeline of serialization + (object to dict/str) followed by encoding (dict/str to binary) for optimal + performance and compatibility across different transport mechanisms used in + distributed scheduler operations. + + Example: + :: + from guidellm.utils.encoding import MessageEncoding + from pydantic import BaseModel + + class DataModel(BaseModel): + name: str + value: int + + # Configure with dict serialization and msgpack encoding + encoding = MessageEncoding(serialization="dict", encoding="msgpack") + encoding.register_pydantic(DataModel) + + # Encode and decode objects + data = DataModel(name="test", value=42) + encoded_msg = encoding.encode(data) + decoded_data = encoding.decode(encoded_msg) + + :cvar DEFAULT_ENCODING_PREFERENCE: Preferred encoding formats in priority order + """ + + DEFAULT_ENCODING_PREFERENCE: ClassVar[list[str]] = ["msgspec", "msgpack"] + + @classmethod + def encode_message( + cls, + obj: ObjT, + serializer: Serializer | None, + encoder: Encoder | None, + ) -> MsgT: + """ + Encode object using specified serializer and encoder. + + :param obj: Object to encode + :param serializer: Serializer for object conversion, None for no serialization + :param encoder: Encoder for binary conversion, None for no encoding + :return: Encoded message ready for transport + """ + serialized = serializer.serialize(obj) if serializer else obj + + return encoder.encode(serialized) if encoder else serialized + + @classmethod + def decode_message( + cls, + message: MsgT, + serializer: Serializer | None, + encoder: Encoder | None, + ) -> ObjT: + """ + Decode message using specified serializer and encoder. + Must match the encoding configuration originally used. 
+ + :param message: Encoded message to decode + :param serializer: Serializer for object reconstruction, None for no + serialization + :param encoder: Encoder for binary decoding, None for no encoding + :return: Reconstructed object + """ + serialized = encoder.decode(message) if encoder else message + + return serializer.deserialize(serialized) if serializer else serialized + + def __init__( + self, + serialization: SerializationTypesAlias = None, + encoding: EncodingTypesAlias | list[EncodingTypesAlias] = None, + pydantic_models: list[type[BaseModel]] | None = None, + ) -> None: + """ + Initialize MessageEncoding with serialization and encoding strategies. + + :param serialization: Serialization strategy (None, "dict", or "sequence") + :param encoding: Encoding strategy (None, "msgpack", "msgspec", or + preference list) + """ + self.serializer = Serializer(serialization, pydantic_models) + self.encoder = Encoder(encoding) + + def register_pydantic(self, model: type[BaseModel]) -> None: + """ + Register Pydantic model for specialized serialization handling. + + :param model: Pydantic model class to register for type preservation + """ + self.serializer.register_pydantic(model) + + def encode(self, obj: ObjT) -> MsgT: + """ + Encode object using instance configuration. + + :param obj: Object to encode using configured serialization and encoding + :return: Encoded message ready for transport + """ + return self.encode_message( + obj=obj, + serializer=self.serializer, + encoder=self.encoder, + ) + + def decode(self, message: MsgT) -> ObjT: + """ + Decode message using instance configuration. + + :param message: Encoded message to decode using configured strategies + :return: Reconstructed object + """ + return self.decode_message( + message=message, + serializer=self.serializer, + encoder=self.encoder, + ) + + +class Encoder: + """ + Binary encoding and decoding using MessagePack or msgspec formats. + + Handles binary serialization of Python objects using configurable encoding + strategies with automatic fallback when dependencies are unavailable. Supports + both standalone instances and pooled encoder/decoder pairs for performance + optimization in high-throughput scenarios. + """ + + def __init__( + self, encoding: EncodingTypesAlias | list[EncodingTypesAlias] = None + ) -> None: + """ + Initialize encoder with specified encoding strategy. + + :param encoding: Encoding format preference (None, "msgpack", "msgspec", or + preference list) + """ + self.encoding, self.encoder, self.decoder = self._resolve_encoding(encoding) + + def encode(self, obj: Any) -> bytes | Any: + """ + Encode object to binary format using configured encoding strategy. + + :param obj: Object to encode (must be serializable by chosen format) + :return: Encoded bytes or original object if no encoding configured + :raises ImportError: If required encoding library is not available + """ + if self.encoding == "msgpack": + if not HAS_MSGPACK: + raise ImportError("msgpack is not available") + + return self.encoder.pack(obj) if self.encoder else msgpack.packb(obj) + + if self.encoding == "msgspec": + if not HAS_MSGSPEC: + raise ImportError("msgspec is not available") + + return ( + self.encoder.encode(obj) + if self.encoder + else MsgspecEncoder().encode(obj) + ) + + return obj + + def decode(self, data: bytes | Any) -> Any: + """ + Decode binary data using configured encoding strategy. 
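+
+        Example (illustrative sketch, assuming msgpack is installed):
+        ::
+            encoder = Encoder(encoding="msgpack")
+            packed = encoder.encode({"name": "test"})
+            encoder.decode(packed)  # -> {"name": "test"}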
+ + :param data: Binary data to decode or object if no encoding configured + :return: Decoded Python object + :raises ImportError: If required encoding library is not available + """ + if self.encoding == "msgpack": + if not HAS_MSGPACK: + raise ImportError("msgpack is not available") + + if self.decoder is not None: + self.decoder.feed(data) + return self.decoder.unpack() + + return msgpack.unpackb(data, raw=False) + + if self.encoding == "msgspec": + if not HAS_MSGSPEC: + raise ImportError("msgspec is not available") + + if self.decoder is not None: + return self.decoder.decode(data) + + return MsgspecDecoder().decode(data) + + return data + + def _resolve_encoding( + self, encoding: EncodingTypesAlias | list[EncodingTypesAlias] | None + ) -> tuple[EncodingTypesAlias, Any, Any]: + def _get_available_encoder_decoder( + encoding: EncodingTypesAlias, + ) -> tuple[Any, Any]: + if encoding == "msgpack" and HAS_MSGPACK: + return Packer(), Unpacker(raw=False) + if encoding == "msgspec" and HAS_MSGSPEC: + return MsgspecEncoder(), MsgspecDecoder() + return None, None + + if not isinstance(encoding, list): + if encoding is None: + return None, None, None + + encoder, decoder = _get_available_encoder_decoder(encoding) + if encoder is None or decoder is None: + raise ImportError(f"Encoding '{encoding}' is not available.") + + return encoding, encoder, decoder + + for test_encoding in encoding: + encoder, decoder = _get_available_encoder_decoder(test_encoding) + if encoder is not None and decoder is not None: + return test_encoding, encoder, decoder + + return None, None, None + + +class Serializer: + """ + Object serialization with specialized Pydantic model support. + + Converts Python objects to serializable formats (dict/sequence) with type + preservation for Pydantic models. Maintains object integrity through + encoding/decoding cycles by storing class metadata and enabling proper + reconstruction of complex objects. Supports both dictionary-based and + sequence-based serialization strategies for different use cases. + """ + + def __init__( + self, + serialization: SerializationTypesAlias = None, + pydantic_models: list[type[BaseModel]] | None = None, + ): + """ + Initialize serializer with strategy and Pydantic registry. + + :param serialization: Default serialization strategy for this instance + """ + self.serialization = serialization + self.pydantic_registry: dict[tuple[str, str], type[BaseModel]] = {} + if pydantic_models: + for model in pydantic_models: + self.register_pydantic(model) + + def register_pydantic(self, model: type[BaseModel]) -> None: + """ + Register Pydantic model for specialized serialization handling. + + :param model: Pydantic model class to register for type preservation + """ + key = (model.__module__, model.__name__) + self.pydantic_registry[key] = model + + def load_pydantic(self, type_name: str, module_name: str) -> type[BaseModel]: + """ + Load Pydantic class by name with registry fallback to dynamic import. 
+ + :param type_name: Class name to load + :param module_name: Module containing the class + :return: Loaded Pydantic model class + """ + key = (module_name, type_name) + + if key in self.pydantic_registry: + return self.pydantic_registry[key] + + # Dynamic import fallback; need to update to better handle generics + module = __import__(module_name, fromlist=[type_name]) + pydantic_class = getattr(module, type_name) + self.pydantic_registry[key] = pydantic_class + + return pydantic_class + + def serialize(self, obj: Any) -> Any: + """ + Serialize object using specified or configured strategy. + + :param obj: Object to serialize + :return: Serialized representation (dict, str, or original object) + """ + if self.serialization == "dict": + return self.to_dict(obj) + elif self.serialization == "sequence": + return self.to_sequence(obj) + + return obj + + def deserialize(self, msg: Any) -> Any: + """ + Deserialize object using specified or configured strategy. + + :param msg: Serialized message to deserialize + :return: Reconstructed object + """ + if self.serialization == "dict": + return self.from_dict(msg) + elif self.serialization == "sequence": + return self.from_sequence(msg) + + return msg + + def to_dict(self, obj: Any) -> Any: + """ + Convert object to dictionary with Pydantic model type preservation. + + :param obj: Object to convert (BaseModel, collections, or primitive) + :return: Dictionary representation with type metadata for Pydantic models + """ + if isinstance(obj, BaseModel): + return self.to_dict_pydantic(obj) + + if isinstance(obj, (list, tuple)) and any( + isinstance(item, BaseModel) for item in obj + ): + return [ + self.to_dict_pydantic(item) if isinstance(item, BaseModel) else item + for item in obj + ] + + if isinstance(obj, dict) and any( + isinstance(value, BaseModel) for value in obj.values() + ): + return { + key: self.to_dict_pydantic(value) + if isinstance(value, BaseModel) + else value + for key, value in obj.items() + } + + return obj + + def from_dict(self, data: Any) -> Any: + """ + Reconstruct object from dictionary with Pydantic model type restoration. + + :param data: Dictionary representation possibly containing type metadata + :return: Reconstructed object with proper types restored + """ + if isinstance(data, (list, tuple)): + return [ + self.from_dict_pydantic(item) + if isinstance(item, dict) and "*PYD*" in item + else item + for item in data + ] + elif isinstance(data, dict) and data: + if "*PYD*" in data: + return self.from_dict_pydantic(data) + + return { + key: self.from_dict_pydantic(value) + if isinstance(value, dict) and "*PYD*" in value + else value + for key, value in data.items() + } + + return data + + def to_dict_pydantic(self, item: Any) -> Any: + """ + Convert item to dictionary with Pydantic type metadata. + + :param item: Item to convert (may or may not be a Pydantic model) + :return: Dictionary with type preservation metadata + """ + return { + "*PYD*": True, + "typ": item.__class__.__name__, + "mod": item.__class__.__module__, + "dat": item.model_dump(mode="python"), + } + + def from_dict_pydantic(self, item: dict[str, Any]) -> Any: + """ + Reconstruct object from dictionary with Pydantic type metadata. 
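+
+        Example of the wrapper produced by ``to_dict_pydantic`` (illustrative;
+        ``DataModel`` and ``my_pkg.models`` are hypothetical names):
+        ::
+            {"*PYD*": True, "typ": "DataModel", "mod": "my_pkg.models",
+             "dat": {"name": "test", "value": 42}}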
+ + :param item: Dictionary containing type metadata and data + :return: Reconstructed Pydantic model or original data + """ + type_name = item["typ"] + module_name = item["mod"] + model_class = self.load_pydantic(type_name, module_name) + + return model_class.model_validate(item["dat"]) + + def to_sequence(self, obj: Any) -> str | Any: + """ + Convert object to sequence format with type-aware serialization. + + Handles Pydantic models, collections, and mappings with proper type + preservation through structured sequence encoding. + + :param obj: Object to serialize to sequence format + :return: Serialized sequence string or bytes + """ + if isinstance(obj, BaseModel): + payload_type = "pydantic" + payload = self.to_sequence_pydantic(obj) + elif isinstance(obj, (list, tuple)) and any( + isinstance(item, BaseModel) for item in obj + ): + payload_type = "collection_sequence" + payload = None + + for item in obj: + is_pydantic = isinstance(item, BaseModel) + payload = self.pack_next_sequence( + type_="pydantic" if is_pydantic else "python", + payload=( + self.to_sequence_pydantic(item) + if is_pydantic + else self.to_sequence_python(item) + ), + current=payload, + ) + elif isinstance(obj, Mapping) and any( + isinstance(value, BaseModel) for value in obj.values() + ): + payload_type = "collection_mapping" + keys = ",".join(str(key) for key in obj) + payload = keys.encode() + b"|" if HAS_ORJSON else keys + "|" + for item in obj.values(): + is_pydantic = isinstance(item, BaseModel) + payload = self.pack_next_sequence( + type_="pydantic" if is_pydantic else "python", + payload=( + self.to_sequence_pydantic(item) + if is_pydantic + else self.to_sequence_python(item) + ), + current=payload, + ) + else: + payload_type = "python" + payload = self.to_sequence_python(obj) + + return self.pack_next_sequence(payload_type, payload, None) + + def from_sequence(self, data: str | Any) -> Any: # noqa: C901, PLR0912 + """ + Reconstruct object from sequence format with type restoration. + + Handles deserialization of objects encoded with to_sequence, properly + restoring Pydantic models and collection structures. 
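+
+        Example round trip for a plain Python object (illustrative sketch):
+        ::
+            serializer = Serializer(serialization="sequence")
+            packed = serializer.to_sequence({"a": 1})
+            serializer.from_sequence(packed)  # -> {"a": 1}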
+ + :param data: Serialized sequence data to reconstruct + :return: Reconstructed object with proper types + :raises ValueError: If sequence format is invalid or contains multiple + packed sequences + """ + type_, payload, remaining = self.unpack_next_sequence(data) + if remaining is not None: + raise ValueError("Data contains multiple packed sequences; expected one.") + + if type_ == "pydantic": + return self.from_sequence_pydantic(payload) + + if type_ == "python": + return self.from_sequence_python(payload) + + if type_ in {"collection_sequence", "collection_tuple"}: + items = [] + while payload: + type_, item_payload, payload = self.unpack_next_sequence(payload) + if type_ == "pydantic": + items.append(self.from_sequence_pydantic(item_payload)) + elif type_ == "python": + items.append(self.from_sequence_python(item_payload)) + else: + raise ValueError("Invalid type in collection sequence") + return items + + if type_ != "collection_mapping": + raise ValueError(f"Invalid type for mapping sequence: {type_}") + + if isinstance(payload, bytes): + keys_end = payload.index(b"|") + keys = payload[:keys_end].decode().split(",") + payload = payload[keys_end + 1 :] + else: + keys_end = payload.index("|") + keys = payload[:keys_end].split(",") + payload = payload[keys_end + 1 :] + + items = {} + index = 0 + while payload: + type_, item_payload, payload = self.unpack_next_sequence(payload) + if type_ == "pydantic": + items[keys[index]] = self.from_sequence_pydantic(item_payload) + elif type_ == "python": + items[keys[index]] = self.from_sequence_python(item_payload) + else: + raise ValueError("Invalid type in mapping sequence") + index += 1 + return items + + def to_sequence_pydantic(self, obj: BaseModel) -> str | bytes: + """ + Serialize Pydantic model to sequence format with class metadata. + + :param obj: Pydantic model instance to serialize + :return: Sequence string or bytes containing class info and JSON data + """ + class_name: str = obj.__class__.__name__ + class_module: str = obj.__class__.__module__ + json_data = obj.__pydantic_serializer__.to_json(obj) + + return ( + (class_name.encode() + b"|" + class_module.encode() + b"|" + json_data) + if HAS_ORJSON + else ( + class_name + "|" + class_module + "|" + json_data.decode() + if isinstance(json_data, bytes) + else json_data + ) + ) + + def from_sequence_pydantic(self, data: str | bytes) -> BaseModel: + """ + Reconstruct Pydantic model from sequence format. + + :param data: Sequence data containing class metadata and JSON + :return: Reconstructed Pydantic model instance + """ + if isinstance(data, bytes): + class_name_end = data.index(b"|") + class_name = data[:class_name_end].decode() + module_name_end = data.index(b"|", class_name_end + 1) + module_name = data[class_name_end + 1 : module_name_end].decode() + json_data = data[module_name_end + 1 :] + else: + class_name_end = data.index("|") + class_name = data[:class_name_end] + module_name_end = data.index("|", class_name_end + 1) + module_name = data[class_name_end + 1 : module_name_end] + json_data = data[module_name_end + 1 :] + + model_class = self.load_pydantic(class_name, module_name) + + return model_class.model_validate_json(json_data) + + def to_sequence_python(self, obj: Any) -> str | bytes: + """ + Serialize Python object to JSON format. 
+ + :param obj: Python object to serialize + :return: JSON string or bytes representation + """ + return orjson.dumps(obj) if HAS_ORJSON else json.dumps(obj) + + def from_sequence_python(self, data: str | bytes) -> Any: + """ + Deserialize Python object from JSON format. + + :param data: JSON string or bytes to deserialize + :return: Reconstructed Python object + :raises ImportError: If orjson is required but not available + """ + if isinstance(data, bytes): + if not HAS_ORJSON: + raise ImportError("orjson is not available, cannot deserialize bytes") + return orjson.loads(data) + + return json.loads(data) + + def pack_next_sequence( # noqa: C901, PLR0912 + self, + type_: Literal[ + "pydantic", + "python", + "collection_tuple", + "collection_sequence", + "collection_mapping", + ], + payload: str | bytes, + current: str | bytes | None, + ) -> str | bytes: + """ + Pack payload into sequence format with type and length metadata. + + :param type_: Type identifier for the payload + :param payload: Data to pack into sequence + :param current: Current sequence data to append to (unused but maintained + for signature compatibility) + :return: Packed sequence with type, length, and payload + :raises ValueError: If payload type doesn't match current type or unknown + type specified + """ + if current is not None and type(payload) is not type(current): + raise ValueError("Payload and current must be of the same type") + + payload_len = len(payload) + + if isinstance(payload, bytes): + payload_len = payload_len.to_bytes( + length=(payload_len.bit_length() + 7) // 8 if payload_len > 0 else 1, + byteorder="big", + ) + if type_ == "pydantic": + payload_type = b"P" + elif type_ == "python": + payload_type = b"p" + elif type_ == "collection_tuple": + payload_type = b"T" + elif type_ == "collection_sequence": + payload_type = b"S" + elif type_ == "collection_mapping": + payload_type = b"M" + else: + raise ValueError(f"Unknown type for packing: {type_}") + delimiter = b"|" + else: + payload_len = str(payload_len) + if type_ == "pydantic": + payload_type = "P" + elif type_ == "python": + payload_type = "p" + elif type_ == "collection_tuple": + payload_type = "T" + elif type_ == "collection_sequence": + payload_type = "S" + elif type_ == "collection_mapping": + payload_type = "M" + else: + raise ValueError(f"Unknown type for packing: {type_}") + delimiter = "|" + + next_sequence = payload_type + delimiter + payload_len + delimiter + payload + + return current + next_sequence if current else next_sequence + + def unpack_next_sequence( # noqa: C901, PLR0912 + self, data: str | bytes + ) -> tuple[ + Literal[ + "pydantic", + "python", + "collection_tuple", + "collection_sequence", + "collection_mapping", + ], + str | bytes, + str | bytes | None, + ]: + """ + Unpack sequence format to extract type, payload, and remaining data. 
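+
+        Example (illustrative; ``serializer`` is any Serializer instance and
+        ``packed`` mirrors ``pack_next_sequence`` output for a 7-byte
+        "python" JSON payload):
+        ::
+            packed = b"p|" + bytes([7]) + b'|{"a":1}'
+            serializer.unpack_next_sequence(packed)
+            # -> ("python", b'{"a":1}', None)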
+ + :param data: Packed sequence data to unpack + :return: Tuple of (type, payload, remaining_data) + :raises ValueError: If sequence format is invalid or unknown type character + """ + if isinstance(data, bytes): + if len(data) < len(b"T|N") or data[1:2] != b"|": + raise ValueError("Invalid packed data format") + + type_char = data[0:1] + if type_char == b"P": + type_ = "pydantic" + elif type_char == b"p": + type_ = "python" + elif type_char == b"T": + type_ = "collection_tuple" + elif type_char == b"S": + type_ = "collection_sequence" + elif type_char == b"M": + type_ = "collection_mapping" + else: + raise ValueError("Unknown type character in packed data") + + len_end = data.index(b"|", 2) + payload_len = int.from_bytes(data[2:len_end], "big") + payload = data[len_end + 1 : len_end + 1 + payload_len] + remaining = ( + data[len_end + 1 + payload_len :] + if len_end + 1 + payload_len < len(data) + else None + ) + + return type_, payload, remaining + + if len(data) < len("T|N") or data[1] != "|": + raise ValueError("Invalid packed data format") + + type_char = data[0] + if type_char == "P": + type_ = "pydantic" + elif type_char == "p": + type_ = "python" + elif type_char == "S": + type_ = "collection_sequence" + elif type_char == "M": + type_ = "collection_mapping" + else: + raise ValueError("Unknown type character in packed data") + + len_end = data.index("|", 2) + payload_len = int(data[2:len_end]) + payload = data[len_end + 1 : len_end + 1 + payload_len] + remaining = ( + data[len_end + 1 + payload_len :] + if len_end + 1 + payload_len < len(data) + else None + ) + + return type_, payload, remaining diff --git a/src/guidellm/utils/functions.py b/src/guidellm/utils/functions.py new file mode 100644 index 00000000..6343cbf2 --- /dev/null +++ b/src/guidellm/utils/functions.py @@ -0,0 +1,133 @@ +""" +Utility functions for safe operations and value handling. + +Provides defensive programming utilities for common operations that may encounter +None values, invalid inputs, or edge cases. Includes safe arithmetic operations, +attribute access, and timestamp formatting. +""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any + +__all__ = [ + "all_defined", + "safe_add", + "safe_divide", + "safe_format_timestamp", + "safe_getattr", + "safe_multiply", +] + + +def safe_getattr(obj: Any | None, attr: str, default: Any = None) -> Any: + """ + Safely get an attribute from an object with None handling. + + :param obj: Object to get the attribute from, or None + :param attr: Name of the attribute to retrieve + :param default: Value to return if object is None or attribute doesn't exist + :return: Attribute value or default if not found or object is None + """ + if obj is None: + return default + + return getattr(obj, attr, default) + + +def all_defined(*values: Any | None) -> bool: + """ + Check if all provided values are defined (not None). + + :param values: Variable number of values to check for None + :return: True if all values are not None, False otherwise + """ + return all(value is not None for value in values) + + +def safe_divide( + numerator: int | float | None, + denominator: int | float | None, + num_default: float = 0.0, + den_default: float = 1.0, +) -> float: + """ + Safely divide two numbers with None handling and zero protection. 
+ + :param numerator: Number to divide, or None to use num_default + :param denominator: Number to divide by, or None to use den_default + :param num_default: Default value for numerator if None + :param den_default: Default value for denominator if None + :return: Division result with protection against division by zero + """ + numerator = numerator if numerator is not None else num_default + denominator = denominator if denominator is not None else den_default + + return numerator / (denominator or 1e-10) + + +def safe_multiply(*values: int | float | None, default: float = 1.0) -> float: + """ + Safely multiply multiple numbers with None handling. + + :param values: Variable number of values to multiply, None values treated as 1.0 + :param default: Starting value for multiplication + :return: Product of all non-None values multiplied by default + """ + result = default + for val in values: + result *= val if val is not None else 1.0 + return result + + +def safe_add( + *values: int | float | None, signs: list[int] | None = None, default: float = 0.0 +) -> float: + """ + Safely add multiple numbers with None handling and optional signs. + + :param values: Variable number of values to add, None values use default + :param signs: Optional list of 1 (add) or -1 (subtract) for each value. + If None, all values are added. Must match length of values. + :param default: Value to substitute for None values + :return: Result of adding all values safely (default used when value is None) + """ + if not values: + return default + + values = list(values) + + if signs is None: + signs = [1] * len(values) + + if len(signs) != len(values): + raise ValueError("Length of signs must match length of values") + + result = values[0] if values[0] is not None else default + + for ind in range(1, len(values)): + val = values[ind] if values[ind] is not None else default + result += signs[ind] * val + + return result + + +def safe_format_timestamp( + timestamp: float | None, format_: str = "%H:%M:%S", default: str = "N/A" +) -> str: + """ + Safely format a timestamp with error handling and validation. + + :param timestamp: Unix timestamp to format, or None + :param format_: Strftime format string for timestamp formatting + :param default: Value to return if timestamp is invalid or None + :return: Formatted timestamp string or default value + """ + if timestamp is None or timestamp < 0 or timestamp > 2**31: + return default + + try: + return datetime.fromtimestamp(timestamp).strftime(format_) + except (ValueError, OverflowError, OSError): + return default diff --git a/src/guidellm/utils/messaging.py b/src/guidellm/utils/messaging.py new file mode 100644 index 00000000..c56ec29a --- /dev/null +++ b/src/guidellm/utils/messaging.py @@ -0,0 +1,1029 @@ +""" +Inter-process messaging abstractions for distributed scheduler coordination. + +Provides high-level interfaces for asynchronous message passing between worker +processes using various transport mechanisms including queues and pipes. Supports +configurable encoding, serialization, error handling, and flow control with +buffering and stop event coordination for distributed scheduler operations. 
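# Quick usage sketch for the helpers in the new guidellm.utils.functions module
# above; expected results are shown as comments and are illustrative only.
from guidellm.utils.functions import (
    all_defined,
    safe_add,
    safe_divide,
    safe_format_timestamp,
    safe_multiply,
)

all_defined(1, "x")                    # True
all_defined(1, None)                   # False
safe_divide(10, 0)                     # ~1e11; zero denominator guarded by 1e-10
safe_divide(None, 4)                   # 0.0 (numerator defaults to 0.0)
safe_multiply(2, None, 3)              # 6.0 (None treated as 1.0)
safe_add(10, 3, 2, signs=[1, -1, 1])   # 9 -> 10 - 3 + 2
safe_format_timestamp(None)            # "N/A"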
+""" + +from __future__ import annotations + +import asyncio +import contextlib +import multiprocessing +import queue +import threading +import time +from abc import ABC, abstractmethod +from collections.abc import Iterable +from multiprocessing.connection import Connection +from multiprocessing.context import BaseContext +from multiprocessing.managers import SyncManager +from multiprocessing.synchronize import Event as ProcessingEvent +from threading import Event as ThreadingEvent +from typing import Any, Callable, Generic, Protocol, TypeVar + +import culsans +from pydantic import BaseModel + +from guidellm.utils.encoding import ( + EncodingTypesAlias, + MessageEncoding, + SerializationTypesAlias, +) + +__all__ = [ + "InterProcessMessaging", + "InterProcessMessagingManagerQueue", + "InterProcessMessagingPipe", + "InterProcessMessagingQueue", + "MessagingStopCallback", + "ReceiveMessageT", + "SendMessageT", +] + +SendMessageT = TypeVar("SendMessageT", bound=Any) +"""Generic type variable for messages sent through the messaging system""" +ReceiveMessageT = TypeVar("ReceiveMessageT", bound=Any) +"""Generic type variable for messages received through the messaging system""" + + +class MessagingStopCallback(Protocol): + """Protocol for evaluating stop conditions in messaging operations.""" + + def __call__( + self, messaging: InterProcessMessaging, pending: bool, queue_empty: int + ) -> bool: + """ + Evaluate whether messaging operations should stop. + + :param messaging: The messaging instance to evaluate + :param pending: Whether there are pending operations + :param queue_empty: The number of times in a row the queue has been empty + :return: True if operations should stop, False otherwise + """ + ... + + +class InterProcessMessaging(Generic[SendMessageT, ReceiveMessageT], ABC): + """ + Abstract base for inter-process messaging in distributed scheduler coordination. + + Provides unified interface for asynchronous message passing between scheduler + components using configurable transport mechanisms, encoding schemes, and + flow control policies. Manages buffering, serialization, error handling, + and coordinated shutdown across worker processes for distributed operations. + + Example: + :: + from guidellm.utils.messaging import InterProcessMessagingQueue + + messaging = InterProcessMessagingQueue( + serialization="pickle", + max_pending_size=100 + ) + + await messaging.start() + await messaging.put(request_data) + response = await messaging.get(timeout=5.0) + await messaging.stop() + """ + + STOP_REQUIRED_QUEUE_EMPTY: int = 3 + + def __init__( + self, + mp_context: BaseContext | None = None, + serialization: SerializationTypesAlias = "dict", + encoding: EncodingTypesAlias | list[EncodingTypesAlias] = None, + max_pending_size: int | None = None, + max_buffer_send_size: int | None = None, + max_done_size: int | None = None, + max_buffer_receive_size: int | None = None, + poll_interval: float = 0.1, + worker_index: int | None = None, + ): + """ + Initialize inter-process messaging coordinator. 
+ + :param serialization: Message serialization method for transport encoding + :param encoding: Optional encoding scheme for serialized message data + :param max_pending_size: Maximum items in send queue before blocking + :param max_buffer_send_size: Maximum items in buffer send queue + :param max_done_size: Maximum items in done queue before blocking + :param max_buffer_receive_size: Maximum items in buffer receive queue + :param poll_interval: Time interval for checking queue status and events + :param worker_index: Index identifying this worker in the process group + """ + self.worker_index: int | None = worker_index + self.mp_context = mp_context or multiprocessing.get_context() + self.serialization = serialization + self.encoding = encoding + self.max_pending_size = max_pending_size + self.max_buffer_send_size = max_buffer_send_size + self.max_done_size = max_done_size + self.max_buffer_receive_size = max_buffer_receive_size + self.poll_interval = poll_interval + + self.send_stopped_event: ThreadingEvent | ProcessingEvent = None + self.receive_stopped_event: ThreadingEvent | ProcessingEvent = None + self.shutdown_event: ThreadingEvent = None + self.buffer_send_queue: culsans.Queue[SendMessageT] = None + self.buffer_receive_queue: culsans.Queue[ReceiveMessageT] = None + self.send_task: asyncio.Task = None + self.receive_task: asyncio.Task = None + self.running = False + + @abstractmethod + def create_worker_copy( + self, worker_index: int, **kwargs + ) -> InterProcessMessaging[ReceiveMessageT, SendMessageT]: + """ + Create worker-specific copy for distributed process coordination. + + :param worker_index: Index of the worker process for message routing + :return: Configured messaging instance for the specified worker + """ + ... + + @abstractmethod + def create_send_messages_threads( + self, + send_items: Iterable[Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ) -> list[tuple[Callable, tuple[Any, ...]]]: + """ + Create send message processing threads for transport implementation. + + :param send_items: Optional collection of items to send during processing + :param message_encoding: Message encoding configuration for serialization + :param check_stop: Callable for evaluating stop conditions during processing + :return: List of thread callables with their arguments for execution + """ + ... + + @abstractmethod + def create_receive_messages_threads( + self, + receive_callback: Callable[[Any], Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ) -> list[tuple[Callable, tuple[Any, ...]]]: + """ + Create receive message processing threads for transport implementation. + + :param receive_callback: Optional callback for processing received messages + :param message_encoding: Message encoding configuration for deserialization + :param check_stop: Callable for evaluating stop conditions during processing + :return: List of thread callables with their arguments for execution + """ + ... 
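# Sketch of a custom stop criterion satisfying the MessagingStopCallback
# protocol above: stop once nothing is in flight and the transport queue has
# polled empty several times in a row. Hypothetical example; it would be passed
# via start(..., send_stop_criteria=[idle_stop]) alongside stop events.
from guidellm.utils.messaging import InterProcessMessaging


def idle_stop(
    messaging: InterProcessMessaging, pending: bool, queue_empty: int
) -> bool:
    # pending: an encoded item is still waiting to be flushed
    # queue_empty: consecutive empty polls observed by the transport thread
    return not pending and queue_empty >= 5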
+ + async def start( + self, + send_items: Iterable[Any] | None = None, + receive_callback: Callable[[Any], Any] | None = None, + send_stop_criteria: ( + list[ThreadingEvent | ProcessingEvent | MessagingStopCallback] | None + ) = None, + send_stopped_event: ThreadingEvent | ProcessingEvent | None = None, + receive_stop_criteria: ( + list[ThreadingEvent | ProcessingEvent | MessagingStopCallback] | None + ) = None, + receive_stopped_event: ThreadingEvent | ProcessingEvent | None = None, + pydantic_models: list[type[BaseModel]] | None = None, + ): + """ + Start asynchronous message processing tasks with buffering. + + :param send_items: Optional collection of items to send during processing + :param receive_callback: Optional callback for processing received messages + :param send_stop_criteria: Events and callables that trigger send task shutdown + :param send_stopped_event: Event set when send task has fully stopped + :param receive_stop_criteria: Events and callables that trigger receive shutdown + :param receive_stopped_event: Event set when receive task has fully stopped + :param pydantic_models: Optional list of Pydantic models for serialization + """ + self.running = True + self.send_stopped_event = send_stopped_event or ThreadingEvent() + self.receive_stopped_event = receive_stopped_event or ThreadingEvent() + self.shutdown_event = ThreadingEvent() + self.buffer_send_queue = culsans.Queue[SendMessageT]( + maxsize=self.max_buffer_send_size or 0 + ) + self.buffer_receive_queue = culsans.Queue[ReceiveMessageT]( + maxsize=self.max_buffer_receive_size or 0 + ) + self.tasks_lock = threading.Lock() + + message_encoding = MessageEncoding( + serialization=self.serialization, + encoding=self.encoding, + pydantic_models=pydantic_models, + ) + send_stop_criteria = send_stop_criteria or [] + receive_stop_events = receive_stop_criteria or [] + + self.send_task = asyncio.create_task( + self.send_messages_coroutine( + send_items=send_items, + message_encoding=message_encoding, + send_stop_criteria=send_stop_criteria, + ) + ) + self.receive_task = asyncio.create_task( + self.receive_messages_coroutine( + receive_callback=receive_callback, + message_encoding=message_encoding, + receive_stop_criteria=receive_stop_events, + ) + ) + + async def stop(self): + """ + Stop message processing tasks and clean up resources. + """ + self.shutdown_event.set() + with contextlib.suppress(asyncio.CancelledError): + await asyncio.gather( + self.send_task, self.receive_task, return_exceptions=True + ) + self.send_task = None + self.receive_task = None + if self.worker_index is None: + self.buffer_send_queue.clear() + await self.buffer_send_queue.aclose() + self.buffer_receive_queue.clear() + await self.buffer_receive_queue.aclose() + self.buffer_send_queue = None + self.buffer_receive_queue = None + self.send_stopped_event = None + self.receive_stopped_event = None + self.shutdown_event = None + self.running = False + + async def send_messages_coroutine( + self, + send_items: Iterable[Any] | None, + message_encoding: MessageEncoding, + send_stop_criteria: ( + list[ThreadingEvent | ProcessingEvent | MessagingStopCallback] | None + ), + ): + """ + Execute send message processing with encoding and stop condition handling. 
+ + :param send_items: Optional collection of items to send during processing + :param message_encoding: Message encoding configuration for serialization + :param send_stop_criteria: Events and callables that trigger send task shutdown + """ + canceled_event = ThreadingEvent() + + try: + await asyncio.gather( + *[ + asyncio.to_thread(thread, *args) + for (thread, args) in self.create_send_messages_threads( + send_items=send_items, + message_encoding=message_encoding, + check_stop=self._create_check_stop_callable( + send_stop_criteria, canceled_event + ), + ) + ] + ) + except asyncio.CancelledError: + canceled_event.set() + raise + finally: + self.send_stopped_event.set() + + async def receive_messages_coroutine( + self, + receive_callback: Callable[[Any], Any] | None, + message_encoding: MessageEncoding, + receive_stop_criteria: ( + list[ThreadingEvent | ProcessingEvent | MessagingStopCallback] | None + ), + ): + """ + Execute receive message processing with decoding and callback handling. + + :param receive_callback: Optional callback for processing received messages + :param message_encoding: Message encoding configuration for deserialization + :param receive_stop_criteria: Events and callables that trigger receive shutdown + """ + canceled_event = ThreadingEvent() + + try: + await asyncio.gather( + *[ + asyncio.to_thread(thread, *args) + for thread, args in self.create_receive_messages_threads( + receive_callback=receive_callback, + message_encoding=message_encoding, + check_stop=self._create_check_stop_callable( + receive_stop_criteria, canceled_event + ), + ) + ] + ) + except asyncio.CancelledError: + canceled_event.set() + raise + finally: + self.receive_stopped_event.set() + + async def get(self, timeout: float | None = None) -> ReceiveMessageT: + """ + Retrieve message from receive buffer with optional timeout. + + :param timeout: Maximum time to wait for a message + :return: Decoded message from the receive buffer + """ + return await asyncio.wait_for( + self.buffer_receive_queue.async_get(), timeout=timeout + ) + + def get_sync(self, timeout: float | None = None) -> ReceiveMessageT: + """ + Retrieve message from receive buffer synchronously with optional timeout. + + :param timeout: Maximum time to wait for a message, if <=0 uses get_nowait + :return: Decoded message from the receive buffer + """ + if timeout is not None and timeout <= 0: + return self.buffer_receive_queue.get_nowait() + else: + return self.buffer_receive_queue.sync_get(timeout=timeout) + + async def put(self, item: SendMessageT, timeout: float | None = None): + """ + Add message to send buffer with optional timeout. + + :param item: Message item to add to the send buffer + :param timeout: Maximum time to wait for buffer space + """ + await asyncio.wait_for(self.buffer_send_queue.async_put(item), timeout=timeout) + + def put_sync(self, item: SendMessageT, timeout: float | None = None): + """ + Add message to send buffer synchronously with optional timeout. 
+ + :param item: Message item to add to the send buffer + :param timeout: Maximum time to wait for buffer space, if <=0 uses put_nowait + """ + if timeout is not None and timeout <= 0: + self.buffer_send_queue.put_nowait(item) + else: + self.buffer_send_queue.sync_put(item, timeout=timeout) + + def _create_check_stop_callable( + self, + stop_criteria: ( + list[ThreadingEvent | ProcessingEvent | MessagingStopCallback] | None + ), + canceled_event: ThreadingEvent, + ): + stop_events = tuple( + item + for item in stop_criteria or [] + if isinstance(item, (ThreadingEvent, ProcessingEvent)) + ) + stop_callbacks = tuple(item for item in stop_criteria or [] if callable(item)) + + def check_stop(pending: bool, queue_empty: int) -> bool: + if canceled_event.is_set(): + return True + + if stop_callbacks and any( + cb(self, pending, queue_empty) for cb in stop_callbacks + ): + return True + + return ( + not pending + and queue_empty >= self.STOP_REQUIRED_QUEUE_EMPTY + and ( + self.shutdown_event.is_set() + or any(event.is_set() for event in stop_events) + ) + ) + + return check_stop + + +class InterProcessMessagingQueue(InterProcessMessaging[SendMessageT, ReceiveMessageT]): + """ + Queue-based inter-process messaging for distributed scheduler coordination. + + Provides message passing using multiprocessing.Queue objects for communication + between scheduler workers and main process. Handles message encoding, buffering, + flow control, and coordinated shutdown with configurable queue behavior and + error handling policies for distributed operations. + + Example: + :: + from guidellm.utils.messaging import InterProcessMessagingQueue + + messaging = InterProcessMessagingQueue( + serialization="pickle", + max_pending_size=100 + ) + + # Create worker copy for distributed processing + worker_messaging = messaging.create_worker_copy(worker_index=0) + """ + + def __init__( + self, + mp_context: BaseContext | None = None, + serialization: SerializationTypesAlias = "dict", + encoding: EncodingTypesAlias = None, + max_pending_size: int | None = None, + max_buffer_send_size: int | None = None, + max_done_size: int | None = None, + max_buffer_receive_size: int | None = None, + poll_interval: float = 0.1, + worker_index: int | None = None, + pending_queue: multiprocessing.Queue | None = None, + done_queue: multiprocessing.Queue | None = None, + ): + """ + Initialize queue-based messaging for inter-process communication. 
+ + :param serialization: Message serialization method for transport encoding + :param encoding: Optional encoding scheme for serialized message data + :param max_pending_size: Maximum items in send queue before blocking + :param max_buffer_send_size: Maximum items in buffer send queue + :param max_done_size: Maximum items in receive queue before blocking + :param max_buffer_receive_size: Maximum items in buffer receive queue + :param poll_interval: Time interval for checking queue status and events + :param worker_index: Index identifying this worker in the process group + :param pending_queue: Multiprocessing queue for sending messages + :param done_queue: Multiprocessing queue for receiving completed messages + :param context: Multiprocessing context for creating queues + """ + super().__init__( + mp_context=mp_context, + serialization=serialization, + encoding=encoding, + max_pending_size=max_pending_size, + max_buffer_send_size=max_buffer_send_size, + max_done_size=max_done_size, + max_buffer_receive_size=max_buffer_receive_size, + poll_interval=poll_interval, + worker_index=worker_index, + ) + self.pending_queue = pending_queue or self.mp_context.Queue( + maxsize=max_pending_size or 0 + ) + self.done_queue = done_queue or self.mp_context.Queue( + maxsize=max_done_size or 0 + ) + + def create_worker_copy( + self, worker_index: int, **kwargs + ) -> InterProcessMessagingQueue[ReceiveMessageT, SendMessageT]: + """ + Create worker-specific copy for distributed queue-based coordination. + + :param worker_index: Index of the worker process for message routing + :return: Configured queue messaging instance for the specified worker + """ + copy_args = { + "mp_context": self.mp_context, + "serialization": self.serialization, + "encoding": self.encoding, + "max_pending_size": self.max_pending_size, + "max_buffer_send_size": self.max_buffer_send_size, + "max_done_size": self.max_done_size, + "max_buffer_receive_size": self.max_buffer_receive_size, + "poll_interval": self.poll_interval, + "worker_index": worker_index, + "pending_queue": self.pending_queue, + "done_queue": self.done_queue, + } + copy_args.update(kwargs) + + return InterProcessMessagingQueue[ReceiveMessageT, SendMessageT](**copy_args) + + async def stop(self): + """ + Stop the messaging system and wait for all tasks to complete. + """ + await super().stop() + if self.worker_index is None: + # only main process should close the queues + with contextlib.suppress(queue.Empty): + while True: + self.pending_queue.get_nowait() + self.pending_queue.close() + + with contextlib.suppress(queue.Empty): + while True: + self.done_queue.get_nowait() + self.done_queue.close() + + self.pending_queue = None + self.done_queue = None + + def create_send_messages_threads( + self, + send_items: Iterable[Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ) -> list[tuple[Callable, tuple[Any, ...]]]: + """ + Create send message processing threads for queue-based transport. 
+ + :param send_items: Optional collection of items to send during processing + :param message_encoding: Message encoding configuration for serialization + :param check_stop: Callable for evaluating stop conditions during processing + :return: List of thread callables with their arguments for execution + """ + return [ + ( + self._send_messages_task_thread, + (send_items, message_encoding, check_stop), + ) + ] + + def create_receive_messages_threads( + self, + receive_callback: Callable[[Any], Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ) -> list[tuple[Callable, tuple[Any, ...]]]: + """ + Create receive message processing threads for queue-based transport. + + :param receive_callback: Optional callback for processing received messages + :param message_encoding: Message encoding configuration for deserialization + :param check_stop: Callable for evaluating stop conditions during processing + :return: List of thread callables with their arguments for execution + """ + return [ + ( + self._receive_messages_task_thread, + (receive_callback, message_encoding, check_stop), + ) + ] + + def _send_messages_task_thread( # noqa: C901, PLR0912 + self, + send_items: Iterable[Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ): + send_items_iter = iter(send_items) if send_items is not None else None + pending_item = None + queue_empty = 0 + + while not check_stop(pending_item is not None, queue_empty): + if pending_item is None: + try: + if send_items_iter is not None: + item = next(send_items_iter) + else: + item = self.buffer_send_queue.sync_get( + timeout=self.poll_interval + ) + pending_item = message_encoding.encode(item) + queue_empty = 0 + except (culsans.QueueEmpty, queue.Empty, StopIteration): + queue_empty += 1 + + if pending_item is not None: + try: + if self.worker_index is None: + # Main publisher + self.pending_queue.put(pending_item, timeout=self.poll_interval) + else: + # Worker + self.done_queue.put(pending_item, timeout=self.poll_interval) + if send_items_iter is None: + self.buffer_send_queue.task_done() + pending_item = None + except (culsans.QueueFull, queue.Full): + pass + + def _receive_messages_task_thread( # noqa: C901 + self, + receive_callback: Callable[[Any], Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ): + pending_item = None + received_item = None + queue_empty = 0 + + while not check_stop(pending_item is not None, queue_empty): + if pending_item is None: + try: + if self.worker_index is None: + # Main publisher + item = self.done_queue.get(timeout=self.poll_interval) + else: + # Worker + item = self.pending_queue.get(timeout=self.poll_interval) + pending_item = message_encoding.decode(item) + queue_empty = 0 + except (culsans.QueueEmpty, queue.Empty): + queue_empty += 1 + + if pending_item is not None or received_item is not None: + try: + if received_item is None: + received_item = ( + pending_item + if not receive_callback + else receive_callback(pending_item) + ) + + self.buffer_receive_queue.sync_put(received_item) + pending_item = None + received_item = None + except (culsans.QueueFull, queue.Full): + pass + + +class InterProcessMessagingManagerQueue( + InterProcessMessagingQueue[SendMessageT, ReceiveMessageT] +): + """ + Manager-based queue messaging for inter-process scheduler coordination. + + Extends queue-based messaging with multiprocessing.Manager support for + shared state coordination across worker processes. 
Provides managed queues + for reliable message passing in distributed scheduler environments with + enhanced process synchronization and resource management capabilities. + + Example: + :: + import multiprocessing + from guidellm.utils.messaging import InterProcessMessagingManagerQueue + + manager = multiprocessing.Manager() + messaging = InterProcessMessagingManagerQueue( + manager=manager, + serialization="pickle" + ) + """ + + def __init__( + self, + manager: SyncManager, + mp_context: BaseContext | None = None, + serialization: SerializationTypesAlias = "dict", + encoding: EncodingTypesAlias = None, + max_pending_size: int | None = None, + max_buffer_send_size: int | None = None, + max_done_size: int | None = None, + max_buffer_receive_size: int | None = None, + poll_interval: float = 0.1, + worker_index: int | None = None, + pending_queue: multiprocessing.Queue | None = None, + done_queue: multiprocessing.Queue | None = None, + ): + """ + Initialize manager-based queue messaging for inter-process communication. + + :param manager: Multiprocessing manager for shared queue creation + :param serialization: Message serialization method for transport encoding + :param encoding: Optional encoding scheme for serialized message data + :param max_pending_size: Maximum items in send queue before blocking + :param max_buffer_send_size: Maximum items in buffer send queue + :param max_done_size: Maximum items in receive queue before blocking + :param max_buffer_receive_size: Maximum items in buffer receive queue + :param poll_interval: Time interval for checking queue status and events + :param worker_index: Index identifying this worker in the process group + :param pending_queue: Managed multiprocessing queue for sending messages + :param done_queue: Managed multiprocessing queue for receiving completed + messages + """ + super().__init__( + mp_context=mp_context, + serialization=serialization, + encoding=encoding, + max_pending_size=max_pending_size, + max_buffer_send_size=max_buffer_send_size, + max_done_size=max_done_size, + max_buffer_receive_size=max_buffer_receive_size, + poll_interval=poll_interval, + worker_index=worker_index, + pending_queue=pending_queue or manager.Queue(maxsize=max_pending_size or 0), # type: ignore [assignment] + done_queue=done_queue or manager.Queue(maxsize=max_done_size or 0), # type: ignore [assignment] + ) + + def create_worker_copy( + self, worker_index: int, **kwargs + ) -> InterProcessMessagingManagerQueue[ReceiveMessageT, SendMessageT]: + """ + Create worker-specific copy for managed queue-based coordination. + + :param worker_index: Index of the worker process for message routing + :return: Configured manager queue messaging instance for the specified worker + """ + copy_args = { + "manager": None, + "mp_context": self.mp_context, + "serialization": self.serialization, + "encoding": self.encoding, + "max_pending_size": self.max_pending_size, + "max_buffer_send_size": self.max_buffer_send_size, + "max_done_size": self.max_done_size, + "max_buffer_receive_size": self.max_buffer_receive_size, + "poll_interval": self.poll_interval, + "worker_index": worker_index, + "pending_queue": self.pending_queue, + "done_queue": self.done_queue, + } + copy_args.update(kwargs) + + return InterProcessMessagingManagerQueue(**copy_args) + + async def stop(self): + """ + Stop the messaging system and wait for all tasks to complete. 
+ """ + await InterProcessMessaging.stop(self) + self.pending_queue = None + self.done_queue = None + + +class InterProcessMessagingPipe(InterProcessMessaging[SendMessageT, ReceiveMessageT]): + """ + Pipe-based inter-process messaging for distributed scheduler coordination. + + Provides message passing using multiprocessing.Pipe objects for direct + communication between scheduler workers and main process. Offers lower + latency than queue-based messaging with duplex communication channels + for high-performance distributed operations. + + Example: + :: + from guidellm.utils.messaging import InterProcessMessagingPipe + + messaging = InterProcessMessagingPipe( + num_workers=4, + serialization="pickle", + poll_interval=0.05 + ) + + # Create worker copy for specific worker process + worker_messaging = messaging.create_worker_copy(worker_index=0) + """ + + def __init__( + self, + num_workers: int, + mp_context: BaseContext | None = None, + serialization: SerializationTypesAlias = "dict", + encoding: EncodingTypesAlias = None, + max_pending_size: int | None = None, + max_buffer_send_size: int | None = None, + max_done_size: int | None = None, + max_buffer_receive_size: int | None = None, + poll_interval: float = 0.1, + worker_index: int | None = None, + pipe: tuple[Connection, Connection] | None = None, + ): + """ + Initialize pipe-based messaging for inter-process communication. + + :param num_workers: Number of worker processes requiring pipe connections + :param serialization: Message serialization method for transport encoding + :param encoding: Optional encoding scheme for serialized message data + :param max_pending_size: Maximum items in send queue before blocking + :param max_buffer_send_size: Maximum items in buffer send queue + :param max_done_size: Maximum items in receive queue before blocking + :param max_buffer_receive_size: Maximum items in buffer receive queue + :param poll_interval: Time interval for checking queue status and events + :param worker_index: Index identifying this worker in the process group + :param pipe: Existing pipe connection for worker-specific instances + """ + super().__init__( + mp_context=mp_context, + serialization=serialization, + encoding=encoding, + max_pending_size=max_pending_size, + max_buffer_send_size=max_buffer_send_size, + max_done_size=max_done_size, + max_buffer_receive_size=max_buffer_receive_size, + poll_interval=poll_interval, + worker_index=worker_index, + ) + self.num_workers = num_workers + + if pipe is None: + self.pipes: list[tuple[Connection, Connection]] = [ + self.mp_context.Pipe(duplex=True) for _ in range(num_workers) + ] + else: + self.pipes: list[tuple[Connection, Connection]] = [pipe] + + def create_worker_copy( + self, worker_index: int, **kwargs + ) -> InterProcessMessagingPipe[ReceiveMessageT, SendMessageT]: + """ + Create worker-specific copy for pipe-based coordination. 
+ + :param worker_index: Index of the worker process for pipe routing + :return: Configured pipe messaging instance for the specified worker + """ + copy_args = { + "num_workers": self.num_workers, + "mp_context": self.mp_context, + "serialization": self.serialization, + "encoding": self.encoding, + "max_pending_size": self.max_pending_size, + "max_buffer_send_size": self.max_buffer_send_size, + "max_done_size": self.max_done_size, + "max_buffer_receive_size": self.max_buffer_receive_size, + "poll_interval": self.poll_interval, + "worker_index": worker_index, + "pipe": self.pipes[worker_index], + } + copy_args.update(kwargs) + + return InterProcessMessagingPipe(**copy_args) + + async def stop(self): + """ + Stop the messaging system and wait for all tasks to complete. + """ + await super().stop() + if self.worker_index is None: + # Only main process should close the pipes + for main_con, worker_con in self.pipes: + main_con.close() + worker_con.close() + + def create_send_messages_threads( + self, + send_items: Iterable[Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ) -> list[tuple[Callable, tuple[Any, ...]]]: + """ + Create send message processing threads for pipe-based transport. + + :param send_items: Optional collection of items to send during processing + :param message_encoding: Message encoding configuration for serialization + :param check_stop: Callable for evaluating stop conditions during processing + :return: List of thread callables with their arguments for execution + """ + if self.worker_index is None: + # Create a separate task for each worker's pipe + return [ + ( + self._send_messages_task_thread, + (self.pipes[index], send_items, message_encoding, check_stop), + ) + for index in range(self.num_workers) + ] + else: + return [ + ( + self._send_messages_task_thread, + (self.pipes[0], send_items, message_encoding, check_stop), + ) + ] + + def create_receive_messages_threads( + self, + receive_callback: Callable[[Any], Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ) -> list[tuple[Callable, tuple[Any, ...]]]: + """ + Create receive message processing threads for pipe-based transport. 
+ + :param receive_callback: Optional callback for processing received messages + :param message_encoding: Message encoding configuration for deserialization + :param check_stop: Callable for evaluating stop conditions during processing + :return: List of thread callables with their arguments for execution + """ + if self.worker_index is None: + # Create a separate task for each worker's pipe + return [ + ( + self._receive_messages_task_thread, + (self.pipes[index], receive_callback, message_encoding, check_stop), + ) + for index in range(self.num_workers) + ] + else: + return [ + ( + self._receive_messages_task_thread, + (self.pipes[0], receive_callback, message_encoding, check_stop), + ) + ] + + def _send_messages_task_thread( # noqa: C901, PLR0912 + self, + pipe: tuple[Connection, Connection], + send_items: Iterable[Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ): + local_stop = ThreadingEvent() + send_connection: Connection = pipe[0] if self.worker_index is None else pipe[1] + send_items_iter = iter(send_items) if send_items is not None else None + pending_item = None + queue_empty = 0 + pipe_item = None + pipe_lock = threading.Lock() + + def _background_pipe_recv(): + nonlocal pipe_item + + while not local_stop.is_set(): + try: + with pipe_lock: + pending = pipe_item + pipe_item = None + + if pending is not None: + send_connection.send(pending) + except (EOFError, ConnectionResetError): + break + + if send_items_iter is None: + threading.Thread(target=_background_pipe_recv, daemon=True).start() + + try: + while not check_stop(pending_item is not None, queue_empty): + if pending_item is None: + try: + if send_items_iter is not None: + item = next(send_items_iter) + else: + item = self.buffer_send_queue.sync_get( + timeout=self.poll_interval + ) + pending_item = message_encoding.encode(item) + queue_empty = 0 + except (culsans.QueueEmpty, queue.Empty, StopIteration): + queue_empty += 1 + + if pending_item is not None: + try: + with pipe_lock: + if pipe_item is not None: + time.sleep(self.poll_interval / 100) + raise queue.Full + else: + pipe_item = pending_item + if send_items_iter is None: + self.buffer_send_queue.task_done() + pending_item = None + except (culsans.QueueFull, queue.Full): + pass + finally: + local_stop.set() + + def _receive_messages_task_thread( # noqa: C901 + self, + pipe: tuple[Connection, Connection], + receive_callback: Callable[[Any], Any] | None, + message_encoding: MessageEncoding, + check_stop: Callable[[bool, bool], bool], + ): + receive_connection: Connection = ( + pipe[0] if self.worker_index is not None else pipe[1] + ) + pending_item = None + received_item = None + queue_empty = 0 + + while not check_stop(pending_item is not None, queue_empty): + if pending_item is None: + try: + if receive_connection.poll(self.poll_interval): + item = receive_connection.recv() + pending_item = message_encoding.decode(item) + else: + raise queue.Empty + queue_empty = 0 + except (culsans.QueueEmpty, queue.Empty): + queue_empty += 1 + + if pending_item is not None or received_item is not None: + try: + if received_item is None: + received_item = ( + pending_item + if not receive_callback + else receive_callback(pending_item) + ) + + self.buffer_receive_queue.sync_put(received_item) + pending_item = None + received_item = None + except (culsans.QueueFull, queue.Full): + pass diff --git a/src/guidellm/utils/mixins.py b/src/guidellm/utils/mixins.py new file mode 100644 index 00000000..b001ff2d --- /dev/null +++ 
b/src/guidellm/utils/mixins.py @@ -0,0 +1,115 @@ +""" +Mixin classes for common metadata extraction and object introspection. + +Provides reusable mixins for extracting structured metadata from objects, +enabling consistent information exposure across different class hierarchies. +""" + +from __future__ import annotations + +from typing import Any + +__all__ = ["InfoMixin"] + + +PYTHON_PRIMITIVES = (str, int, float, bool, list, tuple, dict) +"""Type alias for serialized object representations""" + + +class InfoMixin: + """ + Mixin class providing standardized metadata extraction for introspection. + + Enables consistent object metadata extraction patterns across different + class hierarchies for debugging, serialization, and runtime analysis. + Provides both instance and class-level methods for extracting structured + information from arbitrary objects with fallback handling for objects + without built-in info capabilities. + + Example: + :: + from guidellm.utils.mixins import InfoMixin + + class ConfiguredClass(InfoMixin): + def __init__(self, setting: str): + self.setting = setting + + obj = ConfiguredClass("value") + # Returns {'str': 'ConfiguredClass(...)', 'type': 'ConfiguredClass', ...} + print(obj.info) + """ + + @classmethod + def extract_from_obj(cls, obj: Any) -> dict[str, Any]: + """ + Extract structured metadata from any object. + + Attempts to use the object's own `info` method or property if available, + otherwise constructs metadata from object attributes and type information. + Provides consistent metadata format across different object types. + + :param obj: Object to extract metadata from + :return: Dictionary containing object metadata including type, class, + module, and public attributes + """ + if hasattr(obj, "info"): + return obj.info() if callable(obj.info) else obj.info + + return { + "str": str(obj), + "type": type(obj).__name__, + "class": obj.__class__.__name__ if hasattr(obj, "__class__") else None, + "module": obj.__class__.__module__ if hasattr(obj, "__class__") else None, + "attributes": ( + { + key: val if isinstance(val, PYTHON_PRIMITIVES) else repr(val) + for key, val in obj.__dict__.items() + if not key.startswith("_") + } + if hasattr(obj, "__dict__") + else {} + ), + } + + @classmethod + def create_info_dict(cls, obj: Any) -> dict[str, Any]: + """ + Create a structured info dictionary for the given object. + + Builds standardized metadata dictionary containing object identification, + type information, and accessible attributes. Used internally by other + info extraction methods and available for direct metadata construction. + + :param obj: Object to extract info from + :return: Dictionary containing structured metadata about the object + """ + return { + "str": str(obj), + "type": type(obj).__name__, + "class": obj.__class__.__name__ if hasattr(obj, "__class__") else None, + "module": obj.__class__.__module__ if hasattr(obj, "__class__") else None, + "attributes": ( + { + key: val + if isinstance(val, (str, int, float, bool, list, dict)) + else repr(val) + for key, val in obj.__dict__.items() + if not key.startswith("_") + } + if hasattr(obj, "__dict__") + else {} + ), + } + + @property + def info(self) -> dict[str, Any]: + """ + Return structured metadata about this instance. + + Provides consistent access to object metadata for debugging, serialization, + and introspection. Uses the create_info_dict method to generate standardized + metadata format including class information and public attributes. 
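# Sketch of InfoMixin.extract_from_obj on a plain object that exposes no `info`
# of its own (hypothetical class; underscore-prefixed attributes are skipped).
from guidellm.utils.mixins import InfoMixin


class Plain:
    def __init__(self):
        self.name = "demo"
        self._secret = 1


meta = InfoMixin.extract_from_obj(Plain())
assert meta["type"] == "Plain"
assert meta["attributes"] == {"name": "demo"}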
+ + :return: Dictionary containing class name, module, and public attributes + """ + return self.create_info_dict(self) diff --git a/src/guidellm/utils/pydantic_utils.py b/src/guidellm/utils/pydantic_utils.py new file mode 100644 index 00000000..27c2e1cf --- /dev/null +++ b/src/guidellm/utils/pydantic_utils.py @@ -0,0 +1,401 @@ +""" +Pydantic utilities for polymorphic model serialization and registry integration. + +Provides integration between Pydantic and the registry system, enabling +polymorphic serialization and deserialization of Pydantic models using +a discriminator field and dynamic class registry. Includes base model classes +with standardized configurations and generic status breakdown models for +structured result organization. +""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Any, ClassVar, Generic, TypeVar + +from pydantic import BaseModel, ConfigDict, Field, GetCoreSchemaHandler +from pydantic_core import CoreSchema, core_schema +from typing_extensions import get_args, get_origin + +from guidellm.utils.registry import RegistryMixin + +__all__ = [ + "PydanticClassRegistryMixin", + "ReloadableBaseModel", + "StandardBaseDict", + "StandardBaseModel", + "StatusBreakdown", +] + + +BaseModelT = TypeVar("BaseModelT", bound=BaseModel) +RegisterClassT = TypeVar("RegisterClassT") +SuccessfulT = TypeVar("SuccessfulT") +ErroredT = TypeVar("ErroredT") +IncompleteT = TypeVar("IncompleteT") +TotalT = TypeVar("TotalT") + + +class ReloadableBaseModel(BaseModel): + """ + Base Pydantic model with schema reloading capabilities. + + Provides dynamic schema rebuilding functionality for models that need to + update their validation schemas at runtime, particularly useful when + working with registry-based polymorphic models where new types are + registered after initial class definition. + """ + + model_config = ConfigDict( + extra="ignore", + use_enum_values=True, + from_attributes=True, + arbitrary_types_allowed=True, + ) + + @classmethod + def reload_schema(cls, parents: bool = True) -> None: + """ + Reload the class schema with updated registry information. + + Forces a complete rebuild of the Pydantic model schema to incorporate + any changes made to associated registries or validation rules. + + :param parents: Whether to also rebuild schemas for any pydantic parent + types that reference this model. + """ + cls.model_rebuild(force=True) + + if parents: + cls.reload_parent_schemas() + + @classmethod + def reload_parent_schemas(cls): + """ + Recursively reload schemas for all parent Pydantic models. + + Traverses the inheritance hierarchy to find all parent classes that + are Pydantic models and triggers schema rebuilding on each to ensure + that any changes in child models are reflected in parent schemas. 
+ """ + potential_parents: set[type[BaseModel]] = {BaseModel} + stack: list[type[BaseModel]] = [BaseModel] + + while stack: + current = stack.pop() + for subclass in current.__subclasses__(): + if ( + issubclass(subclass, BaseModel) + and subclass is not cls + and subclass not in potential_parents + ): + potential_parents.add(subclass) + stack.append(subclass) + + for check in cls.__mro__: + if isinstance(check, type) and issubclass(check, BaseModel): + cls._reload_schemas_depending_on(check, potential_parents) + + @classmethod + def _reload_schemas_depending_on(cls, target: type[BaseModel], types: set[type]): + changed = True + while changed: + changed = False + for candidate in types: + if ( + isinstance(candidate, type) + and issubclass(candidate, BaseModel) + and any( + cls._uses_type(target, field_info.annotation) + for field_info in candidate.model_fields.values() + if field_info.annotation is not None + ) + ): + try: + before = candidate.model_json_schema() + except Exception: # noqa: BLE001 + before = None + candidate.model_rebuild(force=True) + if before is not None: + after = candidate.model_json_schema() + changed |= before != after + + @classmethod + def _uses_type(cls, target: type, candidate: type) -> bool: + if target is candidate: + return True + + origin = get_origin(candidate) + + if origin is None: + return isinstance(candidate, type) and issubclass(candidate, target) + + if isinstance(origin, type) and ( + target is origin or issubclass(origin, target) + ): + return True + + for arg in get_args(candidate) or []: + if isinstance(arg, type) and cls._uses_type(target, arg): + return True + + return False + + +class StandardBaseModel(BaseModel): + """ + Base Pydantic model with standardized configuration for GuideLLM. + + Provides consistent validation behavior and configuration settings across + all Pydantic models in the application, including field validation, + attribute conversion, and default value handling. + + Example: + :: + class MyModel(StandardBaseModel): + name: str + value: int = 42 + + # Access default values + default_value = MyModel.get_default("value") # Returns 42 + """ + + model_config = ConfigDict( + extra="ignore", + use_enum_values=True, + from_attributes=True, + ) + + @classmethod + def get_default(cls: type[BaseModel], field: str) -> Any: + """ + Get default value for a model field. + + :param field: Name of the field to get the default value for + :return: Default value of the specified field + :raises KeyError: If the field does not exist in the model + """ + return cls.model_fields[field].default + + +class StandardBaseDict(StandardBaseModel): + """ + Base Pydantic model allowing arbitrary additional fields. + + Extends StandardBaseModel to accept extra fields beyond those explicitly + defined in the model schema. Useful for flexible data structures that + need to accommodate varying or unknown field sets while maintaining + type safety for known fields. + """ + + model_config = ConfigDict( + extra="allow", + use_enum_values=True, + from_attributes=True, + arbitrary_types_allowed=True, + ) + + +class StatusBreakdown(BaseModel, Generic[SuccessfulT, ErroredT, IncompleteT, TotalT]): + """ + Generic model for organizing results by processing status. + + Provides structured categorization of results into successful, errored, + incomplete, and total status groups. Supports flexible typing for each + status category to accommodate different result types while maintaining + consistent organization patterns across the application. 
+ + Example: + :: + from guidellm.utils import StatusBreakdown + + # Define a breakdown for request counts + breakdown = StatusBreakdown[int, int, int, int]( + successful=150, + errored=5, + incomplete=10, + total=165 + ) + """ + + successful: SuccessfulT = Field( + description="Results or metrics for requests with successful completion status", + default=None, # type: ignore[assignment] + ) + errored: ErroredT = Field( + description="Results or metrics for requests with error completion status", + default=None, # type: ignore[assignment] + ) + incomplete: IncompleteT = Field( + description="Results or metrics for requests with incomplete processing status", + default=None, # type: ignore[assignment] + ) + total: TotalT = Field( + description="Aggregated results or metrics combining all status categories", + default=None, # type: ignore[assignment] + ) + + +class PydanticClassRegistryMixin( + ReloadableBaseModel, RegistryMixin[type[BaseModelT]], ABC, Generic[BaseModelT] +): + """ + Polymorphic Pydantic model mixin enabling registry-based dynamic instantiation. + + Integrates Pydantic validation with the registry system to enable polymorphic + serialization and deserialization based on a discriminator field. Automatically + instantiates the correct subclass during validation based on registry mappings, + providing a foundation for extensible plugin-style architectures. + + Example: + :: + from speculators.utils import PydanticClassRegistryMixin + + class BaseConfig(PydanticClassRegistryMixin["BaseConfig"]): + schema_discriminator: ClassVar[str] = "config_type" + config_type: str = Field(description="Configuration type identifier") + + @classmethod + def __pydantic_schema_base_type__(cls) -> type["BaseConfig"]: + return BaseConfig + + @BaseConfig.register("database") + class DatabaseConfig(BaseConfig): + config_type: str = "database" + connection_string: str = Field(description="Database connection string") + + # Dynamic instantiation based on discriminator + config = BaseConfig.model_validate({ + "config_type": "database", + "connection_string": "postgresql://localhost:5432/db" + }) + + :cvar schema_discriminator: Field name used for polymorphic type discrimination + """ + + schema_discriminator: ClassVar[str] = "model_type" + + @classmethod + def register_decorator( + cls, clazz: RegisterClassT, name: str | list[str] | None = None + ) -> RegisterClassT: + """ + Register a Pydantic model class with type validation and schema reload. + + Validates that the class is a proper Pydantic BaseModel subclass before + registering it in the class registry. Automatically triggers schema + reload to incorporate the new type into polymorphic validation. + + :param clazz: Pydantic model class to register in the polymorphic hierarchy + :param name: Registry identifier for the class. Uses class name if None + :return: The registered class unchanged for decorator chaining + :raises TypeError: If clazz is not a Pydantic BaseModel subclass + """ + if not issubclass(clazz, BaseModel): + raise TypeError( + f"Cannot register {clazz.__name__} as it is not a subclass of " + "Pydantic BaseModel" + ) + + super().register_decorator(clazz, name=name) + cls.reload_schema() + + return clazz + + @classmethod + def __get_pydantic_core_schema__( + cls, source_type: Any, handler: GetCoreSchemaHandler + ) -> CoreSchema: + """ + Generate polymorphic validation schema for dynamic type instantiation. 
+ + Creates a tagged union schema that enables Pydantic to automatically + instantiate the correct subclass based on the discriminator field value. + Falls back to base schema generation when no registry is available. + + :param source_type: Type being processed for schema generation + :param handler: Pydantic core schema generation handler + :return: Tagged union schema for polymorphic validation or base schema + """ + if source_type == cls.__pydantic_schema_base_type__(): + if not cls.registry: + return cls.__pydantic_generate_base_schema__(handler) + + choices = { + name: handler(model_class) for name, model_class in cls.registry.items() + } + + return core_schema.tagged_union_schema( + choices=choices, + discriminator=cls.schema_discriminator, + ) + + return handler(cls) + + @classmethod + @abstractmethod + def __pydantic_schema_base_type__(cls) -> type[BaseModelT]: + """ + Define the base type for polymorphic validation hierarchy. + + Must be implemented by subclasses to specify which type serves as the + root of the polymorphic hierarchy for schema generation and validation. + + :return: Base class type for the polymorphic model hierarchy + """ + ... + + @classmethod + def __pydantic_generate_base_schema__( + cls, handler: GetCoreSchemaHandler + ) -> CoreSchema: + """ + Generate fallback schema for polymorphic models without registry. + + Provides a base schema that accepts any valid input when no registry + is available for polymorphic validation. Used as fallback during + schema generation when the registry has not been populated. + + :param handler: Pydantic core schema generation handler + :return: Base CoreSchema that accepts any valid input + """ + return core_schema.any_schema() + + @classmethod + def auto_populate_registry(cls) -> bool: + """ + Initialize registry with auto-discovery and reload validation schema. + + Triggers automatic population of the class registry through the parent + RegistryMixin functionality and ensures the Pydantic validation schema + is updated to include all discovered types for polymorphic validation. + + :return: True if registry was populated, False if already populated + :raises ValueError: If called when registry_auto_discovery is disabled + """ + populated = super().auto_populate_registry() + cls.reload_schema() + + return populated + + @classmethod + def registered_classes(cls) -> tuple[type[BaseModelT], ...]: + """ + Get all registered pydantic classes from the registry. + + Automatically triggers auto-discovery if registry_auto_discovery is enabled + to ensure all available implementations are included. + + :return: Tuple of all registered classes including auto-discovered ones + :raises ValueError: If called before any objects have been registered + """ + if cls.registry_auto_discovery: + cls.auto_populate_registry() + + if cls.registry is None: + raise ValueError( + "ClassRegistryMixin.registered_classes() must be called after " + "registering classes with ClassRegistryMixin.register()." + ) + + return tuple(cls.registry.values()) diff --git a/src/guidellm/utils/registry.py b/src/guidellm/utils/registry.py new file mode 100644 index 00000000..b9e3faf5 --- /dev/null +++ b/src/guidellm/utils/registry.py @@ -0,0 +1,214 @@ +""" +Registry system for dynamic object registration and discovery. + +Provides a flexible object registration system with optional auto-discovery +capabilities through decorators and module imports. 
Enables dynamic discovery +and instantiation of implementations based on configuration parameters, supporting +both manual registration and automatic package-based discovery for extensible +plugin architectures. +""" + +from __future__ import annotations + +from typing import Callable, ClassVar, Generic, TypeVar, cast + +from guidellm.utils.auto_importer import AutoImporterMixin + +__all__ = ["RegisterT", "RegistryMixin", "RegistryObjT"] + + +RegistryObjT = TypeVar("RegistryObjT") +"""Generic type variable for objects managed by the registry system.""" +RegisterT = TypeVar("RegisterT") +"""Generic type variable for the args and return values within the registry.""" + + +class RegistryMixin(Generic[RegistryObjT], AutoImporterMixin): + """ + Generic mixin for creating object registries with optional auto-discovery. + + Enables classes to maintain separate registries of objects that can be dynamically + discovered and instantiated through decorators and module imports. Supports both + manual registration via decorators and automatic discovery through package scanning + for extensible plugin architectures. + + Example: + :: + class BaseAlgorithm(RegistryMixin): + pass + + @BaseAlgorithm.register() + class ConcreteAlgorithm(BaseAlgorithm): + pass + + @BaseAlgorithm.register("custom_name") + class AnotherAlgorithm(BaseAlgorithm): + pass + + # Get all registered implementations + algorithms = BaseAlgorithm.registered_objects() + + Example with auto-discovery: + :: + class TokenProposal(RegistryMixin): + registry_auto_discovery = True + auto_package = "mypackage.proposals" + + # Automatically imports and registers decorated objects + proposals = TokenProposal.registered_objects() + + :cvar registry: Dictionary mapping names to registered objects + :cvar registry_auto_discovery: Enable automatic package-based discovery + :cvar registry_populated: Track whether auto-discovery has completed + """ + + registry: ClassVar[dict[str, RegistryObjT] | None] = None + registry_auto_discovery: ClassVar[bool] = False + registry_populated: ClassVar[bool] = False + + @classmethod + def register( + cls, name: str | list[str] | None = None + ) -> Callable[[RegisterT], RegisterT]: + """ + Decorator for registering objects with the registry. + + :param name: Optional name(s) to register the object under. + If None, uses the object's __name__ attribute + :return: Decorator function that registers the decorated object + :raises ValueError: If name is not a string, list of strings, or None + """ + + def _decorator(obj: RegisterT) -> RegisterT: + cls.register_decorator(obj, name=name) + return obj + + return _decorator + + @classmethod + def register_decorator( + cls, obj: RegisterT, name: str | list[str] | None = None + ) -> RegisterT: + """ + Register an object directly with the registry. + + :param obj: The object to register + :param name: Optional name(s) to register the object under. + If None, uses the object's __name__ attribute + :return: The registered object + :raises ValueError: If the object is already registered or name is invalid + """ + + if name is None: + name = obj.__name__ + elif not isinstance(name, (str, list)): + raise ValueError( + "RegistryMixin.register_decorator name must be a string or " + f"an iterable of strings. Got {name}." 
+ ) + + if cls.registry is None: + cls.registry = {} + + names = [name] if isinstance(name, str) else list(name) + + for register_name in names: + if not isinstance(register_name, str): + raise ValueError( + "RegistryMixin.register_decorator name must be a string or " + f"a list of strings. Got {register_name}." + ) + + if register_name in cls.registry: + raise ValueError( + f"RegistryMixin.register_decorator cannot register an object " + f"{obj} with the name {register_name} because it is already " + "registered." + ) + + cls.registry[register_name] = cast("RegistryObjT", obj) + + return obj + + @classmethod + def auto_populate_registry(cls) -> bool: + """ + Import and register all modules from the auto_package. + + Automatically called by registered_objects when registry_auto_discovery is True + to ensure all available implementations are discovered. + + :return: True if registry was populated, False if already populated + :raises ValueError: If called when registry_auto_discovery is False + """ + if not cls.registry_auto_discovery: + raise ValueError( + "RegistryMixin.auto_populate_registry() cannot be called " + "because registry_auto_discovery is set to False. " + "Set registry_auto_discovery to True to enable auto-discovery." + ) + + if cls.registry_populated: + return False + + cls.auto_import_package_modules() + cls.registry_populated = True + + return True + + @classmethod + def registered_objects(cls) -> tuple[RegistryObjT, ...]: + """ + Get all registered objects from the registry. + + Automatically triggers auto-discovery if registry_auto_discovery is enabled + to ensure all available implementations are included. + + :return: Tuple of all registered objects including auto-discovered ones + :raises ValueError: If called before any objects have been registered + """ + if cls.registry_auto_discovery: + cls.auto_populate_registry() + + if cls.registry is None: + raise ValueError( + "RegistryMixin.registered_objects() must be called after " + "registering objects with RegistryMixin.register()." + ) + + return tuple(cls.registry.values()) + + @classmethod + def is_registered(cls, name: str) -> bool: + """ + Check if an object is registered under the given name. + It matches first by exact name, then by str.lower(). + + :param name: The name to check for registration. + :return: True if the object is registered, False otherwise. + """ + if cls.registry is None: + return False + + return name in cls.registry or name.lower() in [ + key.lower() for key in cls.registry + ] + + @classmethod + def get_registered_object(cls, name: str) -> RegistryObjT | None: + """ + Get a registered object by its name. It matches first by exact name, + then by str.lower(). + + :param name: The name of the registered object. + :return: The registered object if found, None otherwise. + """ + if cls.registry is None: + return None + + if name in cls.registry: + return cls.registry[name] + + lower_key_map = {key.lower(): key for key in cls.registry} + + return cls.registry.get(lower_key_map.get(name.lower())) diff --git a/src/guidellm/utils/singleton.py b/src/guidellm/utils/singleton.py new file mode 100644 index 00000000..3ec10f79 --- /dev/null +++ b/src/guidellm/utils/singleton.py @@ -0,0 +1,130 @@ +""" +Singleton pattern implementations for ensuring single instance classes. + +Provides singleton mixins for creating classes that maintain a single instance +throughout the application lifecycle, with support for both basic and thread-safe +implementations. 
These mixins integrate with the scheduler and other system components +to ensure consistent state management and prevent duplicate resource allocation. +""" + +from __future__ import annotations + +import threading + +__all__ = ["SingletonMixin", "ThreadSafeSingletonMixin"] + + +class SingletonMixin: + """ + Basic singleton mixin ensuring single instance per class. + + Implements the singleton pattern using class variables to control instance + creation. Subclasses must call super().__init__() for proper initialization + state management. Suitable for single-threaded environments or when external + synchronization is provided. + + Example: + :: + class ConfigManager(SingletonMixin): + def __init__(self, config_path: str): + super().__init__() + if not self.initialized: + self.config = load_config(config_path) + + manager1 = ConfigManager("config.json") + manager2 = ConfigManager("config.json") + assert manager1 is manager2 + """ + + def __new__(cls, *args, **kwargs): # noqa: ARG004 + """ + Create or return the singleton instance. + + :param args: Positional arguments passed to the constructor + :param kwargs: Keyword arguments passed to the constructor + :return: The singleton instance of the class + """ + # Use class-specific attribute name to avoid inheritance issues + attr_name = f"_singleton_instance_{cls.__name__}" + + if not hasattr(cls, attr_name) or getattr(cls, attr_name) is None: + instance = super().__new__(cls) + setattr(cls, attr_name, instance) + instance._singleton_initialized = False + return getattr(cls, attr_name) + + def __init__(self): + """Initialize the singleton instance exactly once.""" + if hasattr(self, "_singleton_initialized") and self._singleton_initialized: + return + self._singleton_initialized = True + + @property + def initialized(self): + """Return True if the singleton has been initialized.""" + return getattr(self, "_singleton_initialized", False) + + +class ThreadSafeSingletonMixin(SingletonMixin): + """ + Thread-safe singleton mixin with locking mechanisms. + + Extends SingletonMixin with thread safety using locks to prevent race + conditions during instance creation in multi-threaded environments. Essential + for scheduler components and other shared resources accessed concurrently. + + Example: + :: + class SchedulerResource(ThreadSafeSingletonMixin): + def __init__(self): + super().__init__() + if not self.initialized: + self.resource_pool = initialize_resources() + """ + + def __new__(cls, *args, **kwargs): # noqa: ARG004 + """ + Create or return the singleton instance with thread safety. 
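+        Instance creation is serialized on a class-specific lock, so concurrent
+        first calls from multiple threads all receive the same shared instance.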
+ + :param args: Positional arguments passed to the constructor + :param kwargs: Keyword arguments passed to the constructor + :return: The singleton instance of the class + """ + # Use class-specific lock and instance names to avoid inheritance issues + lock_attr_name = f"_singleton_lock_{cls.__name__}" + instance_attr_name = f"_singleton_instance_{cls.__name__}" + + with getattr(cls, lock_attr_name): + instance_exists = ( + hasattr(cls, instance_attr_name) + and getattr(cls, instance_attr_name) is not None + ) + if not instance_exists: + instance = super(SingletonMixin, cls).__new__(cls) + setattr(cls, instance_attr_name, instance) + instance._singleton_initialized = False + instance._init_lock = threading.Lock() + return getattr(cls, instance_attr_name) + + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) + lock_attr_name = f"_singleton_lock_{cls.__name__}" + setattr(cls, lock_attr_name, threading.Lock()) + + def __init__(self): + """Initialize the singleton instance with thread-safe initialization.""" + with self._init_lock: + if hasattr(self, "_singleton_initialized") and self._singleton_initialized: + return + self._singleton_initialized = True + + @property + def thread_lock(self): + """Return the thread lock for this singleton instance.""" + return getattr(self, "_init_lock", None) + + @classmethod + def get_singleton_lock(cls): + """Get the class-specific singleton creation lock.""" + lock_attr_name = f"_singleton_lock_{cls.__name__}" + return getattr(cls, lock_attr_name, None) diff --git a/src/guidellm/utils/statistics.py b/src/guidellm/utils/statistics.py new file mode 100644 index 00000000..c820de9d --- /dev/null +++ b/src/guidellm/utils/statistics.py @@ -0,0 +1,990 @@ +""" +Statistical analysis utilities for distribution calculations and running metrics. + +Provides comprehensive statistical computation tools for analyzing numerical +distributions, percentiles, and streaming data. Includes specialized support for +request timing analysis, concurrency measurement, and rate calculations. Integrates +with Pydantic for serializable statistical models and supports both weighted and +unweighted distributions with cumulative distribution function (CDF) generation. +""" + +from __future__ import annotations + +import math +import time as timer +from collections import defaultdict +from typing import Any, Literal + +import numpy as np +from pydantic import Field, computed_field + +from guidellm.utils.pydantic_utils import StandardBaseModel, StatusBreakdown + +__all__ = [ + "DistributionSummary", + "Percentiles", + "RunningStats", + "StatusDistributionSummary", + "TimeRunningStats", +] + + +class Percentiles(StandardBaseModel): + """ + Standard percentiles model for statistical distribution analysis. + + Provides complete percentile coverage from 0.1th to 99.9th percentiles for + statistical distribution characterization. Used as a component within + DistributionSummary to provide detailed distribution shape analysis. 
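+
+    Example (illustrative; a Percentiles instance is normally read from a
+    computed DistributionSummary rather than constructed directly):
+    ::
+        summary = DistributionSummary.from_values([1.0, 2.0, 3.0, 4.0, 5.0])
+        print(summary.percentiles.p50, summary.percentiles.p95)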
+ """ + + p001: float = Field( + description="The 0.1th percentile of the distribution.", + ) + p01: float = Field( + description="The 1st percentile of the distribution.", + ) + p05: float = Field( + description="The 5th percentile of the distribution.", + ) + p10: float = Field( + description="The 10th percentile of the distribution.", + ) + p25: float = Field( + description="The 25th percentile of the distribution.", + ) + p50: float = Field( + description="The 50th percentile of the distribution.", + ) + p75: float = Field( + description="The 75th percentile of the distribution.", + ) + p90: float = Field( + description="The 90th percentile of the distribution.", + ) + p95: float = Field( + description="The 95th percentile of the distribution.", + ) + p99: float = Field( + description="The 99th percentile of the distribution.", + ) + p999: float = Field( + description="The 99.9th percentile of the distribution.", + ) + + +class DistributionSummary(StandardBaseModel): + """ + Comprehensive statistical summary for numerical value distributions. + + Calculates and stores complete statistical metrics including central tendency, + dispersion, extremes, and percentiles for any numerical distribution. Supports + both weighted and unweighted data with optional cumulative distribution function + generation. Primary statistical analysis tool for request timing, performance + metrics, and benchmark result characterization. + + Example: + :: + # Create from simple values + summary = DistributionSummary.from_values([1.0, 2.0, 3.0, 4.0, 5.0]) + print(f"Mean: {summary.mean}, P95: {summary.percentiles.p95}") + + # Create from request timings for concurrency analysis + requests = [(0.0, 1.0), (0.5, 2.0), (1.0, 2.5)] + concurrency = DistributionSummary.from_request_times( + requests, "concurrency" + ) + """ + + mean: float = Field( + description="The mean/average of the distribution.", + ) + median: float = Field( + description="The median of the distribution.", + ) + mode: float = Field( + description="The mode of the distribution.", + ) + variance: float = Field( + description="The variance of the distribution.", + ) + std_dev: float = Field( + description="The standard deviation of the distribution.", + ) + min: float = Field( + description="The minimum value of the distribution.", + ) + max: float = Field( + description="The maximum value of the distribution.", + ) + count: int = Field( + description="The number of values in the distribution.", + ) + total_sum: float = Field( + description="The total sum of the values in the distribution.", + ) + percentiles: Percentiles = Field( + description="The percentiles of the distribution.", + ) + cumulative_distribution_function: list[tuple[float, float]] | None = Field( + description="The cumulative distribution function (CDF) of the distribution.", + default=None, + ) + + @staticmethod + def from_distribution_function( + distribution: list[tuple[float, float]], + include_cdf: bool = False, + ) -> DistributionSummary: + """ + Create statistical summary from weighted distribution or probability function. + + Converts weighted numerical values or probability distribution function (PDF) + into comprehensive statistical summary. Normalizes weights to probabilities + and calculates all statistical metrics including percentiles. 
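+        For example, the weighted distribution [(1.0, 1.0), (2.0, 3.0)] is
+        normalized to probabilities 0.25 and 0.75 before the summary statistics
+        and percentiles are derived.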
+ + :param distribution: List of (value, weight) or (value, probability) tuples + representing the distribution + :param include_cdf: Whether to include cumulative distribution function + in the output + :return: DistributionSummary instance with calculated statistical metrics + """ + values, weights = zip(*distribution) if distribution else ([], []) + values = np.array(values) # type: ignore[assignment] + weights = np.array(weights) # type: ignore[assignment] + + # create the PDF + probabilities = weights / np.sum(weights) # type: ignore[operator] + pdf = np.column_stack((values, probabilities)) + pdf = pdf[np.argsort(pdf[:, 0])] + values = pdf[:, 0] # type: ignore[assignment] + probabilities = pdf[:, 1] + + # calculate the CDF + cumulative_probabilities = np.cumsum(probabilities) + cdf = np.column_stack((values, cumulative_probabilities)) + + # calculate statistics + mean = np.sum(values * probabilities).item() # type: ignore[attr-defined] + median = cdf[np.argmax(cdf[:, 1] >= 0.5), 0].item() if len(cdf) > 0 else 0 # noqa: PLR2004 + mode = values[np.argmax(probabilities)].item() if len(values) > 0 else 0 # type: ignore[call-overload] + variance = np.sum((values - mean) ** 2 * probabilities).item() # type: ignore[attr-defined] + std_dev = math.sqrt(variance) + minimum = values[0].item() if len(values) > 0 else 0 + maximum = values[-1].item() if len(values) > 0 else 0 + count = len(values) + total_sum = np.sum(values).item() # type: ignore[attr-defined] + + return DistributionSummary( + mean=mean, + median=median, + mode=mode, + variance=variance, + std_dev=std_dev, + min=minimum, + max=maximum, + count=count, + total_sum=total_sum, + percentiles=( + Percentiles( + p001=cdf[np.argmax(cdf[:, 1] >= 0.001), 0].item(), # noqa: PLR2004 + p01=cdf[np.argmax(cdf[:, 1] >= 0.01), 0].item(), # noqa: PLR2004 + p05=cdf[np.argmax(cdf[:, 1] >= 0.05), 0].item(), # noqa: PLR2004 + p10=cdf[np.argmax(cdf[:, 1] >= 0.1), 0].item(), # noqa: PLR2004 + p25=cdf[np.argmax(cdf[:, 1] >= 0.25), 0].item(), # noqa: PLR2004 + p50=cdf[np.argmax(cdf[:, 1] >= 0.50), 0].item(), # noqa: PLR2004 + p75=cdf[np.argmax(cdf[:, 1] >= 0.75), 0].item(), # noqa: PLR2004 + p90=cdf[np.argmax(cdf[:, 1] >= 0.9), 0].item(), # noqa: PLR2004 + p95=cdf[np.argmax(cdf[:, 1] >= 0.95), 0].item(), # noqa: PLR2004 + p99=cdf[np.argmax(cdf[:, 1] >= 0.99), 0].item(), # noqa: PLR2004 + p999=cdf[np.argmax(cdf[:, 1] >= 0.999), 0].item(), # noqa: PLR2004 + ) + if len(cdf) > 0 + else Percentiles( + p001=0, + p01=0, + p05=0, + p10=0, + p25=0, + p50=0, + p75=0, + p90=0, + p95=0, + p99=0, + p999=0, + ) + ), + cumulative_distribution_function=cdf.tolist() if include_cdf else None, + ) + + @staticmethod + def from_values( + values: list[float], + weights: list[float] | None = None, + include_cdf: bool = False, + ) -> DistributionSummary: + """ + Create statistical summary from numerical values with optional weights. + + Wrapper around from_distribution_function for simple value lists. If weights + are not provided, all values are equally weighted. Enables statistical + analysis of any numerical dataset. + + :param values: Numerical values representing the distribution + :param weights: Optional weights for each value. 
If not provided, all values + are equally weighted + :param include_cdf: Whether to include cumulative distribution function in + the output DistributionSummary + :return: DistributionSummary instance with calculated statistical metrics + :raises ValueError: If values and weights lists have different lengths + """ + if weights is None: + weights = [1.0] * len(values) + + if len(values) != len(weights): + raise ValueError( + "The length of values and weights must be the same.", + ) + + return DistributionSummary.from_distribution_function( + distribution=list(zip(values, weights)), + include_cdf=include_cdf, + ) + + @staticmethod + def from_request_times( + requests: list[tuple[float, float]], + distribution_type: Literal["concurrency", "rate"], + include_cdf: bool = False, + epsilon: float = 1e-6, + ) -> DistributionSummary: + """ + Create statistical summary from request timing data. + + Analyzes request start/end times to calculate concurrency or rate + distributions. Converts timing events into statistical metrics for + performance analysis and load characterization. + + :param requests: List of (start_time, end_time) tuples for each request + :param distribution_type: Type of analysis - "concurrency" for simultaneous + requests or "rate" for completion rates + :param include_cdf: Whether to include cumulative distribution function + :param epsilon: Threshold for merging close timing events + :return: DistributionSummary with timing-based statistical metrics + :raises ValueError: If distribution_type is not "concurrency" or "rate" + """ + if distribution_type == "concurrency": + # convert to delta changes based on when requests were running + time_deltas: dict[float, int] = defaultdict(int) + for start, end in requests: + time_deltas[start] += 1 + time_deltas[end] -= 1 + + # convert to the events over time measuring concurrency changes + events = [] + active = 0 + + for time, delta in sorted(time_deltas.items()): + active += delta + events.append((time, active)) + elif distribution_type == "rate": + # convert to events for when requests finished + global_start = min(start for start, _ in requests) if requests else 0 + events = [(global_start, 1)] + [(end, 1) for _, end in requests] + else: + raise ValueError( + f"Invalid distribution_type '{distribution_type}'. " + "Must be 'concurrency' or 'rate'." 
+ ) + + # combine any events that are very close together + flattened_events: list[tuple[float, float]] = [] + for time, val in sorted(events): + last_time, last_val = ( + flattened_events[-1] if flattened_events else (None, None) + ) + + if ( + last_time is not None + and last_val is not None + and abs(last_time - time) <= epsilon + ): + flattened_events[-1] = (last_time, last_val + val) + else: + flattened_events.append((time, val)) + + # convert to value distribution function + distribution: dict[float, float] = defaultdict(float) + + for ind in range(len(flattened_events) - 1): + start_time, value = flattened_events[ind] + end_time, _ = flattened_events[ind + 1] + duration = end_time - start_time + + if distribution_type == "concurrency": + # weight the concurrency value by the duration + distribution[value] += duration + elif distribution_type == "rate": + # weight the rate value by the duration + rate = value / duration + distribution[rate] += duration + + distribution_list: list[tuple[float, float]] = sorted(distribution.items()) + + return DistributionSummary.from_distribution_function( + distribution=distribution_list, + include_cdf=include_cdf, + ) + + @staticmethod + def from_iterable_request_times( + requests: list[tuple[float, float]], + first_iter_times: list[float], + iter_counts: list[int], + first_iter_counts: list[int] | None = None, + include_cdf: bool = False, + epsilon: float = 1e-6, + ) -> DistributionSummary: + """ + Create statistical summary from iterative request timing data. + + Analyzes autoregressive or streaming requests with multiple iterations + between start and end times. Calculates rate distributions based on + iteration timing patterns for LLM token generation analysis. + + :param requests: List of (start_time, end_time) tuples for each request + :param first_iter_times: Times when first iteration was received for + each request + :param iter_counts: Total iteration counts for each request from first + iteration to end + :param first_iter_counts: Iteration counts for first iteration (defaults + to 1 for each request) + :param include_cdf: Whether to include cumulative distribution function + :param epsilon: Threshold for merging close timing events + :return: DistributionSummary with iteration rate statistical metrics + :raises ValueError: If input lists have mismatched lengths + """ + + if first_iter_counts is None: + first_iter_counts = [1] * len(requests) + + if ( + len(requests) != len(first_iter_times) + or len(requests) != len(iter_counts) + or len(requests) != len(first_iter_counts) + ): + raise ValueError( + "requests, first_iter_times, iter_counts, and first_iter_counts must" + "be the same length." 
+ f"Given {len(requests)}, {len(first_iter_times)}, {len(iter_counts)}, " + f"{len(first_iter_counts)}", + ) + + # first break up the requests into individual iterable events + events = defaultdict(int) + global_start = min(start for start, _ in requests) if requests else 0 + global_end = max(end for _, end in requests) if requests else 0 + events[global_start] = 0 + events[global_end] = 0 + + for (_, end), first_iter, first_iter_count, total_count in zip( + requests, first_iter_times, first_iter_counts, iter_counts + ): + events[first_iter] += first_iter_count + + if total_count > 1: + iter_latency = (end - first_iter) / (total_count - 1) + for ind in range(1, total_count): + events[first_iter + ind * iter_latency] += 1 + + # combine any events that are very close together + flattened_events: list[tuple[float, int]] = [] + + for time, count in sorted(events.items()): + last_time, last_count = ( + flattened_events[-1] if flattened_events else (None, None) + ) + + if ( + last_time is not None + and last_count is not None + and abs(last_time - time) <= epsilon + ): + flattened_events[-1] = (last_time, last_count + count) + else: + flattened_events.append((time, count)) + + # convert to value distribution function + distribution: dict[float, float] = defaultdict(float) + + for ind in range(len(flattened_events) - 1): + start_time, count = flattened_events[ind] + end_time, _ = flattened_events[ind + 1] + duration = end_time - start_time + rate = count / duration + distribution[rate] += duration + + distribution_list = sorted(distribution.items()) + + return DistributionSummary.from_distribution_function( + distribution=distribution_list, + include_cdf=include_cdf, + ) + + +class StatusDistributionSummary( + StatusBreakdown[ + DistributionSummary, + DistributionSummary, + DistributionSummary, + DistributionSummary, + ] +): + """ + Status-grouped statistical summary for request processing analysis. + + Provides comprehensive statistical analysis grouped by request status (total, + successful, incomplete, errored). Enables performance analysis across different + request outcomes for benchmarking and monitoring applications. Each status + category maintains complete DistributionSummary metrics. + + Example: + :: + status_summary = StatusDistributionSummary.from_values( + value_types=["successful", "error", "successful"], + values=[1.5, 10.0, 2.1] + ) + print(f"Success mean: {status_summary.successful.mean}") + print(f"Error rate: {status_summary.errored.count}") + """ + + @staticmethod + def from_values( + value_types: list[Literal["successful", "incomplete", "error"]], + values: list[float], + weights: list[float] | None = None, + include_cdf: bool = False, + ) -> StatusDistributionSummary: + """ + Create status-grouped statistical summary from values and status types. + + Groups numerical values by request status and calculates complete + statistical summaries for each category. Enables performance analysis + across different request outcomes. 
+ + :param value_types: Status type for each value ("successful", "incomplete", + or "error") + :param values: Numerical values representing the distribution + :param weights: Optional weights for each value (defaults to equal weighting) + :param include_cdf: Whether to include cumulative distribution functions + :return: StatusDistributionSummary with statistics grouped by status + :raises ValueError: If input lists have mismatched lengths or invalid + status types + """ + if any( + type_ not in {"successful", "incomplete", "error"} for type_ in value_types + ): + raise ValueError( + "value_types must be one of 'successful', 'incomplete', or 'error'. " + f"Got {value_types} instead.", + ) + + if weights is None: + weights = [1.0] * len(values) + + if len(value_types) != len(values) or len(value_types) != len(weights): + raise ValueError( + "The length of value_types, values, and weights must be the same.", + ) + + _, successful_values, successful_weights = ( + zip(*successful) + if ( + successful := list( + filter( + lambda val: val[0] == "successful", + zip(value_types, values, weights), + ) + ) + ) + else ([], [], []) + ) + _, incomplete_values, incomplete_weights = ( + zip(*incomplete) + if ( + incomplete := list( + filter( + lambda val: val[0] == "incomplete", + zip(value_types, values, weights), + ) + ) + ) + else ([], [], []) + ) + _, errored_values, errored_weights = ( + zip(*errored) + if ( + errored := list( + filter( + lambda val: val[0] == "error", + zip(value_types, values, weights), + ) + ) + ) + else ([], [], []) + ) + + return StatusDistributionSummary( + total=DistributionSummary.from_values( + values, + weights, + include_cdf=include_cdf, + ), + successful=DistributionSummary.from_values( + successful_values, # type: ignore[arg-type] + successful_weights, # type: ignore[arg-type] + include_cdf=include_cdf, + ), + incomplete=DistributionSummary.from_values( + incomplete_values, # type: ignore[arg-type] + incomplete_weights, # type: ignore[arg-type] + include_cdf=include_cdf, + ), + errored=DistributionSummary.from_values( + errored_values, # type: ignore[arg-type] + errored_weights, # type: ignore[arg-type] + include_cdf=include_cdf, + ), + ) + + @staticmethod + def from_request_times( + request_types: list[Literal["successful", "incomplete", "error"]], + requests: list[tuple[float, float]], + distribution_type: Literal["concurrency", "rate"], + include_cdf: bool = False, + epsilon: float = 1e-6, + ) -> StatusDistributionSummary: + """ + Create status-grouped statistical summary from request timing data. + + Analyzes request timings grouped by status to calculate concurrency or + rate distributions for each outcome category. Enables comparative + performance analysis across successful, incomplete, and errored requests. + + :param request_types: Status type for each request ("successful", + "incomplete", or "error") + :param requests: List of (start_time, end_time) tuples for each request + :param distribution_type: Analysis type - "concurrency" or "rate" + :param include_cdf: Whether to include cumulative distribution functions + :param epsilon: Threshold for merging close timing events + :return: StatusDistributionSummary with timing statistics by status + :raises ValueError: If input lists have mismatched lengths or invalid types + """ + if distribution_type not in {"concurrency", "rate"}: + raise ValueError( + f"Invalid distribution_type '{distribution_type}'. " + "Must be 'concurrency' or 'rate'." 
+ ) + + if any( + type_ not in {"successful", "incomplete", "error"} + for type_ in request_types + ): + raise ValueError( + "request_types must be one of 'successful', 'incomplete', or 'error'. " + f"Got {request_types} instead.", + ) + + if len(request_types) != len(requests): + raise ValueError( + "The length of request_types and requests must be the same. " + f"Got {len(request_types)} and {len(requests)} instead.", + ) + + _, successful_requests = ( + zip(*successful) + if ( + successful := list( + filter( + lambda val: val[0] == "successful", + zip(request_types, requests), + ) + ) + ) + else ([], []) + ) + _, incomplete_requests = ( + zip(*incomplete) + if ( + incomplete := list( + filter( + lambda val: val[0] == "incomplete", + zip(request_types, requests), + ) + ) + ) + else ([], []) + ) + _, errored_requests = ( + zip(*errored) + if ( + errored := list( + filter( + lambda val: val[0] == "error", + zip(request_types, requests), + ) + ) + ) + else ([], []) + ) + + return StatusDistributionSummary( + total=DistributionSummary.from_request_times( + requests, + distribution_type=distribution_type, + include_cdf=include_cdf, + epsilon=epsilon, + ), + successful=DistributionSummary.from_request_times( + successful_requests, # type: ignore[arg-type] + distribution_type=distribution_type, + include_cdf=include_cdf, + epsilon=epsilon, + ), + incomplete=DistributionSummary.from_request_times( + incomplete_requests, # type: ignore[arg-type] + distribution_type=distribution_type, + include_cdf=include_cdf, + epsilon=epsilon, + ), + errored=DistributionSummary.from_request_times( + errored_requests, # type: ignore[arg-type] + distribution_type=distribution_type, + include_cdf=include_cdf, + epsilon=epsilon, + ), + ) + + @staticmethod + def from_iterable_request_times( + request_types: list[Literal["successful", "incomplete", "error"]], + requests: list[tuple[float, float]], + first_iter_times: list[float], + iter_counts: list[int] | None = None, + first_iter_counts: list[int] | None = None, + include_cdf: bool = False, + epsilon: float = 1e-6, + ) -> StatusDistributionSummary: + """ + Create status-grouped statistical summary from iterative request timing data. + + Analyzes autoregressive request timings grouped by status to calculate + iteration rate distributions for each outcome category. Enables comparative + analysis of token generation or streaming response performance across + different request statuses. + + :param request_types: Status type for each request ("successful", + "incomplete", or "error") + :param requests: List of (start_time, end_time) tuples for each request + :param first_iter_times: Times when first iteration was received for + each request + :param iter_counts: Total iteration counts for each request (defaults to 1) + :param first_iter_counts: Iteration counts for first iteration (defaults + to 1) + :param include_cdf: Whether to include cumulative distribution functions + :param epsilon: Threshold for merging close timing events + :return: StatusDistributionSummary with iteration statistics by status + :raises ValueError: If input lists have mismatched lengths or invalid types + """ + if any( + type_ not in {"successful", "incomplete", "error"} + for type_ in request_types + ): + raise ValueError( + "request_types must be one of 'successful', 'incomplete', or 'error'. 
" + f"Got {request_types} instead.", + ) + + if iter_counts is None: + iter_counts = [1] * len(requests) + + if first_iter_counts is None: + first_iter_counts = [1] * len(requests) + + if ( + len(request_types) != len(requests) + or len(requests) != len(first_iter_times) + or len(requests) != len(iter_counts) + or len(requests) != len(first_iter_counts) + ): + raise ValueError( + "request_types, requests, first_iter_times, iter_counts, and " + "first_iter_counts must be the same length." + f"Given {len(request_types)}, {len(requests)}, " + f"{len(first_iter_times)}, {len(iter_counts)}, " + f"{len(first_iter_counts)}", + ) + + ( + _, + successful_requests, + successful_first_iter_times, + successful_iter_counts, + successful_first_iter_counts, + ) = ( + zip(*successful) + if ( + successful := list( + filter( + lambda val: val[0] == "successful", + zip( + request_types, + requests, + first_iter_times, + iter_counts, + first_iter_counts, + ), + ) + ) + ) + else ([], [], [], [], []) + ) + ( + _, + incomplete_requests, + incomplete_first_iter_times, + incomplete_iter_counts, + incomplete_first_iter_counts, + ) = ( + zip(*incomplete) + if ( + incomplete := list( + filter( + lambda val: val[0] == "incomplete", + zip( + request_types, + requests, + first_iter_times, + iter_counts, + first_iter_counts, + ), + ) + ) + ) + else ([], [], [], [], []) + ) + ( + _, + errored_requests, + errored_first_iter_times, + errored_iter_counts, + errored_first_iter_counts, + ) = ( + zip(*errored) + if ( + errored := list( + filter( + lambda val: val[0] == "error", + zip( + request_types, + requests, + first_iter_times, + iter_counts, + first_iter_counts, + ), + ) + ) + ) + else ([], [], [], [], []) + ) + + return StatusDistributionSummary( + total=DistributionSummary.from_iterable_request_times( + requests, + first_iter_times, + iter_counts, + first_iter_counts, + include_cdf=include_cdf, + epsilon=epsilon, + ), + successful=DistributionSummary.from_iterable_request_times( + successful_requests, # type: ignore[arg-type] + successful_first_iter_times, # type: ignore[arg-type] + successful_iter_counts, # type: ignore[arg-type] + successful_first_iter_counts, # type: ignore[arg-type] + include_cdf=include_cdf, + epsilon=epsilon, + ), + incomplete=DistributionSummary.from_iterable_request_times( + incomplete_requests, # type: ignore[arg-type] + incomplete_first_iter_times, # type: ignore[arg-type] + incomplete_iter_counts, # type: ignore[arg-type] + incomplete_first_iter_counts, # type: ignore[arg-type] + include_cdf=include_cdf, + epsilon=epsilon, + ), + errored=DistributionSummary.from_iterable_request_times( + errored_requests, # type: ignore[arg-type] + errored_first_iter_times, # type: ignore[arg-type] + errored_iter_counts, # type: ignore[arg-type] + errored_first_iter_counts, # type: ignore[arg-type] + include_cdf=include_cdf, + epsilon=epsilon, + ), + ) + + +class RunningStats(StandardBaseModel): + """ + Real-time statistics tracking for streaming numerical data. + + Maintains mean, rate, and cumulative statistics for continuous data streams + without storing individual values. Optimized for memory efficiency in + long-running monitoring applications. Supports arithmetic operators for + convenient value addition and provides computed properties for derived metrics. 
+ + Example: + :: + stats = RunningStats() + stats += 10.5 # Add value using operator + stats.update(20.0, count=3) # Add value with custom count + print(f"Mean: {stats.mean}, Rate: {stats.rate}") + """ + + start_time: float = Field( + default_factory=timer.time, + description=( + "The time the running statistics object was created. " + "This is used to calculate the rate of the statistics." + ), + ) + count: int = Field( + default=0, + description="The number of values added to the running statistics.", + ) + total: float = Field( + default=0.0, + description="The total sum of the values added to the running statistics.", + ) + last: float = Field( + default=0.0, + description="The last value added to the running statistics.", + ) + + @computed_field # type: ignore[misc] + @property + def mean(self) -> float: + """ + :return: The mean of the running statistics (total / count). + If count is 0, return 0.0. + """ + if self.count == 0: + return 0.0 + return self.total / self.count + + @computed_field # type: ignore[misc] + @property + def rate(self) -> float: + """ + :return: The rate of the running statistics + (total / (time.time() - start_time)). + If count is 0, return 0.0. + """ + if self.count == 0: + return 0.0 + return self.total / (timer.time() - self.start_time) + + def __add__(self, value: Any) -> float: + """ + Add value using + operator and return current mean. + + :param value: Numerical value to add to the running statistics + :return: Updated mean after adding the value + :raises ValueError: If value is not numeric (int or float) + """ + if not isinstance(value, (int, float)): + raise ValueError( + f"Value must be an int or float, got {type(value)} instead.", + ) + + self.update(value) + + return self.mean + + def __iadd__(self, value: Any) -> RunningStats: + """ + Add value using += operator and return updated instance. + + :param value: Numerical value to add to the running statistics + :return: Self reference for method chaining + :raises ValueError: If value is not numeric (int or float) + """ + if not isinstance(value, (int, float)): + raise ValueError( + f"Value must be an int or float, got {type(value)} instead.", + ) + + self.update(value) + + return self + + def update(self, value: float, count: int = 1) -> None: + """ + Update running statistics with new value and count. + + :param value: Numerical value to add to the running statistics + :param count: Number of occurrences to count for this value (defaults to 1) + """ + self.count += count + self.total += value + self.last = value + + +class TimeRunningStats(RunningStats): + """ + Specialized running statistics for time-based measurements. + + Extends RunningStats with time-specific computed properties for millisecond + conversions. Designed for tracking latency, duration, and timing metrics in + performance monitoring applications. + + Example: + :: + time_stats = TimeRunningStats() + time_stats += 0.125 # Add 125ms in seconds + print(f"Mean: {time_stats.mean_ms}ms, Total: {time_stats.total_ms}ms") + """ + + @computed_field # type: ignore[misc] + @property + def total_ms(self) -> float: + """ + :return: The total time multiplied by 1000.0 to convert to milliseconds. + """ + return self.total * 1000.0 + + @computed_field # type: ignore[misc] + @property + def last_ms(self) -> float: + """ + :return: The last time multiplied by 1000.0 to convert to milliseconds. 
+ """ + return self.last * 1000.0 + + @computed_field # type: ignore[misc] + @property + def mean_ms(self) -> float: + """ + :return: The mean time multiplied by 1000.0 to convert to milliseconds. + """ + return self.mean * 1000.0 + + @computed_field # type: ignore[misc] + @property + def rate_ms(self) -> float: + """ + :return: The rate of the running statistics multiplied by 1000.0 + to convert to milliseconds. + """ + return self.rate * 1000.0 diff --git a/src/guidellm/utils/synchronous.py b/src/guidellm/utils/synchronous.py new file mode 100644 index 00000000..3bec0247 --- /dev/null +++ b/src/guidellm/utils/synchronous.py @@ -0,0 +1,161 @@ +""" +Async utilities for waiting on synchronization objects. + +This module provides async-compatible wrappers for threading and multiprocessing +synchronization primitives (Events and Barriers). These utilities enable async code +to wait for synchronization objects without blocking the event loop, essential for +coordinating between async and sync code or between processes in the guidellm system. +""" + +from __future__ import annotations + +import asyncio +import contextlib +from datetime import time +from multiprocessing.synchronize import Barrier as ProcessingBarrier +from multiprocessing.synchronize import Event as ProcessingEvent +from threading import Barrier as ThreadingBarrier +from threading import Event as ThreadingEvent +from typing import Annotated, Union + +from typing_extensions import TypeAlias + +__all__ = [ + "SyncObjectTypesAlias", + "wait_for_sync_barrier", + "wait_for_sync_event", + "wait_for_sync_objects", +] + + +SyncObjectTypesAlias: TypeAlias = Annotated[ + Union[ThreadingEvent, ProcessingEvent, ThreadingBarrier, ProcessingBarrier], + "Type alias for threading and multiprocessing synchronization object types", +] + + +async def wait_for_sync_event( + event: ThreadingEvent | ProcessingEvent, + poll_interval: float, +) -> None: + """ + Asynchronously wait for a threading or multiprocessing Event to be set. + + This function polls the event at regular intervals without blocking the async + event loop, allowing other async tasks to continue executing while waiting. + + :param event: The Event object to wait for (threading or multiprocessing) + :param poll_interval: Time in seconds between polling checks + :raises asyncio.CancelledError: If the async task is cancelled + """ + stop = ThreadingEvent() + + def _watch(): + try: + while not stop.is_set(): + if event.wait(timeout=poll_interval): + return + except Exception as err: # noqa: BLE001 + if stop.is_set(): + return # Ignore error if we should have stopped + raise err + + try: + await asyncio.to_thread(_watch) + except asyncio.CancelledError: + stop.set() + raise + + +async def wait_for_sync_barrier( + barrier: ThreadingBarrier | ProcessingBarrier, + poll_interval: float, +) -> None: + """ + Asynchronously wait for a threading or multiprocessing Barrier to be reached. + + This function polls the barrier at regular intervals without blocking the async + event loop, allowing other async tasks to continue executing while waiting. 
+ + :param barrier: The Barrier object to wait for (threading or multiprocessing) + :param poll_interval: Time in seconds between polling checks + :raises asyncio.CancelledError: If the async task is cancelled + """ + stop = ThreadingEvent() + barrier_broken = ThreadingEvent() + + def _wait_indefinite(): + try: + # wait forever, count on barrier broken event to exit + barrier.wait() + barrier_broken.set() + except Exception as err: + if stop.is_set(): + return # Ignore error if we should have stopped + raise err + + def _watch(): + while not barrier_broken.is_set(): + if stop.is_set(): + with contextlib.suppress(Exception): + if not barrier.broken: + barrier.abort() + break + time.sleep(poll_interval) + + try: + await asyncio.gather( + asyncio.to_thread(_wait_indefinite), + asyncio.to_thread(_watch), + ) + except asyncio.CancelledError: + stop.set() + raise + + +async def wait_for_sync_objects( + objects: SyncObjectTypesAlias + | list[SyncObjectTypesAlias] + | dict[str, SyncObjectTypesAlias], + poll_interval: float = 0.1, +) -> int | str: + """ + Asynchronously wait for the first synchronization object to complete. + + This function waits for the first Event to be set or Barrier to be reached + from a collection of synchronization objects. It returns immediately when + any object completes and cancels waiting on the remaining objects. + + :param objects: Single sync object, list of objects, or dict mapping names + to objects + :param poll_interval: Time in seconds between polling checks for each object + :return: Index (for list/single) or key name (for dict) of the first + completed object + :raises asyncio.CancelledError: If the async task is cancelled + """ + if isinstance(objects, dict): + keys = list(objects.keys()) + objects = list(objects.values()) + elif isinstance(objects, list): + keys = list(range(len(objects))) + else: + keys = [0] + objects = [objects] + + tasks = [ + asyncio.create_task( + wait_for_sync_barrier(obj, poll_interval) + if isinstance(obj, (ThreadingBarrier, ProcessingBarrier)) + else wait_for_sync_event(obj, poll_interval) + ) + for obj in objects + ] + + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + + # Cancel the remaining pending tasks + for pend in pending: + pend.cancel() + await asyncio.gather(*pending, return_exceptions=True) + + return keys[tasks.index(list(done)[0])] diff --git a/src/guidellm/utils/text.py b/src/guidellm/utils/text.py index 3b9a2e26..519b46c3 100644 --- a/src/guidellm/utils/text.py +++ b/src/guidellm/utils/text.py @@ -1,9 +1,21 @@ +""" +Text processing utilities for content manipulation and formatting operations. + +Provides comprehensive text processing capabilities including cleaning, filtering, +splitting, loading from various sources, and formatting utilities. Supports loading +text from URLs, compressed files, package resources, and local files with automatic +encoding detection. Includes specialized formatting for display values and text +wrapping operations for consistent presentation across the system. 
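+
+Example (illustrative; assumes a reachable plain-text URL):
+::
+    from guidellm.utils.text import clean_text, load_text, split_text
+
+    text = clean_text(load_text("https://example.com/corpus.txt"))
+    tokens = split_text(text, split_punctuation=True)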
+""" + +from __future__ import annotations + import gzip import re import textwrap from importlib.resources import as_file, files # type: ignore[attr-defined] from pathlib import Path -from typing import Any, Optional, Union +from typing import Any import ftfy import httpx @@ -11,36 +23,86 @@ from guidellm import data as package_data from guidellm.settings import settings +from guidellm.utils.console import Colors __all__ = [ + "MAX_PATH_LENGTH", "EndlessTextCreator", - "camelize_str", "clean_text", "filter_text", + "format_value_display", "is_puncutation", "load_text", "split_text", "split_text_list_by_length", ] -MAX_PATH_LENGTH = 4096 +MAX_PATH_LENGTH: int = 4096 + + +def format_value_display( + value: float, + label: str, + units: str = "", + total_characters: int | None = None, + digits_places: int | None = None, + decimal_places: int | None = None, +) -> str: + """ + Format a numeric value with units and label for consistent display output. + + Creates standardized display strings for metrics and measurements with + configurable precision, width, and color formatting. Supports both + fixed-width and variable-width output for tabular displays. + + :param value: Numeric value to format and display + :param label: Descriptive label for the value + :param units: Units string to append after the value + :param total_characters: Total width for right-aligned output formatting + :param digits_places: Total number of digits for numeric formatting + :param decimal_places: Number of decimal places for numeric precision + :return: Formatted string with value, units, and colored label + """ + if decimal_places is None and digits_places is None: + formatted_number = f"{value}:.0f" + elif digits_places is None: + formatted_number = f"{value:.{decimal_places}f}" + elif decimal_places is None: + formatted_number = f"{value:>{digits_places}f}" + else: + formatted_number = f"{value:>{digits_places}.{decimal_places}f}" + + result = f"{formatted_number}{units} [{Colors.info}]{label}[/{Colors.info}]" + + if total_characters is not None: + total_characters += len(Colors.info) * 2 + 5 + + if len(result) < total_characters: + result = result.rjust(total_characters) + + return result def split_text_list_by_length( text_list: list[Any], - max_characters: Union[int, list[int]], + max_characters: int | list[int], pad_horizontal: bool = True, pad_vertical: bool = True, ) -> list[list[str]]: """ - Split a list of strings into a list of strings, - each with a maximum length of max_characters - - :param text_list: the list of strings to split - :param max_characters: the maximum length of each string - :param pad_horizontal: whether to pad the strings horizontally, defaults to True - :param pad_vertical: whether to pad the strings vertically, defaults to True - :return: a list of strings + Split text strings into wrapped lines with specified maximum character limits. + + Processes each string in the input list by wrapping text to fit within character + limits, with optional padding for consistent formatting in tabular displays. + Supports different character limits per string and uniform padding across results. 
+ + :param text_list: List of strings to process and wrap + :param max_characters: Maximum characters per line, either single value or + per-string limits + :param pad_horizontal: Right-align lines within their character limits + :param pad_vertical: Pad shorter results to match the longest wrapped result + :return: List of wrapped line lists, one per input string + :raises ValueError: If max_characters list length doesn't match text_list length """ if not isinstance(max_characters, list): max_characters = [max_characters] * len(text_list) @@ -76,16 +138,21 @@ def split_text_list_by_length( def filter_text( text: str, - filter_start: Optional[Union[str, int]] = None, - filter_end: Optional[Union[str, int]] = None, + filter_start: str | int | None = None, + filter_end: str | int | None = None, ) -> str: """ - Filter text by start and end strings or indices + Extract text substring using start and end markers or indices. + + Filters text content by locating string markers or using numeric indices + to extract specific portions. Supports flexible filtering for content + extraction and preprocessing operations. - :param text: the text to filter - :param filter_start: the start string or index to filter from - :param filter_end: the end string or index to filter to - :return: the filtered text + :param text: Source text to filter and extract from + :param filter_start: Starting marker string or index position + :param filter_end: Ending marker string or index position + :return: Filtered text substring between specified boundaries + :raises ValueError: If filter indices are invalid or markers not found """ filter_start_index = -1 filter_end_index = -1 @@ -113,10 +180,29 @@ def filter_text( def clean_text(text: str) -> str: + """ + Normalize text by fixing encoding issues and standardizing whitespace. + + Applies Unicode normalization and whitespace standardization for consistent + text processing. Removes excessive whitespace and fixes common encoding problems. + + :param text: Raw text string to clean and normalize + :return: Cleaned text with normalized encoding and whitespace + """ return re.sub(r"\s+", " ", ftfy.fix_text(text)).strip() def split_text(text: str, split_punctuation: bool = False) -> list[str]: + """ + Split text into tokens with optional punctuation separation. + + Tokenizes text into words and optionally separates punctuation marks + for detailed text analysis and processing operations. + + :param text: Text string to tokenize and split + :param split_punctuation: Separate punctuation marks as individual tokens + :return: List of text tokens + """ text = clean_text(text) if split_punctuation: @@ -125,16 +211,20 @@ def split_text(text: str, split_punctuation: bool = False) -> list[str]: return text.split() -def load_text(data: Union[str, Path], encoding: Optional[str] = None) -> str: +def load_text(data: str | Path, encoding: str | None = None) -> str: """ - Load an HTML file from a path or URL - - :param data: the path or URL to load the HTML file from - :type data: Union[str, Path] - :param encoding: the encoding to use when reading the file - :type encoding: str - :return: the HTML content - :rtype: str + Load text content from various sources including URLs, files, and package data. + + Supports loading from HTTP/FTP URLs, local files, compressed archives, package + resources, and raw text strings. Automatically detects source type and applies + appropriate loading strategy with encoding support. 
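+    For example, an https:// location is fetched over HTTP, while a
+    gzip-compressed local file is decompressed before its contents are returned.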
+ + :param data: Source location or raw text - URL, file path, package resource + identifier, or text content + :param encoding: Character encoding for file reading operations + :return: Loaded text content as string + :raises FileNotFoundError: If local file path does not exist + :raises httpx.HTTPStatusError: If URL request fails """ logger.debug("Loading text: {}", data) @@ -180,35 +270,62 @@ def load_text(data: Union[str, Path], encoding: Optional[str] = None) -> str: def is_puncutation(text: str) -> bool: """ - Check if the text is a punctuation + Check if a single character is a punctuation mark. + + Identifies punctuation characters by excluding alphanumeric characters + and whitespace from single-character strings. - :param text: the text to check - :type text: str - :return: True if the text is a punctuation, False otherwise - :rtype: bool + :param text: Single character string to test + :return: True if the character is punctuation, False otherwise """ return len(text) == 1 and not text.isalnum() and not text.isspace() -def camelize_str(snake_case_string: str) -> str: - return (words := snake_case_string.split("_"))[0].lower() + "".join( - word.capitalize() for word in words[1:] - ) +class EndlessTextCreator: + """ + Infinite text generator for load testing and content creation operations. + Provides deterministic text generation by cycling through preprocessed word + tokens from source content. Supports filtering and punctuation handling for + realistic text patterns in benchmarking scenarios. + + Example: + :: + creator = EndlessTextCreator("path/to/source.txt") + generated = creator.create_text(start=0, length=100) + more_text = creator.create_text(start=50, length=200) + """ -class EndlessTextCreator: def __init__( self, - data: Union[str, Path], - filter_start: Optional[Union[str, int]] = None, - filter_end: Optional[Union[str, int]] = None, + data: str | Path, + filter_start: str | int | None = None, + filter_end: str | int | None = None, ): + """ + Initialize text creator with source content and optional filtering. + + :param data: Source text location or content - file path, URL, or raw text + :param filter_start: Starting marker or index for content filtering + :param filter_end: Ending marker or index for content filtering + """ self.data = data self.text = load_text(data) self.filtered_text = filter_text(self.text, filter_start, filter_end) self.words = split_text(self.filtered_text, split_punctuation=True) def create_text(self, start: int, length: int) -> str: + """ + Generate text by cycling through word tokens from the specified position. + + Creates deterministic text sequences by selecting consecutive tokens from + the preprocessed word list, wrapping around when reaching the end. + Maintains proper spacing and punctuation formatting. 
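+        Calls with the same start and length always return identical text,
+        which keeps generated benchmark prompts reproducible across runs.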
+ + :param start: Starting position in the token sequence + :param length: Number of tokens to include in generated text + :return: Generated text string with proper spacing and punctuation + """ text = "" for counter in range(length): diff --git a/tests/unit/utils/dict.py b/tests/unit/utils/dict.py deleted file mode 100644 index 09d93df6..00000000 --- a/tests/unit/utils/dict.py +++ /dev/null @@ -1,71 +0,0 @@ -import pytest - -from guidellm.utils.dict import recursive_key_update - - -def update_str(string): - return string + "_updated" - - -@pytest.mark.smoke -def test_recursive_key_update_updates_keys(): - my_dict = { - "my_key": { - "my_nested_key": {"my_double_nested_key": "someValue"}, - "my_other_nested_key": "someValue", - }, - "my_other_key": "value", - } - my_updated_dict = { - "my_key_updated": { - "my_nested_key_updated": {"my_double_nested_key_updated": "someValue"}, - "my_other_nested_key_updated": "someValue", - }, - "my_other_key_updated": "value", - } - recursive_key_update(my_dict, update_str) - assert my_dict == my_updated_dict - - -def truncate_str_to_ten(string): - return string[:10] - - -@pytest.mark.smoke -def test_recursive_key_update_leaves_unchanged_keys(): - my_dict = { - "my_key": { - "my_nested_key": {"my_double_nested_key": "someValue"}, - "my_other_nested_key": "someValue", - }, - "my_other_key": "value", - } - my_updated_dict = { - "my_key": { - "my_nested_": {"my_double_": "someValue"}, - "my_other_n": "someValue", - }, - "my_other_k": "value", - } - recursive_key_update(my_dict, truncate_str_to_ten) - assert my_dict == my_updated_dict - - -@pytest.mark.smoke -def test_recursive_key_update_updates_dicts_in_list(): - my_dict = { - "my_key": [ - {"my_list_item_key_1": "someValue"}, - {"my_list_item_key_2": "someValue"}, - {"my_list_item_key_3": "someValue"}, - ] - } - my_updated_dict = { - "my_key_updated": [ - {"my_list_item_key_1_updated": "someValue"}, - {"my_list_item_key_2_updated": "someValue"}, - {"my_list_item_key_3_updated": "someValue"}, - ] - } - recursive_key_update(my_dict, update_str) - assert my_dict == my_updated_dict diff --git a/tests/unit/utils/test_auto_importer.py b/tests/unit/utils/test_auto_importer.py new file mode 100644 index 00000000..cc71bce3 --- /dev/null +++ b/tests/unit/utils/test_auto_importer.py @@ -0,0 +1,269 @@ +""" +Unit tests for the auto_importer module. 
+""" + +from __future__ import annotations + +from unittest import mock + +import pytest + +from guidellm.utils import AutoImporterMixin + + +class TestAutoImporterMixin: + """Test suite for AutoImporterMixin functionality.""" + + @pytest.fixture( + params=[ + { + "auto_package": "test.package", + "auto_ignore_modules": None, + "modules": [ + ("test.package.module1", False), + ("test.package.module2", False), + ], + "expected_imports": ["test.package.module1", "test.package.module2"], + }, + { + "auto_package": ("test.package1", "test.package2"), + "auto_ignore_modules": None, + "modules": [ + ("test.package1.moduleA", False), + ("test.package2.moduleB", False), + ], + "expected_imports": ["test.package1.moduleA", "test.package2.moduleB"], + }, + { + "auto_package": "test.package", + "auto_ignore_modules": ("test.package.module1",), + "modules": [ + ("test.package.module1", False), + ("test.package.module2", False), + ], + "expected_imports": ["test.package.module2"], + }, + ], + ids=["single_package", "multiple_packages", "ignored_modules"], + ) + def valid_instances(self, request): + """Fixture providing test data for AutoImporterMixin subclasses.""" + config = request.param + + class TestClass(AutoImporterMixin): + auto_package = config["auto_package"] + auto_ignore_modules = config["auto_ignore_modules"] + + return TestClass, config + + @pytest.mark.smoke + def test_class_signatures(self): + """Test AutoImporterMixin class signatures and attributes.""" + assert hasattr(AutoImporterMixin, "auto_package") + assert hasattr(AutoImporterMixin, "auto_ignore_modules") + assert hasattr(AutoImporterMixin, "auto_imported_modules") + assert hasattr(AutoImporterMixin, "auto_import_package_modules") + assert callable(AutoImporterMixin.auto_import_package_modules) + + # Test default class variables + assert AutoImporterMixin.auto_package is None + assert AutoImporterMixin.auto_ignore_modules is None + assert AutoImporterMixin.auto_imported_modules is None + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test AutoImporterMixin subclass initialization.""" + test_class, config = valid_instances + assert issubclass(test_class, AutoImporterMixin) + assert test_class.auto_package == config["auto_package"] + assert test_class.auto_ignore_modules == config["auto_ignore_modules"] + assert test_class.auto_imported_modules is None + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test AutoImporterMixin with missing auto_package.""" + + class TestClass(AutoImporterMixin): + pass + + with pytest.raises(ValueError, match="auto_package.*must be set"): + TestClass.auto_import_package_modules() + + @pytest.mark.smoke + @mock.patch("importlib.import_module") + @mock.patch("pkgutil.walk_packages") + def test_auto_import_package_modules(self, mock_walk, mock_import, valid_instances): + """Test auto_import_package_modules core functionality.""" + test_class, config = valid_instances + + # Setup mocks based on config + packages = {} + modules = {} + + if isinstance(config["auto_package"], tuple): + for pkg in config["auto_package"]: + pkg_path = pkg.replace(".", "/") + packages[pkg] = MockHelper.create_mock_package(pkg, pkg_path) + else: + pkg = config["auto_package"] + packages[pkg] = MockHelper.create_mock_package(pkg, pkg.replace(".", "/")) + + for module_name, is_pkg in config["modules"]: + if not is_pkg: + modules[module_name] = MockHelper.create_mock_module(module_name) + + mock_import.side_effect = lambda name: {**packages, **modules}.get( + name, 
mock.MagicMock() + ) + + def walk_side_effect(path, prefix): + return [ + (None, module_name, is_pkg) + for module_name, is_pkg in config["modules"] + if module_name.startswith(prefix) + ] + + mock_walk.side_effect = walk_side_effect + + # Execute + test_class.auto_import_package_modules() + + # Verify + assert test_class.auto_imported_modules == config["expected_imports"] + + # Verify package imports + if isinstance(config["auto_package"], tuple): + for pkg in config["auto_package"]: + mock_import.assert_any_call(pkg) + else: + mock_import.assert_any_call(config["auto_package"]) + + # Verify expected module imports + for expected_module in config["expected_imports"]: + mock_import.assert_any_call(expected_module) + + @pytest.mark.sanity + @mock.patch("importlib.import_module") + @mock.patch("pkgutil.walk_packages") + def test_auto_import_package_modules_invalid(self, mock_walk, mock_import): + """Test auto_import_package_modules with invalid configurations.""" + + class TestClass(AutoImporterMixin): + auto_package = "test.package" + + # Test import error handling + mock_import.side_effect = ImportError("Module not found") + + with pytest.raises(ImportError): + TestClass.auto_import_package_modules() + + @pytest.mark.sanity + @mock.patch("importlib.import_module") + @mock.patch("pkgutil.walk_packages") + def test_skip_packages(self, mock_walk, mock_import): + """Test that packages (is_pkg=True) are skipped.""" + + class TestClass(AutoImporterMixin): + auto_package = "test.package" + + # Setup mocks + mock_package = MockHelper.create_mock_package("test.package", "test/package") + mock_module = MockHelper.create_mock_module("test.package.module") + + mock_import.side_effect = lambda name: { + "test.package": mock_package, + "test.package.module": mock_module, + }[name] + + mock_walk.return_value = [ + (None, "test.package.subpackage", True), + (None, "test.package.module", False), + ] + + # Execute + TestClass.auto_import_package_modules() + + # Verify + assert TestClass.auto_imported_modules == ["test.package.module"] + mock_import.assert_any_call("test.package.module") + # subpackage should not be imported + with pytest.raises(AssertionError): + mock_import.assert_any_call("test.package.subpackage") + + @pytest.mark.sanity + @mock.patch("sys.modules", {"test.package.existing": mock.MagicMock()}) + @mock.patch("importlib.import_module") + @mock.patch("pkgutil.walk_packages") + def test_skip_already_imported_modules(self, mock_walk, mock_import): + """Test that modules already in sys.modules are tracked but not re-imported.""" + + class TestClass(AutoImporterMixin): + auto_package = "test.package" + + # Setup mocks + mock_package = MockHelper.create_mock_package("test.package", "test/package") + mock_import.side_effect = lambda name: { + "test.package": mock_package, + }.get(name, mock.MagicMock()) + + mock_walk.return_value = [ + (None, "test.package.existing", False), + ] + + # Execute + TestClass.auto_import_package_modules() + + # Verify + assert TestClass.auto_imported_modules == ["test.package.existing"] + mock_import.assert_called_once_with("test.package") + with pytest.raises(AssertionError): + mock_import.assert_any_call("test.package.existing") + + @pytest.mark.sanity + @mock.patch("importlib.import_module") + @mock.patch("pkgutil.walk_packages") + def test_prevent_duplicate_module_imports(self, mock_walk, mock_import): + """Test that modules already in auto_imported_modules are not re-imported.""" + + class TestClass(AutoImporterMixin): + auto_package = "test.package" + + # Setup 
mocks + mock_package = MockHelper.create_mock_package("test.package", "test/package") + mock_module = MockHelper.create_mock_module("test.package.module") + + mock_import.side_effect = lambda name: { + "test.package": mock_package, + "test.package.module": mock_module, + }[name] + + mock_walk.return_value = [ + (None, "test.package.module", False), + (None, "test.package.module", False), + ] + + # Execute + TestClass.auto_import_package_modules() + + # Verify + assert TestClass.auto_imported_modules == ["test.package.module"] + assert mock_import.call_count == 2 # Package + module (not duplicate) + + +class MockHelper: + """Helper class to create consistent mock objects for testing.""" + + @staticmethod + def create_mock_package(name: str, path: str): + """Create a mock package with required attributes.""" + package = mock.MagicMock() + package.__name__ = name + package.__path__ = [path] + return package + + @staticmethod + def create_mock_module(name: str): + """Create a mock module with required attributes.""" + module = mock.MagicMock() + module.__name__ = name + return module diff --git a/tests/unit/utils/test_encoding.py b/tests/unit/utils/test_encoding.py new file mode 100644 index 00000000..da1f63ee --- /dev/null +++ b/tests/unit/utils/test_encoding.py @@ -0,0 +1,556 @@ +from __future__ import annotations + +import uuid +from typing import Any, Generic, TypeVar + +import pytest +from pydantic import BaseModel, Field + +from guidellm.backend.objects import ( + GenerationRequest, + GenerationResponse, +) +from guidellm.scheduler.objects import RequestSchedulerTimings, ScheduledRequestInfo +from guidellm.utils.encoding import Encoder, MessageEncoding, Serializer + + +class SampleModel(BaseModel): + """Sample Pydantic model for testing.""" + + name: str = Field(description="Name field for testing") + value: int = Field(description="Value field for testing") + + +class SampleModelSubclass(SampleModel): + """Subclass of SampleModel for testing.""" + + extra_field: str + + +SampleModelT = TypeVar("SampleModelT", bound=SampleModel) + + +class ComplexModel(BaseModel, Generic[SampleModelT]): + """Complex Pydantic model for testing.""" + + items: list[str] = Field(default_factory=list) + metadata: dict[str, Any] = Field(default_factory=dict) + nested: SampleModelT | None = Field(default=None) + + +class GenricModelWrapper(Generic[SampleModelT]): + """Simulates a layered generic type.""" + + def method(self, **kwargs) -> ComplexModel[SampleModelT]: + return ComplexModel[SampleModelT](**kwargs) + + +class TestMessageEncoding: + """Test suite for MessageEncoding class.""" + + @pytest.fixture( + params=[ + {"serialization": None, "encoding": None}, + {"serialization": "dict", "encoding": None}, + {"serialization": "sequence", "encoding": None}, + {"serialization": None, "encoding": "msgpack"}, + {"serialization": "dict", "encoding": "msgpack"}, + {"serialization": "sequence", "encoding": "msgpack"}, + {"serialization": None, "encoding": "msgspec"}, + {"serialization": "dict", "encoding": "msgspec"}, + {"serialization": "sequence", "encoding": "msgspec"}, + {"serialization": None, "encoding": ["msgspec", "msgpack"]}, + {"serialization": "dict", "encoding": ["msgspec", "msgpack"]}, + ], + ids=[ + "no_serialization_no_encoding", + "dict_serialization_no_encoding", + "str_serialization_no_encoding", + "no_serialization_msgpack", + "dict_serialization_msgpack", + "str_serialization_msgpack", + "no_serialization_msgspec", + "dict_serialization_msgspec", + "str_serialization_msgspec", + 
"no_serialization_encoding_list", + "dict_serialization_encoding_list", + ], + ) + def valid_instances(self, request): + """Fixture providing test data for MessageEncoding.""" + constructor_args = request.param + try: + instance = MessageEncoding(**constructor_args) + return instance, constructor_args + except ImportError: + pytest.skip("Required encoding library not available") + + @pytest.mark.smoke + def test_class_signatures(self): + """Test MessageEncoding inheritance and type relationships.""" + assert issubclass(MessageEncoding, Generic) + assert hasattr(MessageEncoding, "DEFAULT_ENCODING_PREFERENCE") + assert isinstance(MessageEncoding.DEFAULT_ENCODING_PREFERENCE, list) + assert MessageEncoding.DEFAULT_ENCODING_PREFERENCE == ["msgspec", "msgpack"] + + # Check classmethods + assert hasattr(MessageEncoding, "encode_message") + assert callable(MessageEncoding.encode_message) + assert hasattr(MessageEncoding, "decode_message") + assert callable(MessageEncoding.decode_message) + + # Check instance methods + assert hasattr(MessageEncoding, "__init__") + assert hasattr(MessageEncoding, "register_pydantic") + assert hasattr(MessageEncoding, "encode") + assert hasattr(MessageEncoding, "decode") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test MessageEncoding initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, MessageEncoding) + assert hasattr(instance, "serializer") + assert isinstance(instance.serializer, Serializer) + assert instance.serializer.serialization == constructor_args["serialization"] + assert hasattr(instance, "encoder") + assert isinstance(instance.encoder, Encoder) + + expected_encoding = constructor_args["encoding"] + if isinstance(expected_encoding, list): + assert instance.encoder.encoding in expected_encoding + else: + assert instance.encoder.encoding == expected_encoding + + @pytest.mark.smoke + @pytest.mark.parametrize( + "obj", + [ + None, + 0, + 0.0, + "0.1.2.3", + [0, 0.0, "0.1.2.3", None], + (0, 0.0, "0.1.2.3", None), + {"key1": 0, "key2": 0.0, "key3": "0.1.2.3", "key4": None}, + ], + ) + def test_encode_decode_python(self, valid_instances, obj: Any): + """Test MessageEncoding encode/decode with comprehensive data types.""" + instance, constructor_args = valid_instances + + message = instance.encode(obj) + decoded = instance.decode(message) + + if isinstance(obj, tuple): + assert list(decoded) == list(obj) + else: + assert decoded == obj + + @pytest.mark.smoke + @pytest.mark.parametrize( + "obj", + [ + SampleModel(name="sample", value=123), + ComplexModel( + items=["item1", "item2"], + metadata={"key": "value"}, + nested=SampleModel(name="sample", value=123), + ), + ( + SampleModel(name="sample", value=123), + None, + ComplexModel( + items=["item1", "item2"], + metadata={"key": "value"}, + nested=SampleModel(name="sample", value=123), + ), + ), + { + "key1": SampleModel(name="sample", value=123), + "key2": None, + "key3": ComplexModel( + items=["item1", "item2"], + metadata={"key": "value"}, + nested=SampleModel(name="sample", value=123), + ), + }, + ], + ) + def test_encode_decode_pydantic(self, valid_instances, obj: Any): + """Test MessageEncoding encode/decode with Pydantic models.""" + instance, constructor_args = valid_instances + + if ( + constructor_args["serialization"] is None + and constructor_args["encoding"] is not None + ): + # msgpack/msgspec don't support Pydantic models natively + pytest.skip("Skipping unsupported Pydantic serialization/encoding combo") + + # Register 
Pydantic models for proper serialization + instance.register_pydantic(SampleModel) + instance.register_pydantic(ComplexModel) + + message = instance.encode(obj) + decoded = instance.decode(message) + + if isinstance(obj, tuple): + assert list(decoded) == list(obj) + else: + assert decoded == obj + + @pytest.mark.smoke + @pytest.mark.parametrize( + "obj", + [ + ( + None, + GenerationRequest(content="test content"), + ScheduledRequestInfo( + scheduler_timings=RequestSchedulerTimings( + targeted_start=1.0, + queued=0.1, + dequeued=0.2, + scheduled_at=0.3, + resolve_start=1.1, + resolve_end=1.5, + finalized=1.6, + ) + ), + ), + ( + GenerationResponse( + request_id=str(uuid.uuid4()), + request_args={}, + value="test response", + request_prompt_tokens=2, + request_output_tokens=3, + response_prompt_tokens=4, + response_output_tokens=6, + ), + GenerationRequest(content="test content"), + ScheduledRequestInfo( + scheduler_timings=RequestSchedulerTimings( + targeted_start=1.0, + queued=0.1, + dequeued=0.2, + scheduled_at=0.3, + resolve_start=1.1, + resolve_end=1.5, + finalized=1.6, + ) + ), + ), + ], + ) + def test_encode_decode_generative(self, valid_instances, obj: Any): + """Test MessageEncoding encode/decode with generative models.""" + instance, constructor_args = valid_instances + + if ( + constructor_args["serialization"] is None + and constructor_args["encoding"] is not None + ): + # msgpack/msgspec don't support Pydantic models natively + pytest.skip("Skipping unsupported Pydantic serialization/encoding combo") + + instance.register_pydantic(GenerationRequest) + instance.register_pydantic(GenerationResponse) + instance.register_pydantic(ScheduledRequestInfo) + + message = instance.encode(obj) + decoded = instance.decode(message) + + assert list(decoded) == list(obj) + + @pytest.mark.smoke + @pytest.mark.parametrize( + "serialization", + [ + None, + "dict", + "sequence", + ], + ) + @pytest.mark.parametrize( + "encoding", + [None, "msgpack", "msgspec"], + ) + @pytest.mark.parametrize( + "obj", + [ + "0.1.2.3", + [0, 0.0, "0.1.2.3", None, SampleModel(name="sample", value=123)], + { + "key1": 0, + "key2": 0.0, + "key3": "0.1.2.3", + "key4": None, + "key5": ComplexModel( + items=["item1", "item2"], + metadata={"key": "value"}, + nested=SampleModel(name="sample", value=123), + ), + }, + ], + ) + def test_encode_decode_message(self, serialization, encoding, obj): + """Test MessageEncoding.encode_message and decode_message class methods.""" + if encoding is not None and serialization is None and obj != "0.1.2.3": + pytest.skip("Skipping unsupported serialization/encoding combo") + + try: + serializer = Serializer(serialization) if serialization else None + encoder = Encoder(encoding) if encoding else None + + message = MessageEncoding.encode_message(obj, serializer, encoder) + decoded = MessageEncoding.decode_message(message, serializer, encoder) + + if isinstance(obj, tuple): + assert list(decoded) == list(obj) + else: + assert decoded == obj + except ImportError: + pytest.skip("Required encoding library not available") + + @pytest.mark.smoke + def test_register_pydantic(self): + """Test MessageEncoding.register_pydantic functionality.""" + instance = MessageEncoding(serialization="dict", encoding=None) + assert len(instance.serializer.pydantic_registry) == 0 + instance.register_pydantic(SampleModel) + assert len(instance.serializer.pydantic_registry) == 1 + assert ( + instance.serializer.pydantic_registry.values().__iter__().__next__() + is SampleModel + ) + + @pytest.mark.sanity + def 
test_invalid_initialization_values(self): + """Test invalid initialization (unsupported encoding).""" + inst = MessageEncoding(serialization="dict", encoding=["invalid_encoding"]) # type: ignore[arg-type] + assert inst.encoder.encoding is None + with pytest.raises(ImportError): + MessageEncoding(serialization="dict", encoding="invalid") # type: ignore[arg-type] + + +class TestEncoder: + """Test suite for Encoder class.""" + + @pytest.fixture( + params=[ + None, + "msgpack", + "msgspec", + ["msgspec", "msgpack"], + ["msgpack", "msgspec"], + ], + ids=[ + "none", + "msgpack", + "msgspec", + "list_pref_msgspec_first", + "list_pref_msgpack_first", + ], + ) + def valid_instances(self, request): + args = request.param + try: + inst = Encoder(args) + except ImportError: + pytest.skip("Encoding backend missing") + return inst, args + + @pytest.mark.smoke + def test_class_signatures(self): + assert hasattr(Encoder, "encode") + assert hasattr(Encoder, "decode") + assert hasattr(Encoder, "_resolve_encoding") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + inst, args = valid_instances + assert isinstance(inst, Encoder) + if isinstance(args, list): + assert inst.encoding in args or inst.encoding is None + else: + assert inst.encoding == args + + @pytest.mark.sanity + def test_invalid_initialization_values(self): + with pytest.raises(ImportError): + Encoder("invalid") # type: ignore[arg-type] + + @pytest.mark.smoke + @pytest.mark.parametrize("obj", [None, 0, 1.2, "text", [1, 2], {"a": 1}]) + def test_encode_decode(self, valid_instances, obj): + inst, _ = valid_instances + msg = inst.encode(obj) + out = inst.decode(msg) + assert out == obj + + +class TestSerializer: + """Test suite for Serializer class.""" + + @pytest.fixture(params=[None, "dict", "sequence"], ids=["none", "dict", "sequence"]) + def valid_instances(self, request): + inst = Serializer(request.param) + return inst, request.param + + @pytest.mark.smoke + def test_class_signatures(self): + assert hasattr(Serializer, "serialize") + assert hasattr(Serializer, "deserialize") + assert hasattr(Serializer, "register_pydantic") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + inst, mode = valid_instances + assert isinstance(inst, Serializer) + assert inst.serialization == mode + + @pytest.mark.smoke + def test_register_pydantic(self, valid_instances): + inst, _ = valid_instances + assert len(inst.pydantic_registry) == 0 + inst.register_pydantic(SampleModel) + assert len(inst.pydantic_registry) == 1 + + @pytest.mark.smoke + @pytest.mark.parametrize( + "obj", + [ + 1, + "str_val", + [1, 2, 3], + SampleModel(name="x", value=1), + {"k": SampleModel(name="y", value=2)}, + ], + ) + def test_serialize_deserialize(self, valid_instances, obj): + inst, mode = valid_instances + inst.register_pydantic(SampleModel) + msg = inst.serialize(obj) + out = inst.deserialize(msg) + if isinstance(obj, list): + assert list(out) == obj + else: + assert out == obj + + @pytest.mark.regression + def test_sequence_mapping_roundtrip(self): + inst = Serializer("sequence") + inst.register_pydantic(SampleModel) + data = { + "a": SampleModel(name="a", value=1), + "b": SampleModel(name="b", value=2), + } + msg = inst.serialize(data) + out = inst.deserialize(msg) + assert out == data + + @pytest.mark.sanity + def test_to_from_dict_variations(self): + inst = Serializer("dict") + inst.register_pydantic(SampleModel) + model = SampleModel(name="n", value=3) + lst = [model, 5] + mp = {"k1": model, "k2": 9} + assert 
inst.from_dict(inst.to_dict(model)) == model + assert inst.from_dict(inst.to_dict(lst)) == lst + assert inst.from_dict(inst.to_dict(mp)) == mp + + @pytest.mark.sanity + @pytest.mark.parametrize( + "collection", + [ + [SampleModel(name="x", value=1), 2, 3], + (SampleModel(name="y", value=2), None), + ], + ) + def test_to_from_sequence_collections(self, collection): + inst = Serializer("sequence") + inst.register_pydantic(SampleModel) + seq = inst.to_sequence(collection) + out = inst.from_sequence(seq) + assert len(out) == len(collection) + assert all(a == b for a, b in zip(out, list(collection))) + + @pytest.mark.sanity + def test_to_from_sequence_mapping(self): + inst = Serializer("sequence") + inst.register_pydantic(SampleModel) + data = {"k": SampleModel(name="z", value=7), "j": 1} + seq = inst.to_sequence(data) + out = inst.from_sequence(seq) + assert out == data + + @pytest.mark.sanity + def test_sequence_multiple_root_raises(self): + inst = Serializer("sequence") + part1 = inst.pack_next_sequence("python", inst.to_sequence_python(1), None) + part2 = inst.pack_next_sequence("python", inst.to_sequence_python(2), None) + with pytest.raises(ValueError): + inst.from_sequence(part1 + part2) # type: ignore[operator] + + @pytest.mark.sanity + def test_pack_next_sequence_type_mismatch(self): + inst = Serializer("sequence") + first_payload = inst.to_sequence_python(1) + first = inst.pack_next_sequence("python", first_payload, None) + bad_payload: Any = ( + first_payload.decode() if isinstance(first_payload, bytes) else b"1" + ) + with pytest.raises(ValueError): + inst.pack_next_sequence("python", bad_payload, first) + + @pytest.mark.sanity + def test_unpack_invalid(self): + inst = Serializer("sequence") + with pytest.raises(ValueError): + inst.unpack_next_sequence("X|3|abc") + with pytest.raises(ValueError): + inst.unpack_next_sequence("p?bad") + + @pytest.mark.sanity + def test_dynamic_import_load_pydantic(self, monkeypatch): + inst = Serializer("dict") + inst.pydantic_registry.clear() + sample = SampleModel(name="dyn", value=5) + dumped = inst.to_dict(sample) + inst.pydantic_registry.clear() + restored = inst.from_dict(dumped) + assert restored == sample + + @pytest.mark.sanity + def test_generic_model(self): + inst = Serializer("dict") + inst.register_pydantic(ComplexModel[SampleModelSubclass]) + nested = ComplexModel[SampleModelSubclass]( + items=["i1", "i2"], + metadata={"m": 1}, + nested=SampleModelSubclass(name="nested", value=10, extra_field="extra"), + ) + dumped = inst.to_dict(nested) + restored = inst.from_dict(dumped) + assert restored == nested + + @pytest.mark.sanity + @pytest.mark.xfail( + reason="A generic object returned by a generic method loses its type args" + ) + def test_generic_emitted_type(self): + generic_instance = GenricModelWrapper[SampleModelSubclass]() + + inst = Serializer("dict") + inst.register_pydantic(ComplexModel[SampleModelSubclass]) + nested = generic_instance.method( + items=["i1", "i2"], + metadata={"m": 1}, + nested=SampleModelSubclass(name="nested", value=10, extra_field="extra"), + ) + dumped = inst.to_dict(nested) + restored = inst.from_dict(dumped) + assert restored == nested diff --git a/tests/unit/utils/test_functions.py b/tests/unit/utils/test_functions.py new file mode 100644 index 00000000..3b353759 --- /dev/null +++ b/tests/unit/utils/test_functions.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +from datetime import datetime + +import pytest + +from guidellm.utils.functions import ( + all_defined, + safe_add, + safe_divide, + 
safe_format_timestamp, + safe_getattr, + safe_multiply, +) + + +class TestAllDefined: + """Test suite for all_defined function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("values", "expected"), + [ + ((1, 2, 3), True), + (("test", "hello"), True), + ((0, False, ""), True), + ((1, None, 3), False), + ((None,), False), + ((None, None), False), + ((), True), + ], + ) + def test_invocation(self, values, expected): + """Test all_defined with valid inputs.""" + result = all_defined(*values) + assert result == expected + + @pytest.mark.sanity + def test_mixed_types(self): + """Test all_defined with mixed data types.""" + result = all_defined(1, "test", [], {}, 0.0, False) + assert result is True + + result = all_defined(1, "test", None, {}) + assert result is False + + +class TestSafeGetattr: + """Test suite for safe_getattr function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("obj", "attr", "default", "expected"), + [ + (None, "any_attr", "default_val", "default_val"), + (None, "any_attr", None, None), + ("test_string", "nonexistent", "default_val", "default_val"), + ], + ) + def test_invocation(self, obj, attr, default, expected): + """Test safe_getattr with valid inputs.""" + result = safe_getattr(obj, attr, default) + assert result == expected + + @pytest.mark.smoke + def test_with_object(self): + """Test safe_getattr with actual object attributes.""" + + class TestObj: + test_attr = "test_value" + + obj = TestObj() + result = safe_getattr(obj, "test_attr", "default") + assert result == "test_value" + + result = safe_getattr(obj, "missing_attr", "default") + assert result == "default" + + # Test with method attribute + result = safe_getattr("test_string", "upper", None) + assert callable(result) + assert result() == "TEST_STRING" + + +class TestSafeDivide: + """Test suite for safe_divide function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("numerator", "denominator", "num_default", "den_default", "expected"), + [ + (10, 2, 0.0, 1.0, 5.0), + (None, 2, 6.0, 1.0, 3.0), + (10, None, 0.0, 5.0, 2.0), + (None, None, 8.0, 4.0, 2.0), + (10, 0, 0.0, 1.0, 10 / 1e-10), + ], + ) + def test_invocation( + self, numerator, denominator, num_default, den_default, expected + ): + """Test safe_divide with valid inputs.""" + result = safe_divide(numerator, denominator, num_default, den_default) + assert result == pytest.approx(expected, rel=1e-6) + + @pytest.mark.sanity + def test_zero_division_protection(self): + """Test safe_divide protection against zero division.""" + result = safe_divide(10, 0) + assert result == 10 / 1e-10 + + result = safe_divide(5, None, den_default=0) + assert result == 5 / 1e-10 + + +class TestSafeMultiply: + """Test suite for safe_multiply function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("values", "default", "expected"), + [ + ((2, 3, 4), 1.0, 24.0), + ((2, None, 4), 1.0, 8.0), + ((None, None), 5.0, 5.0), + ((), 3.0, 3.0), + ((2, 3, None, 5), 2.0, 60.0), + ], + ) + def test_invocation(self, values, default, expected): + """Test safe_multiply with valid inputs.""" + result = safe_multiply(*values, default=default) + assert result == expected + + @pytest.mark.sanity + def test_with_zero(self): + """Test safe_multiply with zero values.""" + result = safe_multiply(2, 0, 3, default=1.0) + assert result == 0.0 + + result = safe_multiply(None, 0, None, default=5.0) + assert result == 0.0 + + +class TestSafeAdd: + """Test suite for safe_add function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("values", "signs", 
"default", "expected"), + [ + ((1, 2, 3), None, 0.0, 6.0), + ((1, None, 3), None, 5.0, 9.0), + ((10, 5), [1, -1], 0.0, 5.0), + ((None, None), [1, -1], 2.0, 0.0), + ((), None, 3.0, 3.0), + ((1, 2, 3), [1, 1, -1], 0.0, 0.0), + ], + ) + def test_invocation(self, values, signs, default, expected): + """Test safe_add with valid inputs.""" + result = safe_add(*values, signs=signs, default=default) + assert result == expected + + @pytest.mark.sanity + def test_invalid_signs_length(self): + """Test safe_add with invalid signs length.""" + with pytest.raises( + ValueError, match="Length of signs must match length of values" + ): + safe_add(1, 2, 3, signs=[1, -1]) + + @pytest.mark.sanity + def test_single_value(self): + """Test safe_add with single value.""" + result = safe_add(5, default=1.0) + assert result == 5.0 + + result = safe_add(None, default=3.0) + assert result == 3.0 + + +class TestSafeFormatTimestamp: + """Test suite for safe_format_timestamp function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("timestamp", "format_", "default", "expected"), + [ + (1609459200.0, "%Y-%m-%d", "N/A", "2020-12-31"), + (1609459200.0, "%H:%M:%S", "N/A", "19:00:00"), + (None, "%H:%M:%S", "N/A", "N/A"), + (-1, "%H:%M:%S", "N/A", "N/A"), + (2**32, "%H:%M:%S", "N/A", "N/A"), + ], + ) + def test_invocation(self, timestamp, format_, default, expected): + """Test safe_format_timestamp with valid inputs.""" + result = safe_format_timestamp(timestamp, format_, default) + assert result == expected + + @pytest.mark.sanity + def test_edge_cases(self): + """Test safe_format_timestamp with edge case timestamps.""" + result = safe_format_timestamp(0.0, "%Y", "N/A") + assert result == "1969" + + result = safe_format_timestamp(1.0, "%Y", "N/A") + assert result == "1969" + + result = safe_format_timestamp(2**31 - 1, "%Y", "N/A") + expected_year = datetime.fromtimestamp(2**31 - 1).strftime("%Y") + assert result == expected_year + + @pytest.mark.sanity + def test_invalid_timestamp_ranges(self): + """Test safe_format_timestamp with invalid timestamp ranges.""" + result = safe_format_timestamp(2**31 + 1, "%Y", "ERROR") + assert result == "ERROR" + + result = safe_format_timestamp(-1000, "%Y", "ERROR") + assert result == "ERROR" diff --git a/tests/unit/utils/test_messaging.py b/tests/unit/utils/test_messaging.py new file mode 100644 index 00000000..d6627e88 --- /dev/null +++ b/tests/unit/utils/test_messaging.py @@ -0,0 +1,974 @@ +from __future__ import annotations + +import asyncio +import multiprocessing +import threading +from functools import wraps +from typing import Any, TypeVar + +import culsans +import pytest +from pydantic import BaseModel + +from guidellm.backend import ( + GenerationRequest, + GenerationResponse, +) +from guidellm.scheduler import ScheduledRequestInfo +from guidellm.utils import ( + InterProcessMessaging, + InterProcessMessagingManagerQueue, + InterProcessMessagingPipe, + InterProcessMessagingQueue, +) +from guidellm.utils.messaging import ReceiveMessageT, SendMessageT + + +def async_timeout(delay: float): + """Decorator to add timeout to async test functions.""" + + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +class MockMessage(BaseModel): + content: str + num: int + + +class MockProcessTarget: + """Mock process target for testing.""" + + def __init__( + self, + messaging: InterProcessMessaging, + num_messages: int, + worker_index: int = 0, + ): + 
self.messaging = messaging + self.num_messages = num_messages + self.worker_index = worker_index + + def run(self): + loop = asyncio.new_event_loop() + + try: + asyncio.set_event_loop(loop) + asyncio.run(asyncio.wait_for(self._async_runner(), timeout=10.0)) + except RuntimeError: + pass + finally: + loop.close() + + async def _async_runner(self): + await self.messaging.start( + pydantic_models=[ + MockMessage, + GenerationRequest, + GenerationResponse, + ScheduledRequestInfo, + ], + ) + + try: + for _ in range(self.num_messages): + obj = await self.messaging.get(timeout=2.0) + await self.messaging.put(obj, timeout=2.0) + finally: + await self.messaging.stop() + + +@pytest.fixture( + params=[ + {"ctx_name": "fork"}, + {"ctx_name": "spawn"}, + ], + ids=["fork_ctx", "spawn_ctx"], +) +def multiprocessing_contexts(request): + context = multiprocessing.get_context(request.param["ctx_name"]) + manager = context.Manager() + try: + yield manager, context + finally: + manager.shutdown() + + +def test_send_message_type(): + """Test that SendMessageT is filled out correctly as a TypeVar.""" + assert isinstance(SendMessageT, type(TypeVar("test"))) + assert SendMessageT.__name__ == "SendMessageT" + assert SendMessageT.__bound__ is Any + assert SendMessageT.__constraints__ == () + + +def test_receive_message_type(): + """Test that ReceiveMessageT is filled out correctly as a TypeVar.""" + assert isinstance(ReceiveMessageT, type(TypeVar("test"))) + assert ReceiveMessageT.__name__ == "ReceiveMessageT" + assert ReceiveMessageT.__bound__ is Any + assert ReceiveMessageT.__constraints__ == () + + +class TestInterProcessMessaging: + """Test suite for InterProcessMessaging abstract base class.""" + + @pytest.mark.smoke + def test_class_signatures(self): + """Test InterProcessMessaging abstract class signatures.""" + assert hasattr(InterProcessMessaging, "__init__") + assert hasattr(InterProcessMessaging, "create_worker_copy") + assert hasattr(InterProcessMessaging, "create_send_messages_threads") + assert hasattr(InterProcessMessaging, "create_receive_messages_threads") + assert hasattr(InterProcessMessaging, "start") + assert hasattr(InterProcessMessaging, "stop") + assert hasattr(InterProcessMessaging, "get") + assert hasattr(InterProcessMessaging, "put") + + # Check abstract methods + assert getattr( + InterProcessMessaging.create_worker_copy, "__isabstractmethod__", False + ) + assert getattr( + InterProcessMessaging.create_send_messages_threads, + "__isabstractmethod__", + False, + ) + assert getattr( + InterProcessMessaging.create_receive_messages_threads, + "__isabstractmethod__", + False, + ) + + @pytest.mark.smoke + def test_cannot_instantiate_directly(self): + """Test InterProcessMessaging cannot be instantiated directly.""" + with pytest.raises(TypeError): + InterProcessMessaging() + + +class TestInterProcessMessagingQueue: + """Test suite for InterProcessMessagingQueue.""" + + @pytest.fixture( + params=[ + { + "serialization": "dict", + "encoding": None, + "max_pending_size": None, + "max_done_size": None, + "worker_index": None, + }, + { + "serialization": "sequence", + "encoding": None, + "max_pending_size": 10, + "max_buffer_send_size": 2, + "max_done_size": 5, + "max_buffer_receive_size": 3, + "worker_index": None, + }, + { + "serialization": None, + "encoding": None, + "max_pending_size": None, + "max_done_size": None, + "worker_index": None, + }, + ], + ) + def valid_instances(self, multiprocessing_contexts, request): + """Fixture providing test data for InterProcessMessagingQueue.""" + 
constructor_args = request.param + manager, context = multiprocessing_contexts + instance = InterProcessMessagingQueue( + **constructor_args, poll_interval=0.01, mp_context=context + ) + + return instance, constructor_args, manager, context + + @pytest.mark.smoke + def test_class_signatures(self): + """Test InterProcessMessagingQueue inheritance and signatures.""" + assert issubclass(InterProcessMessagingQueue, InterProcessMessaging) + assert hasattr(InterProcessMessagingQueue, "__init__") + assert hasattr(InterProcessMessagingQueue, "create_worker_copy") + assert hasattr(InterProcessMessagingQueue, "create_send_messages_threads") + assert hasattr(InterProcessMessagingQueue, "create_receive_messages_threads") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test InterProcessMessagingQueue initialization.""" + instance, constructor_args, _, _ = valid_instances + + assert isinstance(instance, InterProcessMessagingQueue) + assert instance.worker_index == constructor_args["worker_index"] + assert instance.max_pending_size == constructor_args["max_pending_size"] + assert instance.max_done_size == constructor_args["max_done_size"] + assert hasattr(instance, "pending_queue") + assert hasattr(instance, "done_queue") + assert instance.running is False + + @pytest.mark.smoke + def test_create_worker_copy(self, valid_instances): + """Test InterProcessMessagingQueue.create_worker_copy.""" + instance, _, _, _ = valid_instances + worker_index = 42 + + worker_copy = instance.create_worker_copy(worker_index) + + assert isinstance(worker_copy, InterProcessMessagingQueue) + assert worker_copy.worker_index == worker_index + assert worker_copy.pending_queue is instance.pending_queue + assert worker_copy.done_queue is instance.done_queue + assert worker_copy.max_pending_size == instance.max_pending_size + assert worker_copy.max_done_size == instance.max_done_size + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "stop_events_lambda", + [ + list, + lambda: [threading.Event()], + lambda: [multiprocessing.Event()], + lambda: [threading.Event(), multiprocessing.Event()], + ], + ) + @async_timeout(5.0) + async def test_start_stop_lifecycle(self, valid_instances, stop_events_lambda): + """Test InterProcessMessagingQueue start/stop lifecycle.""" + instance, _, _, _ = valid_instances + stop_events = stop_events_lambda() + + # Initially not running + assert instance.running is False + assert instance.send_stopped_event is None + assert instance.receive_stopped_event is None + assert instance.shutdown_event is None + assert instance.buffer_send_queue is None + assert instance.buffer_receive_queue is None + assert instance.send_task is None + assert instance.receive_task is None + + # Start should work + await instance.start( + send_stop_criteria=stop_events, receive_stop_criteria=stop_events + ) + assert instance.running is True + assert instance.send_stopped_event is not None + assert isinstance(instance.send_stopped_event, threading.Event) + assert instance.receive_stopped_event is not None + assert isinstance(instance.receive_stopped_event, threading.Event) + assert instance.shutdown_event is not None + assert isinstance(instance.shutdown_event, threading.Event) + assert instance.buffer_send_queue is not None + assert isinstance(instance.buffer_send_queue, culsans.Queue) + assert instance.buffer_receive_queue is not None + assert isinstance(instance.buffer_receive_queue, culsans.Queue) + assert instance.send_task is not None + assert 
isinstance(instance.send_task, asyncio.Task) + assert instance.receive_task is not None + assert isinstance(instance.receive_task, asyncio.Task) + + # Stop should work + if stop_events: + for event in stop_events: + event.set() + + await asyncio.sleep(0.1) + assert instance.send_stopped_event.is_set() + assert instance.receive_stopped_event.is_set() + assert instance.send_task.done() + assert instance.receive_task.done() + + await instance.stop() + assert instance.running is False + assert instance.send_stopped_event is None + assert instance.receive_stopped_event is None + assert instance.shutdown_event is None + assert instance.buffer_send_queue is None + assert instance.buffer_receive_queue is None + assert instance.send_task is None + assert instance.receive_task is None + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "test_obj", + [ + 123451, + "asdfghjkl", + [None, 123, 45.67, "string", {"key": "value"}, [1, 2, 3]], + {"key": "value", "another_key": 123.456, "yet_another_key": [1, 2, 3]}, + MockMessage(content="hello", num=42), + ( + None, + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ( + GenerationResponse(request_id="id", request_args={}), + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ], + ) + @async_timeout(10.0) + async def test_lifecycle_put_get(self, valid_instances, test_obj): + instance, constructor_args, manager, context = valid_instances + + if ( + ( + isinstance(test_obj, ScheduledRequestInfo) + or ( + isinstance(test_obj, tuple) + and any(isinstance(item, ScheduledRequestInfo) for item in test_obj) + ) + ) + and constructor_args["serialization"] is None + and constructor_args["encoding"] is None + ): + # Handle case where ScheduledRequestInfo is not pickleable + pytest.skip("ScheduledRequestInfo is not pickleable") + + # Worker setup + process_target = MockProcessTarget( + instance.create_worker_copy(0), num_messages=5 + ) + process = context.Process(target=process_target.run) + process.start() + + # Local startup and wait + await instance.start( + pydantic_models=[ + MockMessage, + GenerationRequest, + GenerationResponse, + ScheduledRequestInfo, + ], + ) + await asyncio.sleep(0.1) + + try: + for _ in range(5): + await instance.put(test_obj, timeout=2.0) + + for _ in range(5): + val = await instance.get(timeout=2.0) + if not isinstance(test_obj, tuple): + assert val == test_obj + else: + assert list(val) == list(test_obj) + finally: + # Clean up + process.join(timeout=2.0) + if process.is_alive(): + process.terminate() + process.join(timeout=1.0) + + await instance.stop() + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "test_obj", + [ + ( + None, + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ( + GenerationResponse(request_id="id", request_args={}), + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ], + ) + @async_timeout(10.0) + async def test_lifecycle_put_get_iter(self, valid_instances, test_obj): + instance, constructor_args, manager, context = valid_instances + + if ( + ( + isinstance(test_obj, ScheduledRequestInfo) + or ( + isinstance(test_obj, tuple) + and any(isinstance(item, ScheduledRequestInfo) for item in test_obj) + ) + ) + and constructor_args["serialization"] is None + and constructor_args["encoding"] is None + ): + # Handle case where ScheduledRequestInfo is not pickleable + pytest.skip("ScheduledRequestInfo is not pickleable") + + # Worker setup + process_target = MockProcessTarget( + 
instance.create_worker_copy(0), num_messages=5 + ) + process = context.Process(target=process_target.run) + process.start() + + def _received_callback(msg): + if not isinstance(test_obj, tuple): + assert msg == test_obj + else: + assert list(msg) == list(test_obj) + return "changed_obj" + + # Local startup and wait + await instance.start( + send_items=[test_obj for _ in range(5)], + receive_callback=_received_callback, + pydantic_models=[ + MockMessage, + GenerationRequest, + GenerationResponse, + ScheduledRequestInfo, + ], + ) + await asyncio.sleep(0.1) + + try: + for _ in range(5): + val = await instance.get(timeout=2.0) + assert val == "changed_obj" + finally: + # Clean up + process.join(timeout=2.0) + if process.is_alive(): + process.terminate() + process.join(timeout=1.0) + + await instance.stop() + + +class TestInterProcessMessagingManagerQueue: + """Test suite for InterProcessMessagingManagerQueue.""" + + @pytest.fixture( + params=[ + { + "serialization": "dict", + "encoding": None, + "max_pending_size": None, + "max_done_size": None, + "worker_index": None, + }, + { + "serialization": "sequence", + "encoding": None, + "max_pending_size": 10, + "max_buffer_send_size": 2, + "max_done_size": 5, + "max_buffer_receive_size": 3, + "worker_index": None, + }, + { + "serialization": None, + "encoding": None, + "max_pending_size": None, + "max_done_size": None, + "worker_index": None, + }, + ], + ) + def valid_instances(self, multiprocessing_contexts, request): + """Fixture providing test data for InterProcessMessagingManagerQueue.""" + constructor_args = request.param + manager, context = multiprocessing_contexts + instance = InterProcessMessagingManagerQueue( + **constructor_args, manager=manager, poll_interval=0.01 + ) + return instance, constructor_args, manager, context + + @pytest.mark.smoke + def test_class_signatures(self): + """Test InterProcessMessagingManagerQueue inheritance and signatures.""" + assert issubclass(InterProcessMessagingManagerQueue, InterProcessMessaging) + assert issubclass(InterProcessMessagingManagerQueue, InterProcessMessagingQueue) + assert hasattr(InterProcessMessagingManagerQueue, "__init__") + assert hasattr(InterProcessMessagingManagerQueue, "create_worker_copy") + assert hasattr(InterProcessMessagingManagerQueue, "_send_messages_task_thread") + assert hasattr( + InterProcessMessagingManagerQueue, "_receive_messages_task_thread" + ) + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test InterProcessMessagingManagerQueue initialization.""" + instance, constructor_args, _, _ = valid_instances + + assert isinstance(instance, InterProcessMessagingManagerQueue) + assert instance.worker_index == constructor_args["worker_index"] + assert instance.max_pending_size == constructor_args["max_pending_size"] + assert instance.max_done_size == constructor_args["max_done_size"] + assert hasattr(instance, "pending_queue") + assert hasattr(instance, "done_queue") + assert instance.running is False + + @pytest.mark.smoke + def test_create_worker_copy(self, valid_instances): + """Test InterProcessMessagingQueue.create_worker_copy.""" + instance, _, _, _ = valid_instances + worker_index = 42 + + worker_copy = instance.create_worker_copy(worker_index) + + assert isinstance(worker_copy, InterProcessMessagingManagerQueue) + assert worker_copy.worker_index == worker_index + assert worker_copy.pending_queue is instance.pending_queue + assert worker_copy.done_queue is instance.done_queue + assert worker_copy.max_pending_size == instance.max_pending_size 
+ assert worker_copy.max_done_size == instance.max_done_size + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "stop_events_lambda", + [ + list, + lambda: [threading.Event()], + lambda: [multiprocessing.Event()], + lambda: [threading.Event(), multiprocessing.Event()], + ], + ) + @async_timeout(5.0) + async def test_start_stop_lifecycle(self, valid_instances, stop_events_lambda): + """Test InterProcessMessagingQueue start/stop lifecycle.""" + instance, _, _, _ = valid_instances + stop_events = stop_events_lambda() + + # Initially not running + assert instance.running is False + assert instance.send_stopped_event is None + assert instance.receive_stopped_event is None + assert instance.shutdown_event is None + assert instance.buffer_send_queue is None + assert instance.buffer_receive_queue is None + assert instance.send_task is None + assert instance.receive_task is None + + # Start should work + await instance.start( + send_stop_criteria=stop_events, receive_stop_criteria=stop_events + ) + assert instance.running is True + assert instance.send_stopped_event is not None + assert isinstance(instance.send_stopped_event, threading.Event) + assert instance.receive_stopped_event is not None + assert isinstance(instance.receive_stopped_event, threading.Event) + assert instance.shutdown_event is not None + assert isinstance(instance.shutdown_event, threading.Event) + assert instance.buffer_send_queue is not None + assert isinstance(instance.buffer_send_queue, culsans.Queue) + assert instance.buffer_receive_queue is not None + assert isinstance(instance.buffer_receive_queue, culsans.Queue) + assert instance.send_task is not None + assert isinstance(instance.send_task, asyncio.Task) + assert instance.receive_task is not None + assert isinstance(instance.receive_task, asyncio.Task) + + # Stop should work + if stop_events: + for event in stop_events: + event.set() + + await asyncio.sleep(0.1) + assert instance.send_stopped_event.is_set() + assert instance.receive_stopped_event.is_set() + assert instance.send_task.done() + assert instance.receive_task.done() + + await instance.stop() + assert instance.running is False + assert instance.send_stopped_event is None + assert instance.receive_stopped_event is None + assert instance.shutdown_event is None + assert instance.buffer_send_queue is None + assert instance.buffer_receive_queue is None + assert instance.send_task is None + assert instance.receive_task is None + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "test_obj", + [ + 123451, + "asdfghjkl", + [None, 123, 45.67, "string", {"key": "value"}, [1, 2, 3]], + {"key": "value", "another_key": 123.456, "yet_another_key": [1, 2, 3]}, + MockMessage(content="hello", num=42), + ( + None, + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ], + ) + @async_timeout(10.0) + async def test_lifecycle_put_get(self, valid_instances, test_obj): + instance, constructor_args, _, context = valid_instances + + if ( + ( + isinstance(test_obj, ScheduledRequestInfo) + or ( + isinstance(test_obj, tuple) + and any(isinstance(item, ScheduledRequestInfo) for item in test_obj) + ) + ) + and constructor_args["serialization"] is None + and constructor_args["encoding"] is None + ): + # Handle case where ScheduledRequestInfo is not pickleable + pytest.skip("ScheduledRequestInfo is not pickleable") + + # Worker setup + process_target = MockProcessTarget( + instance.create_worker_copy(0), num_messages=5 + ) + process = context.Process(target=process_target.run) 
+ process.start() + + # Local startup and wait + await instance.start( + pydantic_models=[ + MockMessage, + GenerationRequest, + GenerationResponse, + ScheduledRequestInfo, + ], + ) + await asyncio.sleep(0.1) + + try: + for _ in range(5): + await instance.put(test_obj, timeout=2.0) + + for _ in range(5): + val = await instance.get(timeout=2.0) + if not isinstance(test_obj, tuple): + assert val == test_obj + else: + assert list(val) == list(test_obj) + finally: + # Clean up + process.join(timeout=2.0) + if process.is_alive(): + process.terminate() + process.join(timeout=1.0) + + await instance.stop() + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "test_obj", + [ + ( + None, + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ( + GenerationResponse(request_id="id", request_args={}), + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ], + ) + @async_timeout(10.0) + async def test_lifecycle_put_get_iter(self, valid_instances, test_obj): + instance, constructor_args, _, context = valid_instances + + if ( + ( + isinstance(test_obj, ScheduledRequestInfo) + or ( + isinstance(test_obj, tuple) + and any(isinstance(item, ScheduledRequestInfo) for item in test_obj) + ) + ) + and constructor_args["serialization"] is None + and constructor_args["encoding"] is None + ): + # Handle case where ScheduledRequestInfo is not pickleable + pytest.skip("ScheduledRequestInfo is not pickleable") + + # Worker setup + process_target = MockProcessTarget( + instance.create_worker_copy(0), num_messages=5 + ) + process = context.Process(target=process_target.run) + process.start() + + def _received_callback(msg): + if not isinstance(test_obj, tuple): + assert msg == test_obj + else: + assert list(msg) == list(test_obj) + return "changed_obj" + + # Local startup and wait + await instance.start( + send_items=[test_obj for _ in range(5)], + receive_callback=_received_callback, + pydantic_models=[ + MockMessage, + GenerationRequest, + GenerationResponse, + ScheduledRequestInfo, + ], + ) + await asyncio.sleep(0.1) + + try: + for _ in range(5): + val = await instance.get(timeout=2.0) + assert val == "changed_obj" + finally: + # Clean up + process.join(timeout=2.0) + if process.is_alive(): + process.terminate() + process.join(timeout=1.0) + + await instance.stop() + + +class TestInterProcessMessagingPipe: + """Test suite for InterProcessMessagingPipe.""" + + @pytest.fixture( + params=[ + { + "num_workers": 2, + "serialization": "dict", + "encoding": None, + "max_pending_size": None, + "max_done_size": None, + "worker_index": None, + }, + { + "num_workers": 1, + "serialization": "sequence", + "encoding": None, + "max_pending_size": 10, + "max_buffer_send_size": 2, + "max_done_size": 5, + "max_buffer_receive_size": 3, + "worker_index": None, + }, + { + "num_workers": 1, + "serialization": None, + "encoding": None, + "max_pending_size": None, + "max_done_size": None, + "worker_index": None, + }, + ], + ) + def valid_instances(self, multiprocessing_contexts, request): + """Fixture providing test data for InterProcessMessagingPipe.""" + constructor_args = request.param + manager, context = multiprocessing_contexts + instance = InterProcessMessagingPipe(**constructor_args, poll_interval=0.01) + return instance, constructor_args, manager, context + + @pytest.mark.smoke + def test_class_signatures(self): + """Test InterProcessMessagingPipe inheritance and signatures.""" + assert issubclass(InterProcessMessagingPipe, InterProcessMessaging) + assert 
hasattr(InterProcessMessagingPipe, "__init__") + assert hasattr(InterProcessMessagingPipe, "create_worker_copy") + assert hasattr(InterProcessMessagingPipe, "_send_messages_task_thread") + assert hasattr(InterProcessMessagingPipe, "_receive_messages_task_thread") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test InterProcessMessagingPipe initialization.""" + instance, constructor_args, _, _ = valid_instances + + assert isinstance(instance, InterProcessMessagingPipe) + assert instance.worker_index == constructor_args["worker_index"] + assert instance.max_pending_size == constructor_args["max_pending_size"] + assert instance.max_done_size == constructor_args["max_done_size"] + assert instance.num_workers == constructor_args["num_workers"] + assert hasattr(instance, "pipes") + assert len(instance.pipes) == constructor_args["num_workers"] + assert len(instance.pipes) == constructor_args["num_workers"] + assert instance.running is False + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("kwargs", "expected_error"), + [ + ({"invalid_param": "value"}, TypeError), + ({"num_workers": 1, "unknown_arg": "test"}, TypeError), + ], + ) + def test_invalid_initialization_values(self, kwargs, expected_error): + """Test InterProcessMessagingPipe with invalid field values.""" + with pytest.raises(expected_error): + InterProcessMessagingPipe(**kwargs) + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test InterProcessMessagingPipe initialization without required field.""" + with pytest.raises(TypeError): + InterProcessMessagingPipe() + + @pytest.mark.smoke + def test_create_worker_copy(self, valid_instances): + """Test InterProcessMessagingPipe.create_worker_copy.""" + instance, _, _, _ = valid_instances + worker_index = 0 + + worker_copy = instance.create_worker_copy(worker_index) + + assert isinstance(worker_copy, InterProcessMessagingPipe) + assert worker_copy.worker_index == worker_index + assert worker_copy.pipes[0] is instance.pipes[worker_index] + assert worker_copy.max_pending_size == instance.max_pending_size + assert worker_copy.max_done_size == instance.max_done_size + assert worker_copy.num_workers == instance.num_workers + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(5.0) + async def test_start_stop_lifecycle(self, valid_instances): + """Test InterProcessMessagingPipe start/stop lifecycle.""" + instance, _, _, _ = valid_instances + stop_events = [] + + # Initially not running + assert instance.running is False + assert instance.send_stopped_event is None + assert instance.receive_stopped_event is None + assert instance.shutdown_event is None + assert instance.buffer_send_queue is None + assert instance.buffer_receive_queue is None + assert instance.send_task is None + assert instance.receive_task is None + + # Start should work + await instance.start( + send_stop_criteria=stop_events, receive_stop_criteria=stop_events + ) + assert instance.running is True + assert instance.send_stopped_event is not None + assert isinstance(instance.send_stopped_event, threading.Event) + assert instance.receive_stopped_event is not None + assert isinstance(instance.receive_stopped_event, threading.Event) + assert instance.shutdown_event is not None + assert isinstance(instance.shutdown_event, threading.Event) + assert instance.buffer_send_queue is not None + assert isinstance(instance.buffer_send_queue, culsans.Queue) + assert instance.buffer_receive_queue is not None + assert isinstance(instance.buffer_receive_queue, culsans.Queue) + 
assert instance.send_task is not None + assert isinstance(instance.send_task, asyncio.Task) + assert instance.receive_task is not None + assert isinstance(instance.receive_task, asyncio.Task) + + # Stop should work + await instance.stop() + assert instance.running is False + assert instance.send_stopped_event is None + assert instance.receive_stopped_event is None + assert instance.shutdown_event is None + assert instance.buffer_send_queue is None + assert instance.buffer_receive_queue is None + assert instance.send_task is None + assert instance.receive_task is None + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "test_obj", + [ + 123451, + "asdfghjkl", + [None, 123, 45.67, "string", {"key": "value"}, [1, 2, 3]], + {"key": "value", "another_key": 123.456, "yet_another_key": [1, 2, 3]}, + MockMessage(content="hello", num=42), + ( + None, + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ( + GenerationResponse(request_id="id", request_args={}), + GenerationRequest(content="asdfkj;"), + ScheduledRequestInfo(), + ), + ], + ) + @async_timeout(10.0) + async def test_lifecycle_put_get(self, valid_instances, test_obj): + instance, constructor_args, manager, context = valid_instances + + if ( + ( + isinstance(test_obj, ScheduledRequestInfo) + or ( + isinstance(test_obj, tuple) + and any(isinstance(item, ScheduledRequestInfo) for item in test_obj) + ) + ) + and constructor_args["serialization"] is None + and constructor_args["encoding"] is None + ): + pytest.skip("ScheduledRequestInfo is not pickleable") + + # Worker setup + processes = [] + for index in range(constructor_args["num_workers"]): + process_target = MockProcessTarget( + instance.create_worker_copy(index), num_messages=5 + ) + process = context.Process(target=process_target.run) + processes.append(process) + process.start() + + # Local startup and wait + await instance.start( + pydantic_models=[ + MockMessage, + GenerationRequest, + GenerationResponse, + ScheduledRequestInfo, + ], + ) + await asyncio.sleep(0.1) + + try: + for _ in range(5 * constructor_args["num_workers"]): + await instance.put(test_obj, timeout=2.0) + + for _ in range(5 * constructor_args["num_workers"]): + val = await instance.get(timeout=2.0) + if not isinstance(test_obj, tuple): + assert val == test_obj + else: + assert list(val) == list(test_obj) + finally: + # Clean up + for process in processes: + process.join(timeout=2.0) + if process.is_alive(): + process.terminate() + process.join(timeout=1.0) + + await instance.stop() diff --git a/tests/unit/utils/test_mixins.py b/tests/unit/utils/test_mixins.py new file mode 100644 index 00000000..cd8990de --- /dev/null +++ b/tests/unit/utils/test_mixins.py @@ -0,0 +1,245 @@ +from __future__ import annotations + +import pytest + +from guidellm.utils.mixins import InfoMixin + + +class TestInfoMixin: + """Test suite for InfoMixin.""" + + @pytest.fixture( + params=[ + {"attr_one": "test_value", "attr_two": 42}, + {"attr_one": "hello_world", "attr_two": 100, "attr_three": [1, 2, 3]}, + ], + ids=["basic_attributes", "extended_attributes"], + ) + def valid_instances(self, request): + """Fixture providing test data for InfoMixin.""" + constructor_args = request.param + + class TestClass(InfoMixin): + def __init__(self, **kwargs): + for key, value in kwargs.items(): + setattr(self, key, value) + + instance = TestClass(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test InfoMixin class signatures and methods.""" 
+ assert hasattr(InfoMixin, "extract_from_obj") + assert callable(InfoMixin.extract_from_obj) + assert hasattr(InfoMixin, "create_info_dict") + assert callable(InfoMixin.create_info_dict) + assert hasattr(InfoMixin, "info") + assert isinstance(InfoMixin.info, property) + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test InfoMixin initialization through inheritance.""" + instance, constructor_args = valid_instances + assert isinstance(instance, InfoMixin) + for key, value in constructor_args.items(): + assert hasattr(instance, key) + assert getattr(instance, key) == value + + @pytest.mark.smoke + def test_info_property(self, valid_instances): + """Test InfoMixin.info property.""" + instance, constructor_args = valid_instances + result = instance.info + assert isinstance(result, dict) + assert "str" in result + assert "type" in result + assert "class" in result + assert "module" in result + assert "attributes" in result + assert result["type"] == "TestClass" + assert result["class"] == "TestClass" + assert isinstance(result["attributes"], dict) + for key, value in constructor_args.items(): + assert key in result["attributes"] + assert result["attributes"][key] == value + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("obj_data", "expected_attributes"), + [ + ({"name": "test", "value": 42}, {"name": "test", "value": 42}), + ({"data": [1, 2, 3], "flag": True}, {"data": [1, 2, 3], "flag": True}), + ({"nested": {"key": "value"}}, {"nested": {"key": "value"}}), + ], + ) + def test_create_info_dict(self, obj_data, expected_attributes): + """Test InfoMixin.create_info_dict class method.""" + + class SimpleObject: + def __init__(self, **kwargs): + for key, value in kwargs.items(): + setattr(self, key, value) + + obj = SimpleObject(**obj_data) + result = InfoMixin.create_info_dict(obj) + + assert isinstance(result, dict) + assert "str" in result + assert "type" in result + assert "class" in result + assert "module" in result + assert "attributes" in result + assert result["type"] == "SimpleObject" + assert result["class"] == "SimpleObject" + assert result["attributes"] == expected_attributes + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("obj_data", "expected_attributes"), + [ + ({"name": "test", "value": 42}, {"name": "test", "value": 42}), + ({"data": [1, 2, 3], "flag": True}, {"data": [1, 2, 3], "flag": True}), + ], + ) + def test_extract_from_obj_without_info(self, obj_data, expected_attributes): + """Test InfoMixin.extract_from_obj with objects without info method.""" + + class SimpleObject: + def __init__(self, **kwargs): + for key, value in kwargs.items(): + setattr(self, key, value) + + obj = SimpleObject(**obj_data) + result = InfoMixin.extract_from_obj(obj) + + assert isinstance(result, dict) + assert "str" in result + assert "type" in result + assert "class" in result + assert "module" in result + assert "attributes" in result + assert result["type"] == "SimpleObject" + assert result["class"] == "SimpleObject" + assert result["attributes"] == expected_attributes + + @pytest.mark.smoke + def test_extract_from_obj_with_info_method(self): + """Test InfoMixin.extract_from_obj with objects that have info method.""" + + class ObjectWithInfoMethod: + def info(self): + return {"custom": "info_method", "type": "custom_type"} + + obj = ObjectWithInfoMethod() + result = InfoMixin.extract_from_obj(obj) + + assert result == {"custom": "info_method", "type": "custom_type"} + + @pytest.mark.smoke + def test_extract_from_obj_with_info_property(self): + """Test 
InfoMixin.extract_from_obj with objects that have info property.""" + + class ObjectWithInfoProperty: + @property + def info(self): + return {"custom": "info_property", "type": "custom_type"} + + obj = ObjectWithInfoProperty() + result = InfoMixin.extract_from_obj(obj) + + assert result == {"custom": "info_property", "type": "custom_type"} + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("obj_type", "obj_value"), + [ + (str, "test_string"), + (int, 42), + (float, 3.14), + (list, [1, 2, 3]), + (dict, {"key": "value"}), + ], + ) + def test_extract_from_obj_builtin_types(self, obj_type, obj_value): + """Test InfoMixin.extract_from_obj with built-in types.""" + result = InfoMixin.extract_from_obj(obj_value) + + assert isinstance(result, dict) + assert "str" in result + assert "type" in result + assert result["type"] == obj_type.__name__ + assert result["str"] == str(obj_value) + + @pytest.mark.sanity + def test_extract_from_obj_without_dict(self): + """Test InfoMixin.extract_from_obj with objects without __dict__.""" + obj = 42 + result = InfoMixin.extract_from_obj(obj) + + assert isinstance(result, dict) + assert "attributes" in result + assert result["attributes"] == {} + assert result["type"] == "int" + assert result["str"] == "42" + + @pytest.mark.sanity + def test_extract_from_obj_with_private_attributes(self): + """Test InfoMixin.extract_from_obj filters private attributes.""" + + class ObjectWithPrivate: + def __init__(self): + self.public_attr = "public" + self._private_attr = "private" + self.__very_private = "very_private" + + obj = ObjectWithPrivate() + result = InfoMixin.extract_from_obj(obj) + + assert "public_attr" in result["attributes"] + assert result["attributes"]["public_attr"] == "public" + assert "_private_attr" not in result["attributes"] + assert "__very_private" not in result["attributes"] + + @pytest.mark.sanity + def test_extract_from_obj_complex_attributes(self): + """Test InfoMixin.extract_from_obj with complex attribute types.""" + + class ComplexObject: + def __init__(self): + self.simple_str = "test" + self.simple_int = 42 + self.simple_list = [1, 2, 3] + self.simple_dict = {"key": "value"} + self.complex_object = object() + + obj = ComplexObject() + result = InfoMixin.extract_from_obj(obj) + + attributes = result["attributes"] + assert attributes["simple_str"] == "test" + assert attributes["simple_int"] == 42 + assert attributes["simple_list"] == [1, 2, 3] + assert attributes["simple_dict"] == {"key": "value"} + assert isinstance(attributes["complex_object"], str) + + @pytest.mark.regression + def test_create_info_dict_consistency(self, valid_instances): + """Test InfoMixin.create_info_dict produces consistent results.""" + instance, _ = valid_instances + + result1 = InfoMixin.create_info_dict(instance) + result2 = InfoMixin.create_info_dict(instance) + + assert result1 == result2 + assert result1 is not result2 + + @pytest.mark.regression + def test_info_property_uses_create_info_dict(self, valid_instances): + """Test InfoMixin.info property uses create_info_dict method.""" + instance, _ = valid_instances + + info_result = instance.info + create_result = InfoMixin.create_info_dict(instance) + + assert info_result == create_result diff --git a/tests/unit/utils/test_pydantic_utils.py b/tests/unit/utils/test_pydantic_utils.py new file mode 100644 index 00000000..726b5ddf --- /dev/null +++ b/tests/unit/utils/test_pydantic_utils.py @@ -0,0 +1,1002 @@ +""" +Unit tests for the pydantic_utils module. 
+""" + +from __future__ import annotations + +from typing import ClassVar, TypeVar +from unittest import mock + +import pytest +from pydantic import BaseModel, Field, ValidationError + +from guidellm.utils import ( + PydanticClassRegistryMixin, + ReloadableBaseModel, + StandardBaseDict, + StandardBaseModel, + StatusBreakdown, +) +from guidellm.utils.pydantic_utils import ( + BaseModelT, + ErroredT, + IncompleteT, + RegisterClassT, + SuccessfulT, + TotalT, +) + + +@pytest.mark.smoke +def test_base_model_t(): + """Test that BaseModelT is configured correctly as a TypeVar.""" + assert isinstance(BaseModelT, type(TypeVar("test"))) + assert BaseModelT.__name__ == "BaseModelT" + assert BaseModelT.__bound__ is BaseModel + assert BaseModelT.__constraints__ == () + + +@pytest.mark.smoke +def test_register_class_t(): + """Test that RegisterClassT is configured correctly as a TypeVar.""" + assert isinstance(RegisterClassT, type(TypeVar("test"))) + assert RegisterClassT.__name__ == "RegisterClassT" + assert RegisterClassT.__bound__ is None + assert RegisterClassT.__constraints__ == () + + +@pytest.mark.smoke +def test_successful_t(): + """Test that SuccessfulT is configured correctly as a TypeVar.""" + assert isinstance(SuccessfulT, type(TypeVar("test"))) + assert SuccessfulT.__name__ == "SuccessfulT" + assert SuccessfulT.__bound__ is None + assert SuccessfulT.__constraints__ == () + + +@pytest.mark.smoke +def test_errored_t(): + """Test that ErroredT is configured correctly as a TypeVar.""" + assert isinstance(ErroredT, type(TypeVar("test"))) + assert ErroredT.__name__ == "ErroredT" + assert ErroredT.__bound__ is None + assert ErroredT.__constraints__ == () + + +@pytest.mark.smoke +def test_incomplete_t(): + """Test that IncompleteT is configured correctly as a TypeVar.""" + assert isinstance(IncompleteT, type(TypeVar("test"))) + assert IncompleteT.__name__ == "IncompleteT" + assert IncompleteT.__bound__ is None + assert IncompleteT.__constraints__ == () + + +@pytest.mark.smoke +def test_total_t(): + """Test that TotalT is configured correctly as a TypeVar.""" + assert isinstance(TotalT, type(TypeVar("test"))) + assert TotalT.__name__ == "TotalT" + assert TotalT.__bound__ is None + assert TotalT.__constraints__ == () + + +class TestReloadableBaseModel: + """Test suite for ReloadableBaseModel.""" + + @pytest.fixture( + params=[ + {"name": "test_value"}, + {"name": "hello_world"}, + {"name": "another_test"}, + ], + ids=["basic_string", "multi_word", "underscore"], + ) + def valid_instances(self, request) -> tuple[ReloadableBaseModel, dict[str, str]]: + """Fixture providing test data for ReloadableBaseModel.""" + + class TestModel(ReloadableBaseModel): + name: str + + constructor_args = request.param + instance = TestModel(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test ReloadableBaseModel inheritance and class variables.""" + assert issubclass(ReloadableBaseModel, BaseModel) + assert hasattr(ReloadableBaseModel, "model_config") + assert hasattr(ReloadableBaseModel, "reload_schema") + + # Check model configuration + config = ReloadableBaseModel.model_config + assert config["extra"] == "ignore" + assert config["use_enum_values"] is True + assert config["from_attributes"] is True + assert config["arbitrary_types_allowed"] is True + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test ReloadableBaseModel initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, 
ReloadableBaseModel) + assert instance.name == constructor_args["name"] # type: ignore[attr-defined] + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("name", None), + ("name", 123), + ("name", []), + ], + ) + def test_invalid_initialization_values(self, field, value): + """Test ReloadableBaseModel with invalid field values.""" + + class TestModel(ReloadableBaseModel): + name: str + + data = {field: value} + with pytest.raises(ValidationError): + TestModel(**data) + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test ReloadableBaseModel initialization without required field.""" + + class TestModel(ReloadableBaseModel): + name: str + + with pytest.raises(ValidationError): + TestModel() # type: ignore[call-arg] + + @pytest.mark.smoke + def test_reload_schema(self): + """Test ReloadableBaseModel.reload_schema method.""" + + class TestModel(ReloadableBaseModel): + name: str + + # Mock the model_rebuild method to simulate schema reload + with mock.patch.object(TestModel, "model_rebuild") as mock_rebuild: + TestModel.reload_schema() + mock_rebuild.assert_called_once_with(force=True) + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test ReloadableBaseModel serialization and deserialization.""" + instance, constructor_args = valid_instances + data_dict = instance.model_dump() + assert isinstance(data_dict, dict) + assert data_dict["name"] == constructor_args["name"] + + recreated = instance.__class__.model_validate(data_dict) + assert isinstance(recreated, instance.__class__) + assert recreated.name == constructor_args["name"] + + +class TestStandardBaseModel: + """Test suite for StandardBaseModel.""" + + @pytest.fixture( + params=[ + {"field_str": "test_value", "field_int": 42}, + {"field_str": "hello_world", "field_int": 100}, + {"field_str": "another_test", "field_int": 0}, + ], + ids=["basic_values", "positive_values", "zero_value"], + ) + def valid_instances( + self, request + ) -> tuple[StandardBaseModel, dict[str, int | str]]: + """Fixture providing test data for StandardBaseModel.""" + + class TestModel(StandardBaseModel): + field_str: str = Field(description="Test string field") + field_int: int = Field(default=10, description="Test integer field") + + constructor_args = request.param + instance = TestModel(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test StandardBaseModel inheritance and class variables.""" + assert issubclass(StandardBaseModel, BaseModel) + assert hasattr(StandardBaseModel, "model_config") + assert hasattr(StandardBaseModel, "get_default") + + # Check model configuration + config = StandardBaseModel.model_config + assert config["extra"] == "ignore" + assert config["use_enum_values"] is True + assert config["from_attributes"] is True + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test StandardBaseModel initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, StandardBaseModel) + assert instance.field_str == constructor_args["field_str"] # type: ignore[attr-defined] + assert instance.field_int == constructor_args["field_int"] # type: ignore[attr-defined] + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("field_str", None), + ("field_str", 123), + ("field_int", "not_int"), + ], + ) + def test_invalid_initialization_values(self, field, value): + """Test StandardBaseModel with invalid field values.""" + + class 
TestModel(StandardBaseModel): + field_str: str = Field(description="Test string field") + field_int: int = Field(default=10, description="Test integer field") + + data = {field: value} + if field == "field_str": + data["field_int"] = 42 + else: + data["field_str"] = "test" + + with pytest.raises(ValidationError): + TestModel(**data) + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test StandardBaseModel initialization without required field.""" + + class TestModel(StandardBaseModel): + field_str: str = Field(description="Test string field") + field_int: int = Field(default=10, description="Test integer field") + + with pytest.raises(ValidationError): + TestModel() # type: ignore[call-arg] + + @pytest.mark.smoke + def test_get_default(self): + """Test StandardBaseModel.get_default method.""" + + class TestModel(StandardBaseModel): + field_str: str = Field(description="Test string field") + field_int: int = Field(default=42, description="Test integer field") + + default_value = TestModel.get_default("field_int") + assert default_value == 42 + + @pytest.mark.sanity + def test_get_default_invalid(self): + """Test StandardBaseModel.get_default with invalid field.""" + + class TestModel(StandardBaseModel): + field_str: str = Field(description="Test string field") + + with pytest.raises(KeyError): + TestModel.get_default("nonexistent_field") + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test StandardBaseModel serialization and deserialization.""" + instance, constructor_args = valid_instances + data_dict = instance.model_dump() + assert isinstance(data_dict, dict) + assert data_dict["field_str"] == constructor_args["field_str"] + assert data_dict["field_int"] == constructor_args["field_int"] + + recreated = instance.__class__.model_validate(data_dict) + assert isinstance(recreated, instance.__class__) + assert recreated.field_str == constructor_args["field_str"] + assert recreated.field_int == constructor_args["field_int"] + + +class TestStandardBaseDict: + """Test suite for StandardBaseDict.""" + + @pytest.fixture( + params=[ + {"field_str": "test_value", "extra_field": "extra_value"}, + {"field_str": "hello_world", "another_extra": 123}, + {"field_str": "another_test", "complex_extra": {"nested": "value"}}, + ], + ids=["string_extra", "int_extra", "dict_extra"], + ) + def valid_instances( + self, request + ) -> tuple[StandardBaseDict, dict[str, str | int | dict[str, str]]]: + """Fixture providing test data for StandardBaseDict.""" + + class TestModel(StandardBaseDict): + field_str: str = Field(description="Test string field") + + constructor_args = request.param + instance = TestModel(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test StandardBaseDict inheritance and class variables.""" + assert issubclass(StandardBaseDict, StandardBaseModel) + assert hasattr(StandardBaseDict, "model_config") + + # Check model configuration + config = StandardBaseDict.model_config + assert config["extra"] == "allow" + assert config["use_enum_values"] is True + assert config["from_attributes"] is True + assert config["arbitrary_types_allowed"] is True + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test StandardBaseDict initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, StandardBaseDict) + assert instance.field_str == constructor_args["field_str"] # type: ignore[attr-defined] + + # Check extra fields are 
preserved + for key, value in constructor_args.items(): + if key != "field_str": + assert hasattr(instance, key) + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("field_str", None), + ("field_str", 123), + ], + ) + def test_invalid_initialization_values(self, field, value): + """Test StandardBaseDict with invalid field values.""" + + class TestModel(StandardBaseDict): + field_str: str = Field(description="Test string field") + + data = {field: value} + with pytest.raises(ValidationError): + TestModel(**data) + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test StandardBaseDict initialization without required field.""" + + class TestModel(StandardBaseDict): + field_str: str = Field(description="Test string field") + + with pytest.raises(ValidationError): + TestModel() # type: ignore[call-arg] + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test StandardBaseDict serialization and deserialization.""" + instance, constructor_args = valid_instances + data_dict = instance.model_dump() + assert isinstance(data_dict, dict) + assert data_dict["field_str"] == constructor_args["field_str"] + + # Check extra fields are in the serialized data + for key, value in constructor_args.items(): + if key != "field_str": + assert key in data_dict + assert data_dict[key] == value + + recreated = instance.__class__.model_validate(data_dict) + assert isinstance(recreated, instance.__class__) + assert recreated.field_str == constructor_args["field_str"] + + # Check extra fields are preserved after deserialization + for key, value in constructor_args.items(): + if key != "field_str": + assert hasattr(recreated, key) + assert getattr(recreated, key) == value + + +class TestStatusBreakdown: + """Test suite for StatusBreakdown.""" + + @pytest.fixture( + params=[ + {"successful": 100, "errored": 5, "incomplete": 10, "total": 115}, + { + "successful": "success_data", + "errored": "error_data", + "incomplete": "incomplete_data", + "total": "total_data", + }, + { + "successful": [1, 2, 3], + "errored": [4, 5], + "incomplete": [6], + "total": [1, 2, 3, 4, 5, 6], + }, + ], + ids=["int_values", "string_values", "list_values"], + ) + def valid_instances(self, request) -> tuple[StatusBreakdown, dict]: + """Fixture providing test data for StatusBreakdown.""" + constructor_args = request.param + instance = StatusBreakdown(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test StatusBreakdown inheritance and type relationships.""" + assert issubclass(StatusBreakdown, BaseModel) + # Check if Generic is in the MRO (method resolution order) + assert any(cls.__name__ == "Generic" for cls in StatusBreakdown.__mro__) + assert "successful" in StatusBreakdown.model_fields + assert "errored" in StatusBreakdown.model_fields + assert "incomplete" in StatusBreakdown.model_fields + assert "total" in StatusBreakdown.model_fields + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test StatusBreakdown initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, StatusBreakdown) + assert instance.successful == constructor_args["successful"] + assert instance.errored == constructor_args["errored"] + assert instance.incomplete == constructor_args["incomplete"] + assert instance.total == constructor_args["total"] + + @pytest.mark.smoke + def test_initialization_defaults(self): + """Test 
StatusBreakdown initialization with default values.""" + instance: StatusBreakdown = StatusBreakdown() + assert instance.successful is None + assert instance.errored is None + assert instance.incomplete is None + assert instance.total is None + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test StatusBreakdown serialization and deserialization.""" + instance, constructor_args = valid_instances + data_dict = instance.model_dump() + assert isinstance(data_dict, dict) + assert data_dict["successful"] == constructor_args["successful"] + assert data_dict["errored"] == constructor_args["errored"] + assert data_dict["incomplete"] == constructor_args["incomplete"] + assert data_dict["total"] == constructor_args["total"] + + recreated: StatusBreakdown = StatusBreakdown.model_validate(data_dict) + assert isinstance(recreated, StatusBreakdown) + assert recreated.successful == constructor_args["successful"] + assert recreated.errored == constructor_args["errored"] + assert recreated.incomplete == constructor_args["incomplete"] + assert recreated.total == constructor_args["total"] + + +class TestPydanticClassRegistryMixin: + """Test suite for PydanticClassRegistryMixin.""" + + @pytest.fixture( + params=[ + {"test_type": "test_sub", "value": "test_value"}, + {"test_type": "test_sub", "value": "hello_world"}, + ], + ids=["basic_value", "multi_word"], + ) + def valid_instances( + self, request + ) -> tuple[PydanticClassRegistryMixin, dict, type, type]: + """Fixture providing test data for PydanticClassRegistryMixin.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + @TestBaseModel.register("test_sub") + class TestSubModel(TestBaseModel): + test_type: str = "test_sub" + value: str + + TestBaseModel.reload_schema() + + constructor_args = request.param + instance = TestSubModel(value=constructor_args["value"]) + return instance, constructor_args, TestBaseModel, TestSubModel + + @pytest.mark.smoke + def test_class_signatures(self): + """Test PydanticClassRegistryMixin inheritance and class variables.""" + assert issubclass(PydanticClassRegistryMixin, ReloadableBaseModel) + assert hasattr(PydanticClassRegistryMixin, "schema_discriminator") + assert PydanticClassRegistryMixin.schema_discriminator == "model_type" + assert hasattr(PydanticClassRegistryMixin, "register_decorator") + assert hasattr(PydanticClassRegistryMixin, "__get_pydantic_core_schema__") + assert hasattr(PydanticClassRegistryMixin, "__pydantic_generate_base_schema__") + assert hasattr(PydanticClassRegistryMixin, "auto_populate_registry") + assert hasattr(PydanticClassRegistryMixin, "registered_classes") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test PydanticClassRegistryMixin initialization.""" + instance, constructor_args, base_class, sub_class = valid_instances + assert isinstance(instance, sub_class) + assert isinstance(instance, base_class) + assert instance.test_type == constructor_args["test_type"] + assert instance.value == constructor_args["value"] + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("test_type", None), + ("test_type", 123), + ("value", None), + ], + ) + def test_invalid_initialization_values(self, field, value): + """Test PydanticClassRegistryMixin with invalid field values.""" + + class 
TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + @TestBaseModel.register("test_sub") + class TestSubModel(TestBaseModel): + test_type: str = "test_sub" + value: str + + data = {field: value} + if field == "test_type": + data["value"] = "test" + else: + data["test_type"] = "test_sub" + + with pytest.raises(ValidationError): + TestSubModel(**data) + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test PydanticClassRegistryMixin initialization without required field.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + @TestBaseModel.register("test_sub") + class TestSubModel(TestBaseModel): + test_type: str = "test_sub" + value: str + + with pytest.raises(ValidationError): + TestSubModel() # type: ignore[call-arg] + + @pytest.mark.smoke + def test_register_decorator(self): + """Test PydanticClassRegistryMixin.register_decorator method.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + @TestBaseModel.register() + class TestSubModel(TestBaseModel): + test_type: str = "TestSubModel" + value: str + + assert TestBaseModel.registry is not None # type: ignore[misc] + assert "TestSubModel" in TestBaseModel.registry # type: ignore[misc] + assert TestBaseModel.registry["TestSubModel"] is TestSubModel # type: ignore[misc] + + @pytest.mark.sanity + def test_register_decorator_with_name(self): + """Test PydanticClassRegistryMixin.register_decorator with custom name.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + @TestBaseModel.register("custom_name") + class TestSubModel(TestBaseModel): + test_type: str = "custom_name" + value: str + + assert TestBaseModel.registry is not None # type: ignore[misc] + assert "custom_name" in TestBaseModel.registry # type: ignore[misc] + assert TestBaseModel.registry["custom_name"] is TestSubModel # type: ignore[misc] + + @pytest.mark.sanity + def test_register_decorator_invalid_type(self): + """Test PydanticClassRegistryMixin.register_decorator with invalid type.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + class RegularClass: + pass + + with pytest.raises(TypeError) as exc_info: + TestBaseModel.register_decorator(RegularClass) # type: ignore[arg-type] + + assert "not a subclass of Pydantic BaseModel" in str(exc_info.value) + + @pytest.mark.smoke + def test_auto_populate_registry(self): + """Test PydanticClassRegistryMixin.auto_populate_registry method.""" + + class 
TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + registry_auto_discovery: ClassVar[bool] = True + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + with ( + mock.patch.object(TestBaseModel, "reload_schema") as mock_reload, + mock.patch( + "guidellm.utils.registry.RegistryMixin.auto_populate_registry", + return_value=True, + ), + ): + result = TestBaseModel.auto_populate_registry() + assert result is True + mock_reload.assert_called_once() + + @pytest.mark.smoke + def test_registered_classes(self): + """Test PydanticClassRegistryMixin.registered_classes method.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + registry_auto_discovery: ClassVar[bool] = False + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + @TestBaseModel.register("test_sub_a") + class TestSubModelA(TestBaseModel): + test_type: str = "test_sub_a" + value_a: str + + @TestBaseModel.register("test_sub_b") + class TestSubModelB(TestBaseModel): + test_type: str = "test_sub_b" + value_b: int + + # Test normal case with registered classes + registered = TestBaseModel.registered_classes() + assert isinstance(registered, tuple) + assert len(registered) == 2 + assert TestSubModelA in registered + assert TestSubModelB in registered + + @pytest.mark.sanity + def test_registered_classes_with_auto_discovery(self): + """Test PydanticClassRegistryMixin.registered_classes with auto discovery.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + registry_auto_discovery: ClassVar[bool] = True + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + with mock.patch.object( + TestBaseModel, "auto_populate_registry" + ) as mock_auto_populate: + # Mock the registry to simulate registered classes + TestBaseModel.registry = {"test_class": type("TestClass", (), {})} + mock_auto_populate.return_value = False + + registered = TestBaseModel.registered_classes() + mock_auto_populate.assert_called_once() + assert isinstance(registered, tuple) + assert len(registered) == 1 + + @pytest.mark.sanity + def test_registered_classes_no_registry(self): + """Test PydanticClassRegistryMixin.registered_classes with no registry.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + # Ensure registry is None + TestBaseModel.registry = None + + with pytest.raises(ValueError) as exc_info: + TestBaseModel.registered_classes() + + assert "must be called after registering classes" in str(exc_info.value) + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test PydanticClassRegistryMixin serialization and deserialization.""" + instance, constructor_args, base_class, sub_class = valid_instances + + # Test serialization with model_dump + dump_data = instance.model_dump() + assert isinstance(dump_data, dict) + assert dump_data["test_type"] == constructor_args["test_type"] + assert dump_data["value"] == 
constructor_args["value"] + + # Test deserialization via subclass + recreated = sub_class.model_validate(dump_data) + assert isinstance(recreated, sub_class) + assert recreated.test_type == constructor_args["test_type"] + assert recreated.value == constructor_args["value"] + + # Test polymorphic deserialization via base class + recreated_base = base_class.model_validate(dump_data) # type: ignore[assignment] + assert isinstance(recreated_base, sub_class) + assert recreated_base.test_type == constructor_args["test_type"] + assert recreated_base.value == constructor_args["value"] + + @pytest.mark.regression + def test_polymorphic_container_marshalling(self): + """Test PydanticClassRegistryMixin in container models.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "test_type" + test_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + return TestBaseModel + + @classmethod + def __pydantic_generate_base_schema__(cls, handler): + return handler(cls) + + @TestBaseModel.register("sub_a") + class TestSubModelA(TestBaseModel): + test_type: str = "sub_a" + value_a: str + + @TestBaseModel.register("sub_b") + class TestSubModelB(TestBaseModel): + test_type: str = "sub_b" + value_b: int + + class ContainerModel(BaseModel): + name: str + model: TestBaseModel + models: list[TestBaseModel] + + sub_a = TestSubModelA(value_a="test") + sub_b = TestSubModelB(value_b=123) + + container = ContainerModel(name="container", model=sub_a, models=[sub_a, sub_b]) + + # Verify container construction + assert isinstance(container.model, TestSubModelA) + assert container.model.test_type == "sub_a" + assert container.model.value_a == "test" + assert len(container.models) == 2 + assert isinstance(container.models[0], TestSubModelA) + assert isinstance(container.models[1], TestSubModelB) + + # Test serialization + dump_data = container.model_dump() + assert isinstance(dump_data, dict) + assert dump_data["name"] == "container" + assert dump_data["model"]["test_type"] == "sub_a" + assert dump_data["model"]["value_a"] == "test" + assert len(dump_data["models"]) == 2 + assert dump_data["models"][0]["test_type"] == "sub_a" + assert dump_data["models"][1]["test_type"] == "sub_b" + + # Test deserialization + recreated = ContainerModel.model_validate(dump_data) + assert isinstance(recreated, ContainerModel) + assert recreated.name == "container" + assert isinstance(recreated.model, TestSubModelA) + assert len(recreated.models) == 2 + assert isinstance(recreated.models[0], TestSubModelA) + assert isinstance(recreated.models[1], TestSubModelB) + + @pytest.mark.smoke + def test_register_preserves_pydantic_metadata(self): # noqa: C901 + """Test that registered Pydantic classes retain docs, types, and methods.""" + + class TestBaseModel(PydanticClassRegistryMixin): + schema_discriminator: ClassVar[str] = "model_type" + model_type: str + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[TestBaseModel]: + if cls.__name__ == "TestBaseModel": + return cls + + return TestBaseModel + + @TestBaseModel.register("documented_model") + class DocumentedModel(TestBaseModel): + """This is a documented Pydantic model with methods and type hints.""" + + model_type: str = "documented_model" + value: int = Field(description="An integer value for the model") + + def get_value(self) -> int: + """Get the stored value. 
+ + :return: The stored integer value + """ + return self.value + + def set_value(self, new_value: int) -> None: + """Set a new value. + + :param new_value: The new integer value to set + """ + self.value = new_value + + @classmethod + def from_string(cls, value_str: str) -> DocumentedModel: + """Create instance from string. + + :param value_str: String representation of value + :return: New DocumentedModel instance + """ + return cls(value=int(value_str)) + + @staticmethod + def validate_value(value: int) -> bool: + """Validate that a value is positive. + + :param value: Value to validate + :return: True if positive, False otherwise + """ + return value > 0 + + def model_post_init(self, __context) -> None: + """Post-initialization processing. + + :param __context: Validation context + """ + if self.value < 0: + raise ValueError("Value must be non-negative") + + # Check that the class was registered + assert TestBaseModel.is_registered("documented_model") + registered_class = TestBaseModel.get_registered_object("documented_model") + assert registered_class is DocumentedModel + + # Check that the class retains its documentation + assert registered_class.__doc__ is not None + assert "documented Pydantic model with methods" in registered_class.__doc__ + + # Check that methods retain their documentation + assert registered_class.get_value.__doc__ is not None + assert "Get the stored value" in registered_class.get_value.__doc__ + assert registered_class.set_value.__doc__ is not None + assert "Set a new value" in registered_class.set_value.__doc__ + assert registered_class.from_string.__doc__ is not None + assert "Create instance from string" in registered_class.from_string.__doc__ + assert registered_class.validate_value.__doc__ is not None + assert ( + "Validate that a value is positive" + in registered_class.validate_value.__doc__ + ) + assert registered_class.model_post_init.__doc__ is not None + assert ( + "Post-initialization processing" in registered_class.model_post_init.__doc__ + ) + + # Check that methods are callable and work correctly + instance = DocumentedModel(value=42) + assert isinstance(instance, DocumentedModel) + assert instance.get_value() == 42 + instance.set_value(100) + assert instance.get_value() == 100 + assert instance.model_type == "documented_model" + + # Check class methods work + instance2 = DocumentedModel.from_string("123") + assert instance2.get_value() == 123 + assert instance2.model_type == "documented_model" + + # Check static methods work + assert DocumentedModel.validate_value(10) is True + assert DocumentedModel.validate_value(-5) is False + + # Check that Pydantic functionality is preserved + data_dict = instance.model_dump() + assert data_dict["value"] == 100 + assert data_dict["model_type"] == "documented_model" + + recreated = DocumentedModel.model_validate(data_dict) + assert isinstance(recreated, DocumentedModel) + assert recreated.value == 100 + assert recreated.model_type == "documented_model" + + # Test field validation + with pytest.raises(ValidationError): + DocumentedModel(value="not_an_int") + + # Test post_init validation + with pytest.raises(ValueError, match="Value must be non-negative"): + DocumentedModel(value=-10) + + # Check that Pydantic field metadata is preserved + value_field = DocumentedModel.model_fields["value"] + assert value_field.description == "An integer value for the model" + + # Check that type annotations are preserved (if accessible) + import inspect + + if hasattr(inspect, "get_annotations"): + # Python 3.10+ + try: + 
annotations = inspect.get_annotations(DocumentedModel.get_value) + return_ann = annotations.get("return") + assert return_ann is int or return_ann == "int" + except (AttributeError, NameError): + # Fallback for older Python or missing annotations + pass + + # Check that the class name is preserved + assert DocumentedModel.__name__ == "DocumentedModel" + assert DocumentedModel.__qualname__.endswith("DocumentedModel") + + # Verify that the class is still properly integrated with the registry system + all_registered = TestBaseModel.registered_classes() + assert DocumentedModel in all_registered + + # Test that the registered class is the same as the original + assert registered_class is DocumentedModel diff --git a/tests/unit/utils/test_registry.py b/tests/unit/utils/test_registry.py new file mode 100644 index 00000000..eed126d3 --- /dev/null +++ b/tests/unit/utils/test_registry.py @@ -0,0 +1,593 @@ +""" +Unit tests for the registry module. +""" + +from __future__ import annotations + +import inspect +from typing import TypeVar +from unittest import mock + +import pytest + +from guidellm.utils import RegistryMixin +from guidellm.utils.registry import RegisterT, RegistryObjT + + +def test_registry_obj_type(): + """Test that RegistryObjT is configured correctly as a TypeVar.""" + assert isinstance(RegistryObjT, type(TypeVar("test"))) + assert RegistryObjT.__name__ == "RegistryObjT" + assert RegistryObjT.__bound__ is None + assert RegistryObjT.__constraints__ == () + + +def test_registered_type(): + """Test that RegisterT is configured correctly as a TypeVar.""" + assert isinstance(RegisterT, type(TypeVar("test"))) + assert RegisterT.__name__ == "RegisterT" + assert RegisterT.__bound__ is None + assert RegisterT.__constraints__ == () + + +class TestRegistryMixin: + """Test suite for RegistryMixin class.""" + + @pytest.fixture( + params=[ + {"registry_auto_discovery": False, "auto_package": None}, + {"registry_auto_discovery": True, "auto_package": "test.package"}, + ], + ids=["manual_registry", "auto_discovery"], + ) + def valid_instances(self, request): + """Fixture providing test data for RegistryMixin subclasses.""" + config = request.param + + class TestRegistryClass(RegistryMixin): + registry_auto_discovery = config["registry_auto_discovery"] + if config["auto_package"]: + auto_package = config["auto_package"] + + return TestRegistryClass, config + + @pytest.mark.smoke + def test_class_signatures(self): + """Test RegistryMixin inheritance and exposed methods.""" + assert hasattr(RegistryMixin, "registry") + assert hasattr(RegistryMixin, "registry_auto_discovery") + assert hasattr(RegistryMixin, "registry_populated") + assert hasattr(RegistryMixin, "register") + assert hasattr(RegistryMixin, "register_decorator") + assert hasattr(RegistryMixin, "auto_populate_registry") + assert hasattr(RegistryMixin, "registered_objects") + assert hasattr(RegistryMixin, "is_registered") + assert hasattr(RegistryMixin, "get_registered_object") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test RegistryMixin initialization.""" + registry_class, config = valid_instances + + assert registry_class.registry is None + assert ( + registry_class.registry_auto_discovery == config["registry_auto_discovery"] + ) + assert registry_class.registry_populated is False + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test RegistryMixin with missing auto_package when auto_discovery enabled.""" + + class TestRegistryClass(RegistryMixin): + registry_auto_discovery = 
True + + with pytest.raises(ValueError, match="auto_package.*must be set"): + TestRegistryClass.auto_import_package_modules() + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("name", "expected_key"), + [ + ("custom_name", "custom_name"), + (["name1", "name2"], ["name1", "name2"]), + (None, "TestClass"), + ], + ) + def test_register(self, valid_instances, name, expected_key): + """Test register method with various name configurations.""" + registry_class, _ = valid_instances + + @registry_class.register(name) + class TestClass: + pass + + assert registry_class.registry is not None + if isinstance(expected_key, list): + for key in expected_key: + assert key in registry_class.registry + assert registry_class.registry[key] is TestClass + else: + assert expected_key in registry_class.registry + assert registry_class.registry[expected_key] is TestClass + + @pytest.mark.sanity + @pytest.mark.parametrize( + "invalid_name", + [123, 42.5, True, {"key": "value"}], + ) + def test_register_invalid(self, valid_instances, invalid_name): + """Test register method with invalid name types.""" + registry_class, _ = valid_instances + + # The register method returns a decorator, so we need to apply it to test + # validation + decorator = registry_class.register(invalid_name) + + class TestClass: + pass + + with pytest.raises( + ValueError, match="name must be a string or an iterable of strings" + ): + decorator(TestClass) + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("name", "expected_key"), + [ + ("custom_name", "custom_name"), + (["name1", "name2"], ["name1", "name2"]), + (None, "TestClass"), + ], + ) + def test_register_decorator(self, valid_instances, name, expected_key): + """Test register_decorator method with various name configurations.""" + registry_class, _ = valid_instances + + class TestClass: + pass + + registry_class.register_decorator(TestClass, name=name) + + assert registry_class.registry is not None + if isinstance(expected_key, list): + for key in expected_key: + assert key in registry_class.registry + assert registry_class.registry[key] is TestClass + else: + assert expected_key in registry_class.registry + assert registry_class.registry[expected_key] is TestClass + + @pytest.mark.sanity + @pytest.mark.parametrize( + "invalid_name", + [123, 42.5, True, {"key": "value"}], + ) + def test_register_decorator_invalid(self, valid_instances, invalid_name): + """Test register_decorator with invalid name types.""" + registry_class, _ = valid_instances + + class TestClass: + pass + + with pytest.raises( + ValueError, match="name must be a string or an iterable of strings" + ): + registry_class.register_decorator(TestClass, name=invalid_name) + + @pytest.mark.smoke + def test_auto_populate_registry(self): + """Test auto_populate_registry method with valid configuration.""" + + class TestAutoRegistry(RegistryMixin): + registry_auto_discovery = True + auto_package = "test.package" + + with mock.patch.object( + TestAutoRegistry, "auto_import_package_modules" + ) as mock_import: + result = TestAutoRegistry.auto_populate_registry() + assert result is True + mock_import.assert_called_once() + assert TestAutoRegistry.registry_populated is True + + # Second call should return False + result = TestAutoRegistry.auto_populate_registry() + assert result is False + mock_import.assert_called_once() + + @pytest.mark.sanity + def test_auto_populate_registry_invalid(self): + """Test auto_populate_registry when auto-discovery is disabled.""" + + class TestDisabledRegistry(RegistryMixin): + 
registry_auto_discovery = False + + with pytest.raises(ValueError, match="registry_auto_discovery is set to False"): + TestDisabledRegistry.auto_populate_registry() + + @pytest.mark.smoke + def test_registered_objects(self, valid_instances): + """Test registered_objects method with manual registration.""" + registry_class, config = valid_instances + + @registry_class.register("class1") + class TestClass1: + pass + + @registry_class.register("class2") + class TestClass2: + pass + + if config["registry_auto_discovery"]: + with mock.patch.object(registry_class, "auto_import_package_modules"): + objects = registry_class.registered_objects() + else: + objects = registry_class.registered_objects() + + assert isinstance(objects, tuple) + assert len(objects) == 2 + assert TestClass1 in objects + assert TestClass2 in objects + + @pytest.mark.sanity + def test_registered_objects_invalid(self): + """Test registered_objects when no objects are registered.""" + + class TestRegistryClass(RegistryMixin): + pass + + with pytest.raises( + ValueError, match="must be called after registering objects" + ): + TestRegistryClass.registered_objects() + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("register_name", "check_name", "expected"), + [ + ("test_name", "test_name", True), + ("TestName", "testname", True), + ("UPPERCASE", "uppercase", True), + ("test_name", "nonexistent", False), + ], + ) + def test_is_registered(self, valid_instances, register_name, check_name, expected): + """Test is_registered with various name combinations.""" + registry_class, _ = valid_instances + + @registry_class.register(register_name) + class TestClass: + pass + + result = registry_class.is_registered(check_name) + assert result == expected + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("register_name", "lookup_name"), + [ + ("test_name", "test_name"), + ("TestName", "testname"), + ("UPPERCASE", "uppercase"), + ], + ) + def test_get_registered_object(self, valid_instances, register_name, lookup_name): + """Test get_registered_object with valid names.""" + registry_class, _ = valid_instances + + @registry_class.register(register_name) + class TestClass: + pass + + result = registry_class.get_registered_object(lookup_name) + assert result is TestClass + + @pytest.mark.sanity + @pytest.mark.parametrize( + "lookup_name", + ["nonexistent", "wrong_name", "DIFFERENT_CASE"], + ) + def test_get_registered_object_invalid(self, valid_instances, lookup_name): + """Test get_registered_object with invalid names.""" + registry_class, _ = valid_instances + + @registry_class.register("valid_name") + class TestClass: + pass + + result = registry_class.get_registered_object(lookup_name) + assert result is None + + @pytest.mark.regression + def test_multiple_registries_isolation(self): + """Test that different registry classes maintain separate registries.""" + + class Registry1(RegistryMixin): + pass + + class Registry2(RegistryMixin): + pass + + @Registry1.register() + class TestClass1: + pass + + @Registry2.register() + class TestClass2: + pass + + assert Registry1.registry is not None + assert Registry2.registry is not None + assert Registry1.registry != Registry2.registry + assert "TestClass1" in Registry1.registry + assert "TestClass2" in Registry2.registry + assert "TestClass1" not in Registry2.registry + assert "TestClass2" not in Registry1.registry + + @pytest.mark.smoke + def test_auto_discovery_initialization(self): + """Test initialization of auto-discovery enabled registry.""" + + class TestAutoRegistry(RegistryMixin): + 
registry_auto_discovery = True + auto_package = "test_package.modules" + + assert TestAutoRegistry.registry is None + assert TestAutoRegistry.registry_populated is False + assert TestAutoRegistry.auto_package == "test_package.modules" + assert TestAutoRegistry.registry_auto_discovery is True + + @pytest.mark.smoke + def test_auto_discovery_registered_objects(self): + """Test automatic population during registered_objects call.""" + + class TestAutoRegistry(RegistryMixin): + registry_auto_discovery = True + auto_package = "test_package.modules" + + with mock.patch.object( + TestAutoRegistry, "auto_populate_registry" + ) as mock_populate: + TestAutoRegistry.registry = {"class1": "obj1", "class2": "obj2"} + objects = TestAutoRegistry.registered_objects() + mock_populate.assert_called_once() + assert objects == ("obj1", "obj2") + + @pytest.mark.sanity + def test_register_duplicate_registration(self, valid_instances): + """Test register method with duplicate names.""" + registry_class, _ = valid_instances + + @registry_class.register("duplicate_name") + class TestClass1: + pass + + with pytest.raises(ValueError, match="already registered"): + + @registry_class.register("duplicate_name") + class TestClass2: + pass + + @pytest.mark.sanity + def test_register_decorator_duplicate_registration(self, valid_instances): + """Test register_decorator with duplicate names.""" + registry_class, _ = valid_instances + + class TestClass1: + pass + + class TestClass2: + pass + + registry_class.register_decorator(TestClass1, name="duplicate_name") + with pytest.raises(ValueError, match="already registered"): + registry_class.register_decorator(TestClass2, name="duplicate_name") + + @pytest.mark.sanity + def test_register_decorator_invalid_list_element(self, valid_instances): + """Test register_decorator with invalid elements in name list.""" + registry_class, _ = valid_instances + + class TestClass: + pass + + with pytest.raises( + ValueError, match="name must be a string or a list of strings" + ): + registry_class.register_decorator(TestClass, name=["valid", 123]) + + @pytest.mark.sanity + def test_register_decorator_invalid_object(self, valid_instances): + """Test register_decorator with object lacking __name__ attribute.""" + registry_class, _ = valid_instances + + with pytest.raises(AttributeError): + registry_class.register_decorator("not_a_class") + + @pytest.mark.sanity + def test_register_decorator_empty_string_name(self, valid_instances): + """Test register_decorator with empty string name.""" + registry_class, _ = valid_instances + + class TestClass: + pass + + registry_class.register_decorator(TestClass, name="") + assert "" in registry_class.registry + assert registry_class.registry[""] is TestClass + + @pytest.mark.sanity + def test_register_decorator_none_in_list(self, valid_instances): + """Test register_decorator with None in name list.""" + registry_class, _ = valid_instances + + class TestClass: + pass + + with pytest.raises( + ValueError, match="name must be a string or a list of strings" + ): + registry_class.register_decorator(TestClass, name=["valid", None]) + + @pytest.mark.smoke + def test_is_registered_empty_registry(self, valid_instances): + """Test is_registered with empty registry.""" + registry_class, _ = valid_instances + + result = registry_class.is_registered("any_name") + assert result is False + + @pytest.mark.smoke + def test_get_registered_object_empty_registry(self, valid_instances): + """Test get_registered_object with empty registry.""" + registry_class, _ = 
valid_instances + + result = registry_class.get_registered_object("any_name") + assert result is None + + @pytest.mark.regression + def test_auto_registry_integration(self): + """Test complete auto-discovery workflow with mocked imports.""" + + class TestAutoRegistry(RegistryMixin): + registry_auto_discovery = True + auto_package = "test_package.modules" + + with ( + mock.patch("pkgutil.walk_packages") as mock_walk, + mock.patch("importlib.import_module") as mock_import, + ): + mock_package = mock.MagicMock() + mock_package.__path__ = ["test_package/modules"] + mock_package.__name__ = "test_package.modules" + + def import_module(name: str): + if name == "test_package.modules": + return mock_package + elif name == "test_package.modules.module1": + module = mock.MagicMock() + module.__name__ = "test_package.modules.module1" + + class Module1Class: + pass + + TestAutoRegistry.register_decorator(Module1Class, "Module1Class") + return module + else: + raise ImportError(f"No module named {name}") + + def walk_packages(package_path, package_name): + if package_name == "test_package.modules.": + return [(None, "test_package.modules.module1", False)] + else: + raise ValueError(f"Unknown package: {package_name}") + + mock_walk.side_effect = walk_packages + mock_import.side_effect = import_module + + objects = TestAutoRegistry.registered_objects() + assert len(objects) == 1 + assert TestAutoRegistry.registry_populated is True + assert TestAutoRegistry.registry is not None + assert "Module1Class" in TestAutoRegistry.registry + + @pytest.mark.smoke + def test_register_preserves_class_metadata(self): + """Test that registered classes retain docs, types, and methods.""" + + class TestRegistry(RegistryMixin): + pass + + @TestRegistry.register("documented_class") + class DocumentedClass: + """This is a documented class with methods and type hints.""" + + def __init__(self, value: int) -> None: + """Initialize with a value. + + :param value: An integer value + """ + self.value = value + + def get_value(self) -> int: + """Get the stored value. + + :return: The stored integer value + """ + return self.value + + def set_value(self, new_value: int) -> None: + """Set a new value. + + :param new_value: The new integer value to set + """ + self.value = new_value + + @classmethod + def from_string(cls, value_str: str) -> DocumentedClass: + """Create instance from string. + + :param value_str: String representation of value + :return: New DocumentedClass instance + """ + return cls(int(value_str)) + + @staticmethod + def validate_value(value: int) -> bool: + """Validate that a value is positive. 
+ + :param value: Value to validate + :return: True if positive, False otherwise + """ + return value > 0 + + # Check that the class was registered + assert TestRegistry.is_registered("documented_class") + registered_class = TestRegistry.get_registered_object("documented_class") + assert registered_class is DocumentedClass + + # Check that the class retains its documentation + assert registered_class.__doc__ is not None + assert "documented class with methods" in registered_class.__doc__ + assert registered_class.__init__.__doc__ is not None + assert "Initialize with a value" in registered_class.__init__.__doc__ + assert registered_class.get_value.__doc__ is not None + assert "Get the stored value" in registered_class.get_value.__doc__ + assert registered_class.set_value.__doc__ is not None + assert "Set a new value" in registered_class.set_value.__doc__ + assert registered_class.from_string.__doc__ is not None + assert "Create instance from string" in registered_class.from_string.__doc__ + assert registered_class.validate_value.__doc__ is not None + assert ( + "Validate that a value is positive" + in registered_class.validate_value.__doc__ + ) + + # Check that methods are callable and work correctly + instance = registered_class(42) + assert instance.get_value() == 42 + instance.set_value(100) + assert instance.get_value() == 100 + instance2 = registered_class.from_string("123") + assert instance2.get_value() == 123 + assert registered_class.validate_value(10) is True + assert registered_class.validate_value(-5) is False + + # Check that type annotations are preserved (if accessible) + if hasattr(inspect, "get_annotations"): + # Python 3.10+ + try: + annotations = inspect.get_annotations(registered_class.__init__) + assert "value" in annotations + assert annotations["value"] is int + return_ann = annotations.get("return") + assert return_ann is None or return_ann is type(None) + except (AttributeError, NameError): + # Fallback for older Python or missing annotations + pass + + # Check that the class name is preserved + assert registered_class.__name__ == "DocumentedClass" + assert registered_class.__qualname__.endswith("DocumentedClass") diff --git a/tests/unit/utils/test_singleton.py b/tests/unit/utils/test_singleton.py new file mode 100644 index 00000000..ee01ead1 --- /dev/null +++ b/tests/unit/utils/test_singleton.py @@ -0,0 +1,371 @@ +from __future__ import annotations + +import threading +import time + +import pytest + +from guidellm.utils.singleton import SingletonMixin, ThreadSafeSingletonMixin + + +class TestSingletonMixin: + """Test suite for SingletonMixin class.""" + + @pytest.fixture( + params=[ + {"init_value": "test_value"}, + {"init_value": "another_value"}, + ], + ids=["basic_singleton", "different_value"], + ) + def valid_instances(self, request): + """Provide parameterized test configurations for singleton testing.""" + config = request.param + + class TestSingleton(SingletonMixin): + def __init__(self): + # Check if we need to initialize before calling super().__init__() + should_initialize = not getattr(self, "_singleton_initialized", False) + super().__init__() + if should_initialize: + self.value = config["init_value"] + + return TestSingleton, config + + @pytest.mark.smoke + def test_class_signatures(self): + """Test SingletonMixin inheritance and exposed attributes.""" + assert hasattr(SingletonMixin, "__new__") + assert hasattr(SingletonMixin, "__init__") + assert hasattr(SingletonMixin, "initialized") + assert isinstance(SingletonMixin.initialized, property) + + 
+    @pytest.mark.smoke
+    def test_initialization(self, valid_instances):
+        """Test SingletonMixin initialization."""
+        singleton_class, config = valid_instances
+
+        # Create first instance
+        instance1 = singleton_class()
+
+        assert isinstance(instance1, singleton_class)
+        assert instance1.initialized is True
+        assert hasattr(instance1, "value")
+        assert instance1.value == config["init_value"]
+
+        # Check that the class has the singleton instance stored
+        instance_attr = f"_singleton_instance_{singleton_class.__name__}"
+        assert hasattr(singleton_class, instance_attr)
+        assert getattr(singleton_class, instance_attr) is instance1
+
+    @pytest.mark.smoke
+    def test_singleton_behavior(self, valid_instances):
+        """Test that multiple instantiations return the same instance."""
+        singleton_class, config = valid_instances
+
+        # Create multiple instances
+        instance1 = singleton_class()
+        instance2 = singleton_class()
+        instance3 = singleton_class()
+
+        # All should be the same instance
+        assert instance1 is instance2
+        assert instance2 is instance3
+        assert instance1 is instance3
+
+        # Value should remain from first initialization
+        assert hasattr(instance1, "value")
+        assert instance1.value == config["init_value"]
+        assert instance2.value == config["init_value"]
+        assert instance3.value == config["init_value"]
+
+    @pytest.mark.sanity
+    def test_initialization_called_once(self, valid_instances):
+        """Test that __init__ is only called once despite multiple instantiations."""
+        singleton_class, config = valid_instances
+
+        class TestSingletonWithCounter(SingletonMixin):
+            init_count = 0
+
+            def __init__(self):
+                should_initialize = not getattr(self, "_singleton_initialized", False)
+                super().__init__()
+                if should_initialize:
+                    TestSingletonWithCounter.init_count += 1
+                    self.value = config["init_value"]
+
+        # Create multiple instances
+        instance1 = TestSingletonWithCounter()
+        instance2 = TestSingletonWithCounter()
+
+        assert TestSingletonWithCounter.init_count == 1
+        assert instance1 is instance2
+        assert hasattr(instance1, "value")
+        assert instance1.value == config["init_value"]
+
+    @pytest.mark.regression
+    def test_multiple_singleton_classes_isolation(self):
+        """Test that different singleton classes maintain separate instances."""
+
+        class Singleton1(SingletonMixin):
+            def __init__(self):
+                should_initialize = not getattr(self, "_singleton_initialized", False)
+                super().__init__()
+                if should_initialize:
+                    self.value = "value1"
+
+        class Singleton2(SingletonMixin):
+            def __init__(self):
+                should_initialize = not getattr(self, "_singleton_initialized", False)
+                super().__init__()
+                if should_initialize:
+                    self.value = "value2"
+
+        instance1a = Singleton1()
+        instance2a = Singleton2()
+        instance1b = Singleton1()
+        instance2b = Singleton2()
+
+        # Each class has its own singleton instance
+        assert instance1a is instance1b
+        assert instance2a is instance2b
+        assert instance1a is not instance2a
+
+        # Each maintains its own value
+        assert hasattr(instance1a, "value")
+        assert hasattr(instance2a, "value")
+        assert instance1a.value == "value1"
+        assert instance2a.value == "value2"
+
+    @pytest.mark.regression
+    def test_inheritance_singleton_sharing(self):
+        """Test that inherited singleton classes keep separate singleton instances."""
+
+        class BaseSingleton(SingletonMixin):
+            def __init__(self):
+                should_initialize = not getattr(self, "_singleton_initialized", False)
+                super().__init__()
+                if should_initialize:
+                    self.value = "base_value"
+
+        class ChildSingleton(BaseSingleton):
+            def __init__(self):
should_initialize = not getattr(self, "_singleton_initialized", False) + super().__init__() + if should_initialize: + self.extra = "extra_value" + + # Child classes now have separate singleton instances + base_instance = BaseSingleton() + child_instance = ChildSingleton() + + # They should be different instances now (fixed inheritance behavior) + assert base_instance is not child_instance + assert hasattr(base_instance, "value") + assert base_instance.value == "base_value" + assert hasattr(child_instance, "value") + assert child_instance.value == "base_value" + assert hasattr(child_instance, "extra") + assert child_instance.extra == "extra_value" + + @pytest.mark.sanity + def test_without_super_init_call(self): + """Test singleton behavior when subclass doesn't call super().__init__().""" + + class BadSingleton(SingletonMixin): + def __init__(self): + # Not calling super().__init__() + self.value = "bad_value" + + instance1 = BadSingleton() + instance2 = BadSingleton() + + assert instance1 is instance2 + assert hasattr(instance1, "initialized") + assert instance1.initialized is False + + +class TestThreadSafeSingletonMixin: + """Test suite for ThreadSafeSingletonMixin class.""" + + @pytest.fixture( + params=[ + {"init_value": "thread_safe_value"}, + {"init_value": "concurrent_value"}, + ], + ids=["basic_thread_safe", "concurrent_test"], + ) + def valid_instances(self, request): + """Fixture providing test data for ThreadSafeSingletonMixin subclasses.""" + config = request.param + + class TestThreadSafeSingleton(ThreadSafeSingletonMixin): + def __init__(self): + should_initialize = not getattr(self, "_singleton_initialized", False) + super().__init__() + if should_initialize: + self.value = config["init_value"] + + return TestThreadSafeSingleton, config + + @pytest.mark.smoke + def test_class_signatures(self): + """Test ThreadSafeSingletonMixin inheritance and exposed attributes.""" + assert issubclass(ThreadSafeSingletonMixin, SingletonMixin) + assert hasattr(ThreadSafeSingletonMixin, "get_singleton_lock") + assert hasattr(ThreadSafeSingletonMixin, "__new__") + assert hasattr(ThreadSafeSingletonMixin, "__init__") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test ThreadSafeSingletonMixin initialization.""" + singleton_class, config = valid_instances + + instance = singleton_class() + + assert isinstance(instance, singleton_class) + assert instance.initialized is True + assert hasattr(instance, "value") + assert instance.value == config["init_value"] + assert hasattr(instance, "thread_lock") + lock_type = type(threading.Lock()) + assert isinstance(instance.thread_lock, lock_type) + + @pytest.mark.smoke + def test_singleton_behavior(self, valid_instances): + """Test multiple instantiations return same instance with thread safety.""" + singleton_class, config = valid_instances + + instance1 = singleton_class() + instance2 = singleton_class() + + assert instance1 is instance2 + assert hasattr(instance1, "value") + assert instance1.value == config["init_value"] + assert hasattr(instance1, "thread_lock") + + @pytest.mark.regression + def test_thread_safety_concurrent_creation(self, valid_instances): + """Test thread safety during concurrent instance creation.""" + singleton_class, config = valid_instances + + instances = [] + exceptions = [] + creation_count = 0 + lock = threading.Lock() + + class ThreadSafeTestSingleton(ThreadSafeSingletonMixin): + def __init__(self): + should_initialize = not getattr(self, "_singleton_initialized", False) + super().__init__() + 
if should_initialize: + nonlocal creation_count + with lock: + creation_count += 1 + + time.sleep(0.01) + self.value = config["init_value"] + + def create_instance(): + try: + instance = ThreadSafeTestSingleton() + instances.append(instance) + except (TypeError, ValueError, AttributeError) as exc: + exceptions.append(exc) + + threads = [] + for _ in range(10): + thread = threading.Thread(target=create_instance) + threads.append(thread) + + for thread in threads: + thread.start() + + for thread in threads: + thread.join() + + assert len(exceptions) == 0, f"Exceptions occurred: {exceptions}" + + assert len(instances) == 10 + for instance in instances: + assert instance is instances[0] + + assert creation_count == 1 + assert all(instance.value == config["init_value"] for instance in instances) + + @pytest.mark.sanity + def test_thread_lock_creation(self, valid_instances): + """Test that thread_lock is created during initialization.""" + singleton_class, config = valid_instances + + instance1 = singleton_class() + instance2 = singleton_class() + + assert hasattr(instance1, "thread_lock") + lock_type = type(threading.Lock()) + assert isinstance(instance1.thread_lock, lock_type) + assert instance1.thread_lock is instance2.thread_lock + + @pytest.mark.regression + def test_multiple_thread_safe_classes_isolation(self): + """Test thread-safe singleton classes behavior with separate locks.""" + + class ThreadSafeSingleton1(ThreadSafeSingletonMixin): + def __init__(self): + should_initialize = not getattr(self, "_singleton_initialized", False) + super().__init__() + if should_initialize: + self.value = "value1" + + class ThreadSafeSingleton2(ThreadSafeSingletonMixin): + def __init__(self): + should_initialize = not getattr(self, "_singleton_initialized", False) + super().__init__() + if should_initialize: + self.value = "value2" + + instance1 = ThreadSafeSingleton1() + instance2 = ThreadSafeSingleton2() + + lock1 = ThreadSafeSingleton1.get_singleton_lock() + lock2 = ThreadSafeSingleton2.get_singleton_lock() + + assert lock1 is not None + assert lock2 is not None + assert lock1 is not lock2 + + assert instance1 is not instance2 + assert hasattr(instance1, "value") + assert hasattr(instance2, "value") + assert instance1.value == "value1" + assert instance2.value == "value2" + + @pytest.mark.sanity + def test_inheritance_with_thread_safety(self): + """Test inheritance behavior with thread-safe singletons.""" + + class BaseThreadSafeSingleton(ThreadSafeSingletonMixin): + def __init__(self): + should_initialize = not getattr(self, "_singleton_initialized", False) + super().__init__() + if should_initialize: + self.value = "base_value" + + class ChildThreadSafeSingleton(BaseThreadSafeSingleton): + def __init__(self): + super().__init__() + + base_instance = BaseThreadSafeSingleton() + child_instance = ChildThreadSafeSingleton() + + base_lock = BaseThreadSafeSingleton.get_singleton_lock() + child_lock = ChildThreadSafeSingleton.get_singleton_lock() + + assert base_lock is not None + assert child_lock is not None + assert base_lock is not child_lock + + assert base_instance is not child_instance + assert hasattr(base_instance, "value") + assert base_instance.value == "base_value" + assert hasattr(base_instance, "thread_lock") diff --git a/tests/unit/utils/test_synchronous.py b/tests/unit/utils/test_synchronous.py new file mode 100644 index 00000000..1a9ea2c9 --- /dev/null +++ b/tests/unit/utils/test_synchronous.py @@ -0,0 +1,238 @@ +from __future__ import annotations + +import asyncio +import multiprocessing 
+import threading +from functools import wraps +from multiprocessing.synchronize import Barrier as ProcessingBarrier +from multiprocessing.synchronize import Event as ProcessingEvent +from typing import Union + +import pytest + +from guidellm.utils.synchronous import ( + SyncObjectTypesAlias, + wait_for_sync_barrier, + wait_for_sync_event, + wait_for_sync_objects, +) + + +def async_timeout(delay: float): + """Decorator to add timeout to async functions.""" + + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +def test_sync_object_types_alias(): + """Test that SyncObjectTypesAlias is defined correctly as a type alias.""" + assert hasattr(SyncObjectTypesAlias, "__origin__") + if hasattr(SyncObjectTypesAlias, "__args__"): + actual_type = SyncObjectTypesAlias.__args__[0] + assert hasattr(actual_type, "__origin__") + assert actual_type.__origin__ is Union + union_args = actual_type.__args__ + assert threading.Event in union_args + assert ProcessingEvent in union_args + assert threading.Barrier in union_args + assert ProcessingBarrier in union_args + + +class TestWaitForSyncEvent: + """Test suite for wait_for_sync_event function.""" + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "event_type", + [threading.Event, multiprocessing.Event], + ids=["threading", "multiprocessing"], + ) + @async_timeout(2.0) + async def test_invocation(self, event_type): + """Test wait_for_sync_event with valid events that get set.""" + event: threading.Event | ProcessingEvent = event_type() + + async def set_event(): + await asyncio.sleep(0.01) + event.set() + + asyncio.create_task(set_event()) + await wait_for_sync_event(event, poll_interval=0.001) + assert event.is_set() + + @pytest.mark.sanity + @pytest.mark.asyncio + @pytest.mark.parametrize( + "event_type", + [threading.Event, multiprocessing.Event], + ids=["threading", "multiprocessing"], + ) + @async_timeout(2.0) + async def test_cancellation_stops_waiting(self, event_type): + """Test that cancelling the task stops waiting for the event.""" + event: threading.Event | ProcessingEvent = event_type() + + async def waiter(): + await wait_for_sync_event(event, poll_interval=0.001) + + task = asyncio.create_task(waiter()) + await asyncio.sleep(0.02) + task.cancel() + + with pytest.raises(asyncio.CancelledError): + await task + + +class TestWaitForSyncBarrier: + """Test suite for wait_for_sync_barrier function.""" + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + "barrier_type", + [threading.Barrier, multiprocessing.Barrier], + ids=["threading", "multiprocessing"], + ) + @async_timeout(5.0) + async def test_invocation(self, barrier_type): + """Test wait_for_sync_barrier with barrier that gets reached.""" + barrier: threading.Barrier | ProcessingBarrier = barrier_type(2) + + async def reach_barrier(): + await asyncio.sleep(0.01) + await asyncio.to_thread(barrier.wait) + + task = asyncio.create_task(reach_barrier()) + await wait_for_sync_barrier(barrier, poll_interval=0.01) + await task + + @pytest.mark.sanity + @pytest.mark.asyncio + @pytest.mark.parametrize( + "barrier_type", + [threading.Barrier, multiprocessing.Barrier], + ids=["threading", "multiprocessing"], + ) + @async_timeout(2.0) + async def test_cancellation_stops_waiting(self, barrier_type): + """Test that cancelling the task stops waiting for the barrier.""" + barrier: threading.Barrier | ProcessingBarrier = 
barrier_type(2) + + async def waiter(): + await wait_for_sync_barrier(barrier, 0.01) + + task = asyncio.create_task(waiter()) + await asyncio.sleep(0.1) + task.cancel() + + with pytest.raises(asyncio.CancelledError): + await task + + +class TestWaitForSyncObjects: + """Test suite for wait_for_sync_objects function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("objects_types", "expected_result"), + [ + (threading.Event, 0), + (multiprocessing.Event, 0), + (threading.Barrier, 0), + (multiprocessing.Barrier, 0), + ([threading.Event, multiprocessing.Barrier], 1), + ([multiprocessing.Event, threading.Barrier], 0), + ( + [ + threading.Event, + multiprocessing.Event, + threading.Barrier, + multiprocessing.Barrier, + ], + 2, + ), + ( + { + "multiprocessing.Event": multiprocessing.Event, + "threading.Barrier": threading.Barrier, + }, + "threading.Barrier", + ), + ( + { + "threading.Event": threading.Event, + "multiprocessing.Barrier": multiprocessing.Barrier, + }, + "threading.Event", + ), + ( + { + "multiprocessing.Event": multiprocessing.Event, + "threading.Event": threading.Event, + "multiprocessing.Barrier": multiprocessing.Barrier, + "threading.Barrier": threading.Barrier, + }, + "threading.Event", + ), + ], + ids=[ + "threading_event", + "multiprocessing_event", + "threading_barrier", + "multiprocessing_barrier", + "mixed_list_event_barrier_1", + "mixed_list_event_barrier_2", + "mixed_list_all", + "mixed_dict_event_barrier_1", + "mixed_dict_event_barrier_2", + "mixed_dict_all", + ], + ) + @pytest.mark.asyncio + @async_timeout(2.0) + async def test_invocation(self, objects_types, expected_result): + """Test wait_for_sync_objects with various object configurations.""" + if isinstance(objects_types, list): + objects = [ + obj() + if obj not in (threading.Barrier, multiprocessing.Barrier) + else obj(2) + for obj in objects_types + ] + elif isinstance(objects_types, dict): + objects = { + key: ( + obj() + if obj not in (threading.Barrier, multiprocessing.Barrier) + else obj(2) + ) + for key, obj in objects_types.items() + } + else: + objects = [ + objects_types() + if objects_types not in (threading.Barrier, multiprocessing.Barrier) + else objects_types(2) + ] + + async def set_target(): + await asyncio.sleep(0.01) + obj = objects[expected_result] + if isinstance(obj, (threading.Event, ProcessingEvent)): + obj.set() + else: + await asyncio.to_thread(obj.wait) + + task = asyncio.create_task(set_target()) + result = await wait_for_sync_objects(objects, poll_interval=0.001) + await task + + assert result == expected_result diff --git a/tests/unit/utils/test_text.py b/tests/unit/utils/test_text.py new file mode 100644 index 00000000..50f18ce3 --- /dev/null +++ b/tests/unit/utils/test_text.py @@ -0,0 +1,531 @@ +from __future__ import annotations + +import gzip +import tempfile +from pathlib import Path +from unittest.mock import Mock, patch + +import httpx +import pytest + +from guidellm.utils.text import ( + MAX_PATH_LENGTH, + EndlessTextCreator, + clean_text, + filter_text, + format_value_display, + is_puncutation, + load_text, + split_text, + split_text_list_by_length, +) + + +def test_max_path_length(): + """Test that MAX_PATH_LENGTH is correctly defined.""" + assert isinstance(MAX_PATH_LENGTH, int) + assert MAX_PATH_LENGTH == 4096 + + +class TestFormatValueDisplay: + """Test suite for format_value_display.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ( + "value", + "label", + "units", + "total_characters", + "digits_places", + "decimal_places", + "expected", + ), + [ + 
(42.0, "test", "", None, None, None, "42 [info]test[/info]"), + (42.5, "test", "ms", None, None, 1, "42.5ms [info]test[/info]"), + (42.123, "test", "", None, 5, 2, " 42.12 [info]test[/info]"), + ( + 42.0, + "test", + "ms", + 30, + None, + 0, + " 42ms [info]test[/info]", + ), + ], + ) + def test_invocation( + self, + value, + label, + units, + total_characters, + digits_places, + decimal_places, + expected, + ): + """Test format_value_display with various parameters.""" + result = format_value_display( + value=value, + label=label, + units=units, + total_characters=total_characters, + digits_places=digits_places, + decimal_places=decimal_places, + ) + assert label in result + assert units in result + value_check = ( + str(int(value)) + if decimal_places == 0 + else ( + f"{value:.{decimal_places}f}" + if decimal_places is not None + else str(value) + ) + ) + assert value_check in result or str(value) in result + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("value", "label"), + [ + (None, "test"), + (42.0, None), + ("not_number", "test"), + ], + ) + def test_invocation_with_none_values(self, value, label): + """Test format_value_display with None/invalid inputs still works.""" + result = format_value_display(value, label) + assert isinstance(result, str) + if label is not None: + assert str(label) in result + if value is not None: + assert str(value) in result + + +class TestSplitTextListByLength: + """Test suite for split_text_list_by_length.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ( + "text_list", + "max_characters", + "pad_horizontal", + "pad_vertical", + "expected_structure", + ), + [ + ( + ["hello world", "test"], + 5, + False, + False, + [["hello", "world"], ["test"]], + ), + ( + ["short", "longer text"], + [5, 10], + True, + True, + [[" short"], ["longer", "text"]], + ), + ( + ["a", "b", "c"], + 10, + True, + True, + [[" a"], [" b"], [" c"]], + ), + ], + ) + def test_invocation( + self, + text_list, + max_characters, + pad_horizontal, + pad_vertical, + expected_structure, + ): + """Test split_text_list_by_length with various parameters.""" + result = split_text_list_by_length( + text_list, max_characters, pad_horizontal, pad_vertical + ) + assert len(result) == len(text_list) + if pad_vertical: + max_lines = max(len(lines) for lines in result) + assert all(len(lines) == max_lines for lines in result) + + @pytest.mark.sanity + def test_invalid_max_characters_length(self): + """Test split_text_list_by_length with mismatched max_characters length.""" + error_msg = "max_characters must be a list of the same length" + with pytest.raises(ValueError, match=error_msg): + split_text_list_by_length(["a", "b"], [5, 10, 15]) + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("text_list", "max_characters"), + [ + (None, 5), + (["test"], None), + (["test"], []), + ], + ) + def test_invalid_invocation(self, text_list, max_characters): + """Test split_text_list_by_length with invalid inputs.""" + with pytest.raises((TypeError, ValueError)): + split_text_list_by_length(text_list, max_characters) + + +class TestFilterText: + """Test suite for filter_text.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("text", "filter_start", "filter_end", "expected"), + [ + ("hello world test", "world", None, "world test"), + ("hello world test", None, "world", "hello "), + ("hello world test", "hello", "test", "hello world "), + ("hello world test", 6, 11, "world test"), + ("hello world test", 0, 5, "hello"), + ("hello world test", None, None, "hello world test"), + ], + ) + def 
test_invocation(self, text, filter_start, filter_end, expected): + """Test filter_text with various start and end markers.""" + result = filter_text(text, filter_start, filter_end) + assert result == expected + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("text", "filter_start", "filter_end"), + [ + ("hello", "notfound", None), + ("hello", None, "notfound"), + ("hello", "invalid_type", None), + ("hello", None, "invalid_type"), + ], + ) + def test_invalid_invocation(self, text, filter_start, filter_end): + """Test filter_text with invalid markers.""" + with pytest.raises((ValueError, TypeError)): + filter_text(text, filter_start, filter_end) + + +class TestCleanText: + """Test suite for clean_text.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("text", "expected"), + [ + ("hello world", "hello world"), + (" hello\n\nworld ", "hello world"), + ("hello\tworld\r\ntest", "hello world test"), + ("", ""), + (" ", ""), + ], + ) + def test_invocation(self, text, expected): + """Test clean_text with various whitespace scenarios.""" + result = clean_text(text) + assert result == expected + + @pytest.mark.sanity + @pytest.mark.parametrize( + "text", + [ + None, + 123, + ], + ) + def test_invalid_invocation(self, text): + """Test clean_text with invalid inputs.""" + with pytest.raises((TypeError, AttributeError)): + clean_text(text) + + +class TestSplitText: + """Test suite for split_text.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("text", "split_punctuation", "expected"), + [ + ("hello world", False, ["hello", "world"]), + ("hello, world!", True, ["hello", ",", "world", "!"]), + ("test.example", False, ["test.example"]), + ("test.example", True, ["test", ".", "example"]), + ("", False, []), + ], + ) + def test_invocation(self, text, split_punctuation, expected): + """Test split_text with various punctuation options.""" + result = split_text(text, split_punctuation) + assert result == expected + + @pytest.mark.sanity + @pytest.mark.parametrize( + "text", + [ + None, + 123, + ], + ) + def test_invalid_invocation(self, text): + """Test split_text with invalid inputs.""" + with pytest.raises((TypeError, AttributeError)): + split_text(text) + + +class TestLoadText: + """Test suite for load_text.""" + + @pytest.mark.smoke + def test_empty_data(self): + """Test load_text with empty data.""" + result = load_text("") + assert result == "" + + @pytest.mark.smoke + def test_raw_text(self): + """Test load_text with raw text that's not a file.""" + long_text = "a" * (MAX_PATH_LENGTH + 1) + result = load_text(long_text) + assert result == long_text + + @pytest.mark.smoke + def test_local_file(self): + """Test load_text with local file.""" + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt") as tmp: + test_content = "test file content" + tmp.write(test_content) + tmp.flush() + + result = load_text(tmp.name) + assert result == test_content + + Path(tmp.name).unlink() + + @pytest.mark.smoke + def test_gzipped_file(self): + """Test load_text with gzipped file.""" + with tempfile.NamedTemporaryFile(delete=False, suffix=".gz") as tmp: + test_content = "test gzipped content" + with gzip.open(tmp.name, "wt") as gzf: + gzf.write(test_content) + + result = load_text(tmp.name) + assert result == test_content + + Path(tmp.name).unlink() + + @pytest.mark.smoke + @patch("httpx.Client") + def test_url_loading(self, mock_client): + """Test load_text with HTTP URL.""" + mock_response = Mock() + mock_response.text = "url content" + 
mock_client.return_value.__enter__.return_value.get.return_value = mock_response + + result = load_text("http://example.com/test.txt") + assert result == "url content" + + @pytest.mark.smoke + @patch("guidellm.utils.text.files") + @patch("guidellm.utils.text.as_file") + def test_package_data_loading(self, mock_as_file, mock_files): + """Test load_text with package data.""" + mock_resource = Mock() + mock_files.return_value.joinpath.return_value = mock_resource + + mock_file = Mock() + mock_file.read.return_value = "package data content" + mock_as_file.return_value.__enter__.return_value = mock_file + + with patch("gzip.open") as mock_gzip: + mock_gzip.return_value.__enter__.return_value = mock_file + result = load_text("data:test.txt") + assert result == "package data content" + + @pytest.mark.sanity + def test_nonexistent_file(self): + """Test load_text with nonexistent file returns the path as raw text.""" + result = load_text("/nonexistent/path/file.txt") + assert result == "/nonexistent/path/file.txt" + + @pytest.mark.sanity + @patch("httpx.Client") + def test_url_error(self, mock_client): + """Test load_text with HTTP error.""" + mock_client.return_value.__enter__.return_value.get.side_effect = ( + httpx.HTTPStatusError("HTTP error", request=None, response=None) + ) + + with pytest.raises(httpx.HTTPStatusError): + load_text("http://example.com/error.txt") + + +class TestIsPuncutation: + """Test suite for is_puncutation.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("text", "expected"), + [ + (".", True), + (",", True), + ("!", True), + ("?", True), + (";", True), + ("a", False), + ("1", False), + (" ", False), + ("ab", False), + ("", False), + ], + ) + def test_invocation(self, text, expected): + """Test is_puncutation with various characters.""" + result = is_puncutation(text) + assert result == expected + + @pytest.mark.sanity + @pytest.mark.parametrize( + "text", + [ + None, + 123, + ], + ) + def test_invalid_invocation(self, text): + """Test is_puncutation with invalid inputs.""" + with pytest.raises((TypeError, AttributeError)): + is_puncutation(text) + + +class TestEndlessTextCreator: + """Test suite for EndlessTextCreator.""" + + @pytest.fixture( + params=[ + { + "data": "hello world test", + "filter_start": None, + "filter_end": None, + }, + { + "data": "hello world test", + "filter_start": "world", + "filter_end": None, + }, + {"data": "one two three four", "filter_start": 0, "filter_end": 9}, + ], + ids=["no_filter", "string_filter", "index_filter"], + ) + def valid_instances(self, request): + """Fixture providing test data for EndlessTextCreator.""" + constructor_args = request.param + instance = EndlessTextCreator(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test EndlessTextCreator signatures and methods.""" + assert hasattr(EndlessTextCreator, "__init__") + assert hasattr(EndlessTextCreator, "create_text") + instance = EndlessTextCreator("test") + assert hasattr(instance, "data") + assert hasattr(instance, "text") + assert hasattr(instance, "filtered_text") + assert hasattr(instance, "words") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test EndlessTextCreator initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, EndlessTextCreator) + assert instance.data == constructor_args["data"] + assert isinstance(instance.text, str) + assert isinstance(instance.filtered_text, str) + assert isinstance(instance.words, list) + + 
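+    # Behavior sketched for the checks below: EndlessTextCreator splits its input
+    # into `words`, and create_text(start, length) walks that list modulo its
+    # length, so arbitrarily long text can be produced from a short seed. The
+    # seed string and lengths here are illustrative assumptions, not suite
+    # fixtures:
+    #
+    #     creator = EndlessTextCreator("lorem ipsum dolor sit amet")
+    #     piece = creator.create_text(start=0, length=12)  # wraps past 5 words
+    #     assert isinstance(piece, str) and piece.startswith("lorem")
+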
@pytest.mark.sanity + @pytest.mark.parametrize( + ("data", "filter_start", "filter_end"), + [ + ("test", "notfound", None), + ], + ) + def test_invalid_initialization_values(self, data, filter_start, filter_end): + """Test EndlessTextCreator with invalid initialization values.""" + with pytest.raises((TypeError, ValueError)): + EndlessTextCreator(data, filter_start, filter_end) + + @pytest.mark.smoke + def test_initialization_with_none(self): + """Test EndlessTextCreator handles None data gracefully.""" + instance = EndlessTextCreator(None) + assert isinstance(instance, EndlessTextCreator) + assert instance.data is None + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("start", "length", "expected_length"), + [ + (0, 5, 5), + (2, 3, 3), + (0, 0, 0), + ], + ) + def test_create_text(self, valid_instances, start, length, expected_length): + """Test EndlessTextCreator.create_text.""" + instance, constructor_args = valid_instances + result = instance.create_text(start, length) + assert isinstance(result, str) + if length > 0 and instance.words: + assert len(result) > 0 + + @pytest.mark.smoke + def test_create_text_cycling(self): + """Test EndlessTextCreator.create_text cycling behavior.""" + instance = EndlessTextCreator("one two three") + result1 = instance.create_text(0, 3) + result2 = instance.create_text(3, 3) + assert isinstance(result1, str) + assert isinstance(result2, str) + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("start", "length"), + [ + ("invalid", 5), + (0, "invalid"), + ], + ) + def test_create_text_invalid(self, valid_instances, start, length): + """Test EndlessTextCreator.create_text with invalid inputs.""" + instance, constructor_args = valid_instances + with pytest.raises((TypeError, ValueError)): + instance.create_text(start, length) + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("start", "length", "min_length"), + [ + (-1, 5, 0), + (0, -1, 0), + ], + ) + def test_create_text_edge_cases(self, valid_instances, start, length, min_length): + """Test EndlessTextCreator.create_text with edge cases.""" + instance, constructor_args = valid_instances + result = instance.create_text(start, length) + assert isinstance(result, str) + assert len(result) >= min_length diff --git a/tests/unit/utils/text.py b/tests/unit/utils/text.py deleted file mode 100644 index ae0fa52f..00000000 --- a/tests/unit/utils/text.py +++ /dev/null @@ -1,13 +0,0 @@ -import pytest - -from guidellm.utils.text import camelize_str - - -@pytest.mark.smoke -def test_camelize_str_camelizes_string(): - assert camelize_str("no_longer_snake_case") == "noLongerSnakeCase" - - -@pytest.mark.smoke -def test_camelize_str_leaves_non_snake_case_text_untouched(): - assert camelize_str("notsnakecase") == "notsnakecase" From d15cf17f07ee438646bdd2736245fef540918ede Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 11:54:01 +0000 Subject: [PATCH 06/90] Remove old pydantic file that is now replaced Signed-off-by: Mark Kurtz --- src/guidellm/objects/__init__.py | 1 - src/guidellm/objects/pydantic.py | 89 -------------------------------- 2 files changed, 90 deletions(-) delete mode 100644 src/guidellm/objects/pydantic.py diff --git a/src/guidellm/objects/__init__.py b/src/guidellm/objects/__init__.py index 89e3c9b9..f97f1ef3 100644 --- a/src/guidellm/objects/__init__.py +++ b/src/guidellm/objects/__init__.py @@ -1,4 +1,3 @@ -from .pydantic import StandardBaseModel, StatusBreakdown from .statistics import ( DistributionSummary, Percentiles, diff --git a/src/guidellm/objects/pydantic.py 
b/src/guidellm/objects/pydantic.py deleted file mode 100644 index fcededcf..00000000 --- a/src/guidellm/objects/pydantic.py +++ /dev/null @@ -1,89 +0,0 @@ -import json -from pathlib import Path -from typing import Any, Generic, Optional, TypeVar - -import yaml -from loguru import logger -from pydantic import BaseModel, ConfigDict, Field - -__all__ = ["StandardBaseModel", "StatusBreakdown"] - -T = TypeVar("T", bound="StandardBaseModel") - - -class StandardBaseModel(BaseModel): - """ - A base class for Pydantic models throughout GuideLLM enabling standard - configuration and logging. - """ - - model_config = ConfigDict( - extra="ignore", - use_enum_values=True, - validate_assignment=True, - from_attributes=True, - ) - - def __init__(self, /, **data: Any) -> None: - super().__init__(**data) - logger.debug( - "Initialized new instance of {} with data: {}", - self.__class__.__name__, - data, - ) - - @classmethod - def get_default(cls: type[T], field: str) -> Any: - """Get default values for model fields""" - return cls.model_fields[field].default - - @classmethod - def from_file(cls: type[T], filename: Path, overrides: Optional[dict] = None) -> T: - """ - Attempt to create a new instance of the model using - data loaded from json or yaml file. - """ - try: - with filename.open() as f: - if str(filename).endswith(".json"): - data = json.load(f) - else: # Assume everything else is yaml - data = yaml.safe_load(f) - except (json.JSONDecodeError, yaml.YAMLError) as e: - logger.error(f"Failed to parse {filename} as type {cls.__name__}") - raise ValueError(f"Error when parsing file: {filename}") from e - - data.update(overrides) - return cls.model_validate(data) - - -SuccessfulT = TypeVar("SuccessfulT") -ErroredT = TypeVar("ErroredT") -IncompleteT = TypeVar("IncompleteT") -TotalT = TypeVar("TotalT") - - -class StatusBreakdown(BaseModel, Generic[SuccessfulT, ErroredT, IncompleteT, TotalT]): - """ - A base class for Pydantic models that are separated by statuses including - successful, incomplete, and errored. It additionally enables the inclusion - of total, which is intended as the combination of all statuses. - Total may or may not be used depending on if it duplicates information. 
- """ - - successful: SuccessfulT = Field( - description="The results with a successful status.", - default=None, # type: ignore[assignment] - ) - errored: ErroredT = Field( - description="The results with an errored status.", - default=None, # type: ignore[assignment] - ) - incomplete: IncompleteT = Field( - description="The results with an incomplete status.", - default=None, # type: ignore[assignment] - ) - total: TotalT = Field( - description="The combination of all statuses.", - default=None, # type: ignore[assignment] - ) From 5b83c2d371713bd821d6f8cda4ec2bb76a8b400c Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 12:05:28 +0000 Subject: [PATCH 07/90] fixes from copilot review Signed-off-by: Mark Kurtz --- src/guidellm/benchmark/progress.py | 2 +- src/guidellm/utils/__init__.py | 4 ++-- src/guidellm/utils/synchronous.py | 2 +- src/guidellm/utils/text.py | 8 ++++---- tests/unit/utils/test_text.py | 14 +++++++------- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/guidellm/benchmark/progress.py b/src/guidellm/benchmark/progress.py index d6f437e1..1232107b 100644 --- a/src/guidellm/benchmark/progress.py +++ b/src/guidellm/benchmark/progress.py @@ -253,7 +253,7 @@ def format_progress_display( decimal_places: Optional[int] = None, ) -> str: if decimal_places is None and digits_places is None: - formatted_number = f"{value}:.0f" + formatted_number = f"{value:.0f}" elif digits_places is None: formatted_number = f"{value:.{decimal_places}f}" elif decimal_places is None: diff --git a/src/guidellm/utils/__init__.py b/src/guidellm/utils/__init__.py index 83a276b2..20daeea4 100644 --- a/src/guidellm/utils/__init__.py +++ b/src/guidellm/utils/__init__.py @@ -58,7 +58,7 @@ clean_text, filter_text, format_value_display, - is_puncutation, + is_punctuation, load_text, split_text, split_text_list_by_length, @@ -109,7 +109,7 @@ "filter_text", "format_value_display", "get_literal_vals", - "is_puncutation", + "is_punctuation", "load_text", "safe_add", "safe_divide", diff --git a/src/guidellm/utils/synchronous.py b/src/guidellm/utils/synchronous.py index 3bec0247..14f3d908 100644 --- a/src/guidellm/utils/synchronous.py +++ b/src/guidellm/utils/synchronous.py @@ -11,7 +11,7 @@ import asyncio import contextlib -from datetime import time +import time from multiprocessing.synchronize import Barrier as ProcessingBarrier from multiprocessing.synchronize import Event as ProcessingEvent from threading import Barrier as ThreadingBarrier diff --git a/src/guidellm/utils/text.py b/src/guidellm/utils/text.py index 519b46c3..8385ec7b 100644 --- a/src/guidellm/utils/text.py +++ b/src/guidellm/utils/text.py @@ -31,7 +31,7 @@ "clean_text", "filter_text", "format_value_display", - "is_puncutation", + "is_punctuation", "load_text", "split_text", "split_text_list_by_length", @@ -64,7 +64,7 @@ def format_value_display( :return: Formatted string with value, units, and colored label """ if decimal_places is None and digits_places is None: - formatted_number = f"{value}:.0f" + formatted_number = f"{value:.0f}" elif digits_places is None: formatted_number = f"{value:.{decimal_places}f}" elif decimal_places is None: @@ -268,7 +268,7 @@ def load_text(data: str | Path, encoding: str | None = None) -> str: return data.read_text(encoding=encoding) -def is_puncutation(text: str) -> bool: +def is_punctuation(text: str) -> bool: """ Check if a single character is a punctuation mark. 
@@ -332,7 +332,7 @@ def create_text(self, start: int, length: int) -> str: index = (start + counter) % len(self.words) add_word = self.words[index] - if counter != 0 and not is_puncutation(add_word): + if counter != 0 and not is_punctuation(add_word): text += " " text += add_word diff --git a/tests/unit/utils/test_text.py b/tests/unit/utils/test_text.py index 50f18ce3..3774ca1f 100644 --- a/tests/unit/utils/test_text.py +++ b/tests/unit/utils/test_text.py @@ -14,7 +14,7 @@ clean_text, filter_text, format_value_display, - is_puncutation, + is_punctuation, load_text, split_text, split_text_list_by_length, @@ -372,8 +372,8 @@ def test_url_error(self, mock_client): load_text("http://example.com/error.txt") -class TestIsPuncutation: - """Test suite for is_puncutation.""" +class TestIsPunctuation: + """Test suite for is_punctuation.""" @pytest.mark.smoke @pytest.mark.parametrize( @@ -392,8 +392,8 @@ class TestIsPuncutation: ], ) def test_invocation(self, text, expected): - """Test is_puncutation with various characters.""" - result = is_puncutation(text) + """Test is_punctuation with various characters.""" + result = is_punctuation(text) assert result == expected @pytest.mark.sanity @@ -405,9 +405,9 @@ def test_invocation(self, text, expected): ], ) def test_invalid_invocation(self, text): - """Test is_puncutation with invalid inputs.""" + """Test is_punctuation with invalid inputs.""" with pytest.raises((TypeError, AttributeError)): - is_puncutation(text) + is_punctuation(text) class TestEndlessTextCreator: From c84299b99d3639101ef77fe5ea24cdc95b3afe88 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 04:05:16 +0000 Subject: [PATCH 08/90] add refactored scheduler package and tests Signed-off-by: Mark Kurtz --- src/guidellm/scheduler/__init__.py | 93 +- src/guidellm/scheduler/constraints.py | 1035 +++++++++++++++ src/guidellm/scheduler/environment.py | 273 ++++ src/guidellm/scheduler/objects.py | 468 +++++++ src/guidellm/scheduler/queues.py | 25 - src/guidellm/scheduler/result.py | 155 --- src/guidellm/scheduler/scheduler.py | 509 +++----- src/guidellm/scheduler/strategy.py | 853 ++++++++----- src/guidellm/scheduler/worker.py | 769 +++++------ src/guidellm/scheduler/worker_group.py | 681 ++++++++++ tests/unit/scheduler/__init__.py | 0 tests/unit/scheduler/test_constraints.py | 1412 +++++++++++++++++++++ tests/unit/scheduler/test_environment.py | 329 +++++ tests/unit/scheduler/test_objects.py | 1286 +++++++++++++++++++ tests/unit/scheduler/test_scheduler.py | 253 ++++ tests/unit/scheduler/test_strategy.py | 1154 +++++++++++++++++ tests/unit/scheduler/test_worker.py | 672 ++++++++++ tests/unit/scheduler/test_worker_group.py | 473 +++++++ 18 files changed, 9118 insertions(+), 1322 deletions(-) create mode 100644 src/guidellm/scheduler/constraints.py create mode 100644 src/guidellm/scheduler/environment.py create mode 100644 src/guidellm/scheduler/objects.py delete mode 100644 src/guidellm/scheduler/queues.py delete mode 100644 src/guidellm/scheduler/result.py create mode 100644 src/guidellm/scheduler/worker_group.py create mode 100644 tests/unit/scheduler/__init__.py create mode 100644 tests/unit/scheduler/test_constraints.py create mode 100644 tests/unit/scheduler/test_environment.py create mode 100644 tests/unit/scheduler/test_objects.py create mode 100644 tests/unit/scheduler/test_scheduler.py create mode 100644 tests/unit/scheduler/test_strategy.py create mode 100644 tests/unit/scheduler/test_worker.py create mode 100644 tests/unit/scheduler/test_worker_group.py diff --git 
a/src/guidellm/scheduler/__init__.py b/src/guidellm/scheduler/__init__.py index d3aa0aab..24d73df2 100644 --- a/src/guidellm/scheduler/__init__.py +++ b/src/guidellm/scheduler/__init__.py @@ -1,47 +1,90 @@ -from .result import ( - SchedulerRequestInfo, - SchedulerRequestResult, - SchedulerResult, - SchedulerRunInfo, +from .constraints import ( + Constraint, + ConstraintInitializer, + ConstraintsInitializerFactory, + MaxDurationConstraint, + MaxErrorRateConstraint, + MaxErrorsConstraint, + MaxGlobalErrorRateConstraint, + MaxNumberConstraint, + PydanticConstraintInitializer, + SerializableConstraintInitializer, + UnserializableConstraintInitializer, +) +from .environment import Environment, NonDistributedEnvironment +from .objects import ( + BackendInterface, + BackendT, + MeasuredRequestTimings, + MultiTurnRequestT, + RequestSchedulerTimings, + RequestT, + ResponseT, + ScheduledRequestInfo, + SchedulerMessagingPydanticRegistry, + SchedulerState, + SchedulerUpdateAction, + SchedulerUpdateActionProgress, ) from .scheduler import Scheduler from .strategy import ( AsyncConstantStrategy, AsyncPoissonStrategy, ConcurrentStrategy, + ConstantRateRequestTimings, + LastCompletionRequestTimings, + NoDelayRequestTimings, + PoissonRateRequestTimings, + ScheduledRequestTimings, SchedulingStrategy, + StrategyT, StrategyType, SynchronousStrategy, ThroughputStrategy, - strategy_display_str, -) -from .worker import ( - GenerativeRequestsWorker, - GenerativeRequestsWorkerDescription, - RequestsWorker, - ResolveStatus, - WorkerDescription, - WorkerProcessResult, ) +from .worker import WorkerProcess +from .worker_group import WorkerProcessGroup __all__ = [ "AsyncConstantStrategy", "AsyncPoissonStrategy", + "BackendInterface", + "BackendT", "ConcurrentStrategy", - "GenerativeRequestsWorker", - "GenerativeRequestsWorkerDescription", - "RequestsWorker", - "ResolveStatus", + "ConstantRateRequestTimings", + "Constraint", + "ConstraintInitializer", + "ConstraintsInitializerFactory", + "Environment", + "LastCompletionRequestTimings", + "MaxDurationConstraint", + "MaxErrorRateConstraint", + "MaxErrorsConstraint", + "MaxGlobalErrorRateConstraint", + "MaxNumberConstraint", + "MeasuredRequestTimings", + "MultiTurnRequestT", + "NoDelayRequestTimings", + "NonDistributedEnvironment", + "PoissonRateRequestTimings", + "PydanticConstraintInitializer", + "RequestSchedulerTimings", + "RequestT", + "ResponseT", + "ScheduledRequestInfo", + "ScheduledRequestTimings", "Scheduler", - "SchedulerRequestInfo", - "SchedulerRequestResult", - "SchedulerResult", - "SchedulerRunInfo", + "SchedulerMessagingPydanticRegistry", + "SchedulerState", + "SchedulerUpdateAction", + "SchedulerUpdateActionProgress", "SchedulingStrategy", + "SerializableConstraintInitializer", + "StrategyT", "StrategyType", "SynchronousStrategy", "ThroughputStrategy", - "WorkerDescription", - "WorkerProcessResult", - "strategy_display_str", + "UnserializableConstraintInitializer", + "WorkerProcess", + "WorkerProcessGroup", ] diff --git a/src/guidellm/scheduler/constraints.py b/src/guidellm/scheduler/constraints.py new file mode 100644 index 00000000..c724a74a --- /dev/null +++ b/src/guidellm/scheduler/constraints.py @@ -0,0 +1,1035 @@ +""" +Constraint system for scheduler behavior control and request processing limits. + +Provides flexible constraints for managing scheduler behavior with configurable +thresholds based on time, error rates, and request counts. 
Constraints evaluate +scheduler state and individual requests to determine whether processing should +continue or stop based on predefined limits. The constraint system enables +sophisticated benchmark stopping criteria through composable constraint types. +""" + +from __future__ import annotations + +import time +from abc import ABC, abstractmethod +from typing import Any, Literal, Protocol, runtime_checkable + +from pydantic import Field, field_validator + +from guidellm.scheduler.objects import ( + ScheduledRequestInfo, + SchedulerState, + SchedulerUpdateAction, + SchedulerUpdateActionProgress, +) +from guidellm.settings import settings +from guidellm.utils import InfoMixin, RegistryMixin, StandardBaseModel + +__all__ = [ + "Constraint", + "ConstraintInitializer", + "ConstraintsInitializerFactory", + "MaxDurationConstraint", + "MaxErrorRateConstraint", + "MaxErrorsConstraint", + "MaxGlobalErrorRateConstraint", + "MaxNumberConstraint", + "PydanticConstraintInitializer", + "RequestsExhaustedConstraint", + "SerializableConstraintInitializer", + "UnserializableConstraintInitializer", +] + + +@runtime_checkable +class Constraint(Protocol): + """Protocol for constraint evaluation functions that control scheduler behavior.""" + + def __call__( + self, state: SchedulerState, request: ScheduledRequestInfo + ) -> SchedulerUpdateAction: + """ + Evaluate constraint against scheduler state and request information. + + :param state: Current scheduler state with metrics and timing information + :param request: Individual request information and metadata + :return: Action indicating whether to continue or stop scheduler operations + """ + + +@runtime_checkable +class ConstraintInitializer(Protocol): + """Protocol for constraint initializer factory functions that create constraints.""" + + def create_constraint(self, **kwargs) -> Constraint: + """ + Create a constraint instance from configuration parameters. + + :param kwargs: Configuration parameters for constraint creation + :return: Configured constraint evaluation function + """ + + +@runtime_checkable +class SerializableConstraintInitializer(Protocol): + """Protocol for serializable constraint initializers supporting persistence.""" + + @classmethod + def validated_kwargs(cls, *args, **kwargs) -> dict[str, Any]: + """ + Validate and process arguments for constraint creation. + + :param args: Positional arguments for constraint configuration + :param kwargs: Keyword arguments for constraint configuration + :return: Validated parameter dictionary for constraint creation + """ + + @classmethod + def model_validate(cls, **kwargs) -> ConstraintInitializer: + """ + Create validated constraint initializer from configuration. + + :param kwargs: Configuration dictionary for initializer creation + :return: Validated constraint initializer instance + """ + + def model_dump(self) -> dict[str, Any]: + """ + Serialize constraint initializer to dictionary format. + + :return: Dictionary representation of constraint initializer + """ + + def create_constraint(self, **kwargs) -> Constraint: + """ + Create constraint instance from this initializer. + + :param kwargs: Additional configuration parameters + :return: Configured constraint evaluation function + """ + + +class ConstraintsInitializerFactory(RegistryMixin[ConstraintInitializer]): + """ + Registry factory for creating and managing constraint initializers. 
+ + Provides centralized access to registered constraint types with support for + creating constraints from configuration dictionaries, simple values, or + pre-configured instances. Handles constraint resolution and type validation + for the scheduler constraint system. + + Example: + :: + from guidellm.scheduler import ConstraintsInitializerFactory + + # Register new constraint type + @ConstraintsInitializerFactory.register("new_constraint") + class NewConstraint: + def create_constraint(self, **kwargs) -> Constraint: + return lambda state, request: SchedulerUpdateAction() + + # Create and use constraint + constraint = ConstraintsInitializerFactory.create_constraint("new_constraint") + """ + + @classmethod + def create(cls, key: str, *args, **kwargs) -> ConstraintInitializer: + """ + Create a constraint initializer for the specified key. + + :param key: Registered constraint initializer key + :param args: Positional arguments for initializer creation + :param kwargs: Keyword arguments for initializer creation + :return: Configured constraint initializer instance + :raises ValueError: If the key is not registered in the factory + """ + if cls.registry is None or key not in cls.registry: + raise ValueError(f"Unknown constraint initializer key: {key}") + + initializer_class = cls.registry[key] + + return ( + initializer_class(*args, **kwargs) # type: ignore[operator] + if not isinstance(initializer_class, type) + or not issubclass(initializer_class, SerializableConstraintInitializer) + else initializer_class( + **initializer_class.validated_kwargs(*args, **kwargs) # type: ignore[misc] + ) + ) + + @classmethod + def serialize(cls, initializer: ConstraintInitializer) -> dict[str, Any]: + """ + Serialize constraint initializer to dictionary format. + + :param initializer: Constraint initializer to serialize + :return: Dictionary representation or unserializable placeholder + """ + if isinstance(initializer, SerializableConstraintInitializer): + return initializer.model_dump() + else: + unserializable = UnserializableConstraintInitializer( + orig_info=InfoMixin.extract_from_obj(initializer) + ) + return unserializable.model_dump() + + @classmethod + def deserialize( + cls, initializer_dict: dict[str, Any] + ) -> SerializableConstraintInitializer: + """ + Deserialize constraint initializer from dictionary format. + + :param initializer_dict: Dictionary representation of constraint initializer + :return: Reconstructed constraint initializer instance + :raises ValueError: If constraint type is unknown or cannot be deserialized + """ + if initializer_dict.get("type_") == "unserializable": + return UnserializableConstraintInitializer.model_validate(initializer_dict) + + if ( + cls.registry is not None + and initializer_dict.get("type_") + and initializer_dict["type_"] in cls.registry + ): + initializer_class = cls.registry[initializer_dict["type_"]] + if hasattr(initializer_class, "model_validate"): + return initializer_class.model_validate(initializer_dict) # type: ignore[return-value] + else: + return initializer_class(**initializer_dict) # type: ignore[return-value,operator] + + raise ValueError( + f"Cannot deserialize unknown constraint initializer: " + f"{initializer_dict.get('type_', 'unknown')}" + ) + + @classmethod + def create_constraint(cls, key: str, *args, **kwargs) -> Constraint: + """ + Create a constraint instance for the specified key. 
+ + :param key: Registered constraint initializer key + :param args: Positional arguments for constraint creation + :param kwargs: Keyword arguments for constraint creation + :return: Configured constraint function ready for evaluation + :raises ValueError: If the key is not registered in the factory + """ + return cls.create(key, *args, **kwargs).create_constraint() + + @classmethod + def resolve( + cls, + initializers: dict[ + str, + Any | dict[str, Any] | Constraint | ConstraintInitializer, + ], + ) -> dict[str, Constraint]: + """ + Resolve mixed constraint specifications to callable constraints. + + :param initializers: Dictionary mapping constraint keys to specifications + :return: Dictionary mapping constraint keys to callable functions + :raises ValueError: If any key is not registered in the factory + """ + constraints = {} + + for key, val in initializers.items(): + if isinstance(val, Constraint): + constraints[key] = val + elif isinstance(val, ConstraintInitializer): + constraints[key] = val.create_constraint() + elif isinstance(val, dict): + constraints[key] = cls.create_constraint(key, **val) + else: + constraints[key] = cls.create_constraint(key, val) + + return constraints + + @classmethod + def resolve_constraints( + cls, + constraints: dict[str, Any | dict[str, Any] | Constraint], + ) -> dict[str, Constraint]: + """ + Resolve constraints from mixed constraint specifications. + + :param constraints: Dictionary mapping constraint keys to specifications + :return: Dictionary mapping constraint keys to callable functions + :raises ValueError: If any constraint key is not registered + """ + resolved_constraints = {} + + for key, val in constraints.items(): + if isinstance(val, Constraint): + resolved_constraints[key] = val + elif isinstance(val, dict): + resolved_constraints[key] = cls.create_constraint(key, **val) + else: + resolved_constraints[key] = cls.create_constraint(key, val) + + return resolved_constraints + + +class PydanticConstraintInitializer(StandardBaseModel, ABC, InfoMixin): + """ + Abstract base for Pydantic-based constraint initializers. + + Provides standardized serialization, validation, and metadata handling for + constraint initializers using Pydantic models. Subclasses implement specific + constraint creation logic while inheriting validation and persistence support. + """ + + type_: str = Field(description="Type identifier for the constraint initializer") + + @property + def info(self) -> dict[str, Any]: + """ + Extract serializable information from this constraint initializer. + + :return: Dictionary containing constraint configuration and metadata + """ + return self.model_dump() + + @classmethod + @abstractmethod + def validated_kwargs(cls, *args, **kwargs) -> dict[str, Any]: + """ + Validate and process arguments for constraint creation. + + Must be implemented by subclasses to handle their specific parameter patterns + and validation requirements. + + :param args: Positional arguments passed to the constraint + :param kwargs: Keyword arguments passed to the constraint + :return: Validated dictionary of parameters for constraint creation + :raises NotImplementedError: Must be implemented by subclasses + """ + ... + + @abstractmethod + def create_constraint(self, **kwargs) -> Constraint: + """ + Create a constraint instance. + + Must be implemented by subclasses to return their specific constraint type + with appropriate configuration and validation. 
+ + :param kwargs: Additional keyword arguments (usually unused) + :return: Configured constraint instance + :raises NotImplementedError: Must be implemented by subclasses + """ + ... + + +class UnserializableConstraintInitializer(PydanticConstraintInitializer): + """ + Placeholder for constraints that cannot be serialized or executed. + + Represents constraint initializers that failed serialization or contain + non-serializable components. Cannot be executed and raises errors when + invoked to prevent runtime failures from invalid constraint state. + """ + + type_: Literal["unserializable"] = "unserializable" # type: ignore[assignment] + orig_info: dict[str, Any] = Field( + default_factory=dict, + description="Original constraint information before serialization failure", + ) + + @classmethod + def validated_kwargs( + cls, + orig_info: dict[str, Any] | None = None, + **kwargs, # noqa: ARG003 + ) -> dict[str, Any]: + """ + Validate arguments for unserializable constraint creation. + + :param orig_info: Original constraint information before serialization failure + :param kwargs: Additional arguments (ignored) + :return: Validated parameters for unserializable constraint creation + """ + return {"orig_info": orig_info or {}} + + def create_constraint( + self, + **kwargs, # noqa: ARG002 + ) -> Constraint: + """ + Raise error for unserializable constraint creation attempt. + + :param kwargs: Additional keyword arguments (unused) + :raises RuntimeError: Always raised since unserializable constraints + cannot be executed + """ + raise RuntimeError( + "Cannot create constraint from unserializable constraint instance. " + "This constraint cannot be serialized and therefore cannot be executed." + ) + + def __call__( + self, + state: SchedulerState, # noqa: ARG002 + request: ScheduledRequestInfo, # noqa: ARG002 + ) -> SchedulerUpdateAction: + """ + Raise error since unserializable constraints cannot be invoked. + + :param state: Current scheduler state (unused) + :param request: Individual request information (unused) + :raises RuntimeError: Always raised for unserializable constraints + """ + raise RuntimeError( + "Cannot invoke unserializable constraint instance. " + "This constraint was not properly serialized and cannot be executed." + ) + + +@ConstraintsInitializerFactory.register( # type: ignore[arg-type] + ["max_number", "max_num", "max_requests", "max_req"] +) +class MaxNumberConstraint(PydanticConstraintInitializer): + """ + Constraint that limits execution based on maximum request counts. + + Stops request queuing when created requests reach the limit and stops local + request processing when processed requests reach the limit. Provides progress + tracking based on remaining requests and completion fraction. + """ + + type_: Literal["max_number"] = "max_number" # type: ignore[assignment] + max_num: int | float | list[int | float] = Field( + description="Maximum number of requests allowed before triggering constraint", + ) + current_index: int = Field( + default=-1, description="Current index for list-based max_num values" + ) + + @classmethod + def validated_kwargs( + cls, max_num: int | float | list[int | float], **kwargs + ) -> dict[str, Any]: + """ + Validate and process arguments for MaxNumberConstraint creation. 
+ + :param max_num: Maximum number of requests to allow + :param kwargs: Supports max_num, max_number, max_requests, max_req, + and optional type_ + :return: Validated dictionary with max_num and type_ fields + """ + aliases = ["max_number", "max_num", "max_requests", "max_req"] + for alias in aliases: + if max_num is None: + max_num = kwargs.get(alias) + + return {"max_num": max_num, "current_index": kwargs.get("current_index", -1)} + + def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 + """ + Return self as the constraint instance. + + :param kwargs: Additional keyword arguments (unused) + :return: Self instance as the constraint + """ + self.current_index += 1 + + return self.model_copy() # type: ignore[return-value] + + def __call__( + self, + state: SchedulerState, + request_info: ScheduledRequestInfo, # noqa: ARG002 + ) -> SchedulerUpdateAction: + """ + Evaluate constraint against current scheduler state and request count. + + :param state: Current scheduler state with request counts + :param request_info: Individual request information (unused) + :return: Action indicating whether to continue or stop operations + """ + current_index = max(0, self.current_index) + max_num = ( + self.max_num + if isinstance(self.max_num, (int, float)) + else self.max_num[min(current_index, len(self.max_num) - 1)] + ) + + create_exceeded = state.created_requests >= max_num + processed_exceeded = state.processed_requests >= max_num + remaining_requests = min(max(0, max_num - state.processed_requests), max_num) + remaining_fraction = remaining_requests / float(max_num) + + return SchedulerUpdateAction( + request_queuing="stop" if create_exceeded else "continue", + request_processing="stop_local" if processed_exceeded else "continue", + metadata={ + "max_number": max_num, + "create_exceeded": create_exceeded, + "processed_exceeded": processed_exceeded, + "created_requests": state.created_requests, + "processed_requests": state.processed_requests, + "remaining_fraction": remaining_fraction, + "remaining_requests": remaining_requests, + }, + progress=SchedulerUpdateActionProgress( + remaining_fraction=remaining_fraction, + remaining_requests=remaining_requests, + ), + ) + + @field_validator("max_num") + @classmethod + def _validate_max_num( + cls, value: int | float | list[int | float] + ) -> int | float | list[int | float]: + if not isinstance(value, list): + value = [value] + for val in value: + if not val: + raise ValueError( + f"max_num must be set and truthful, received {value} ({val} failed)" + ) + if not isinstance(val, (int, float)) or val <= 0: + raise ValueError( + f"max_num must be a positive num, received {value} ({val} failed)" + ) + + return value[0] if isinstance(value, list) and len(value) == 1 else value + + +@ConstraintsInitializerFactory.register( # type: ignore[arg-type] + ["max_duration", "max_dur", "max_sec", "max_seconds", "max_min", "max_minutes"] +) +class MaxDurationConstraint(PydanticConstraintInitializer): + """ + Constraint that limits execution based on maximum time duration. + + Stops both request queuing and processing when the elapsed time since scheduler + start exceeds the maximum duration. Provides progress tracking based on + remaining time and completion fraction. 
+ """ + + type_: Literal["max_duration"] = "max_duration" # type: ignore[assignment] + max_duration: int | float | list[int | float] = Field( + description="Maximum duration in seconds before triggering constraint" + ) + current_index: int = Field(default=-1, description="Current index in duration list") + + @classmethod + def validated_kwargs( + cls, max_duration: int | float | list[int | float] | None = None, **kwargs + ) -> dict[str, Any]: + """ + Validate and process arguments for MaxDurationConstraint creation. + + :param max_duration: Maximum duration in seconds + :param kwargs: Supports max_duration, max_dur, max_sec, max_seconds, + max_min, max_minutes, and optional type_ + :return: Validated dictionary with max_duration and type_ fields + """ + seconds_aliases = ["max_dur", "max_sec", "max_seconds"] + for alias in seconds_aliases: + if max_duration is None: + max_duration = kwargs.get(alias) + minutes_aliases = ["max_min", "max_minutes"] + for alias in minutes_aliases: + minutes = kwargs.get(alias) + if minutes is not None and max_duration is None: + max_duration = minutes * 60 + + return { + "max_duration": max_duration, + "current_index": kwargs.get("current_index", -1), + } + + def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 + """ + Return self as the constraint instance. + + :param kwargs: Additional keyword arguments (unused) + :return: Self instance as the constraint + """ + self.current_index += 1 + + return self.model_copy() # type: ignore[return-value] + + def __call__( + self, + state: SchedulerState, + request_info: ScheduledRequestInfo, # noqa: ARG002 + ) -> SchedulerUpdateAction: + """ + Evaluate constraint against current scheduler state and elapsed time. + + :param state: Current scheduler state with start time + :param request_info: Individual request information (unused) + :return: Action indicating whether to continue or stop operations + """ + current_index = max(0, self.current_index) + max_duration = ( + self.max_duration + if isinstance(self.max_duration, (int, float)) + else self.max_duration[min(current_index, len(self.max_duration) - 1)] + ) + + current_time = time.time() + elapsed = current_time - state.start_time + duration_exceeded = elapsed >= max_duration + remaining_duration = min(max(0.0, max_duration - elapsed), max_duration) + remaining_fraction = remaining_duration / float(max_duration) + + return SchedulerUpdateAction( + request_queuing="stop" if duration_exceeded else "continue", + request_processing="stop_local" if duration_exceeded else "continue", + metadata={ + "max_duration": max_duration, + "elapsed_time": elapsed, + "duration_exceeded": duration_exceeded, + "start_time": state.start_time, + "current_time": current_time, + }, + progress=SchedulerUpdateActionProgress( + remaining_fraction=remaining_fraction, + remaining_duration=remaining_duration, + ), + ) + + @field_validator("max_duration") + @classmethod + def _validate_max_duration( + cls, value: int | float | list[int | float] + ) -> int | float | list[int | float]: + if not isinstance(value, list): + value = [value] + for val in value: + if not val: + raise ValueError( + "max_duration must be set and truthful, " + f"received {value} ({val} failed)" + ) + if not isinstance(val, (int, float)) or val <= 0: + raise ValueError( + "max_duration must be a positive num," + f"received {value} ({val} failed)" + ) + + return value[0] if isinstance(value, list) and len(value) == 1 else value + + +@ConstraintsInitializerFactory.register( # type: ignore[arg-type] + 
["max_errors", "max_err", "max_error", "max_errs"] +) +class MaxErrorsConstraint(PydanticConstraintInitializer): + """ + Constraint that limits execution based on absolute error count. + + Stops both request queuing and all request processing when the total number + of errored requests reaches the maximum threshold. Uses global error tracking + across all requests for immediate constraint evaluation. + """ + + type_: Literal["max_errors"] = "max_errors" # type: ignore[assignment] + max_errors: int | float | list[int | float] = Field( + description="Maximum number of errors allowed before triggering constraint", + ) + current_index: int = Field(default=-1, description="Current index in error list") + + @classmethod + def validated_kwargs( + cls, max_errors: int | float | list[int | float] | None = None, **kwargs + ) -> dict[str, Any]: + """ + Validate and process arguments for MaxErrorsConstraint creation. + + :param max_errors: Maximum number of errors to allow + :param kwargs: Supports max_errors, max_err, max_error, max_errs, + and optional type_ + :return: Validated dictionary with max_errors and type_ fields + """ + aliases = ["max_errors", "max_err", "max_error", "max_errs"] + for alias in aliases: + if max_errors is None: + max_errors = kwargs.get(alias) + + return { + "max_errors": max_errors, + "current_index": kwargs.get("current_index", -1), + } + + def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 + """ + Return self as the constraint instance. + + :param kwargs: Additional keyword arguments (unused) + :return: Self instance as the constraint + """ + self.current_index += 1 + + return self.model_copy() # type: ignore[return-value] + + def __call__( + self, + state: SchedulerState, + request_info: ScheduledRequestInfo, # noqa: ARG002 + ) -> SchedulerUpdateAction: + """ + Evaluate constraint against current error count. + + :param state: Current scheduler state with error counts + :param request_info: Individual request information (unused) + :return: Action indicating whether to continue or stop operations + """ + current_index = max(0, self.current_index) + max_errors = ( + self.max_errors + if isinstance(self.max_errors, (int, float)) + else self.max_errors[min(current_index, len(self.max_errors) - 1)] + ) + errors_exceeded = state.errored_requests >= max_errors + + return SchedulerUpdateAction( + request_queuing="stop" if errors_exceeded else "continue", + request_processing="stop_all" if errors_exceeded else "continue", + metadata={ + "max_errors": max_errors, + "errors_exceeded": errors_exceeded, + "current_errors": state.errored_requests, + }, + ) + + @field_validator("max_errors") + @classmethod + def _validate_max_errors( + cls, value: int | float | list[int | float] + ) -> int | float | list[int | float]: + if not isinstance(value, list): + value = [value] + for val in value: + if not val: + raise ValueError( + "max_errors must be set and truthful, " + f"received {value} ({val} failed)" + ) + if not isinstance(val, (int, float)) or val <= 0: + raise ValueError( + f"max_errors must be a positive num,received {value} ({val} failed)" + ) + + return value[0] if isinstance(value, list) and len(value) == 1 else value + + +@ConstraintsInitializerFactory.register( # type: ignore[arg-type] + ["max_error_rate", "max_err_rate", "max_errors_rate"] +) +class MaxErrorRateConstraint(PydanticConstraintInitializer): + """ + Constraint that limits execution based on sliding window error rate. 
+ + Tracks error status of recent requests in a sliding window and stops all + processing when the error rate exceeds the threshold. Only applies the + constraint after processing enough requests to fill the minimum window size + for statistical significance. + """ + + type_: Literal["max_error_rate"] = "max_error_rate" # type: ignore[assignment] + max_error_rate: int | float | list[int | float] = Field( + description="Maximum error rate allowed (0.0, 1.0)" + ) + window_size: int | float = Field( + default=30, + gt=0, + description="Size of sliding window for calculating error rate", + ) + error_window: list[bool] = Field( + default_factory=list, + description="Sliding window tracking error status of recent requests", + ) + current_index: int = Field( + default=-1, description="Current index in the error window" + ) + + @classmethod + def validated_kwargs( + cls, max_error_rate: int | float | list[int | float], **kwargs + ) -> dict[str, Any]: + """ + Validate and process arguments for MaxErrorRateConstraint creation. + + :param max_error_rate: Maximum error rate to allow + :param kwargs: Supports max_error_rate, max_err_rate, max_errors_rate, + optional window_size, and optional type_ + :return: Validated dictionary with max_error_rate, window_size, + and type_ fields + """ + aliases = ["max_error_rate", "max_err_rate", "max_errors_rate"] + for alias in aliases: + if max_error_rate is None: + max_error_rate = kwargs.get(alias) + + return { + "max_error_rate": max_error_rate, + "window_size": kwargs.get( + "window_size", settings.constraint_error_window_size + ), + "error_window": kwargs.get("error_window", []), + "current_index": kwargs.get("current_index", -1), + } + + def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 + """ + Create a new instance of MaxErrorRateConstraint (due to stateful window). + + :param kwargs: Additional keyword arguments (unused) + :return: New instance of the constraint + """ + self.current_index += 1 + + return self.model_copy() # type: ignore[return-value] + + def __call__( + self, state: SchedulerState, request_info: ScheduledRequestInfo + ) -> SchedulerUpdateAction: + """ + Evaluate constraint against sliding window error rate. 
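+
+        A minimal behavioral sketch (the rate, window size, and statuses are
+        illustrative)::
+
+            constraint = MaxErrorRateConstraint(max_error_rate=0.5, window_size=4)
+            state = SchedulerState(processed_requests=4)
+            for status in ("errored", "errored", "completed", "errored"):
+                action = constraint(state, ScheduledRequestInfo(status=status))
+            # 3 of the last 4 tracked requests errored (0.75 >= 0.5), so the final
+            # action stops request queuing and all request processing.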
+ + :param state: Current scheduler state with request counts + :param request_info: Individual request with completion status + :return: Action indicating whether to continue or stop operations + """ + current_index = max(0, self.current_index) + max_error_rate = ( + self.max_error_rate + if isinstance(self.max_error_rate, (int, float)) + else self.max_error_rate[min(current_index, len(self.max_error_rate) - 1)] + ) + + if request_info.status in ["completed", "errored", "cancelled"]: + self.error_window.append(request_info.status == "errored") + if len(self.error_window) > self.window_size: + self.error_window.pop(0) + + error_count = sum(self.error_window) + window_requests = len(self.error_window) + error_rate = ( + error_count / float(window_requests) if window_requests > 0 else 0.0 + ) + exceeded_min_processed = state.processed_requests >= self.window_size + exceeded_error_rate = error_rate >= max_error_rate + + return SchedulerUpdateAction( + request_queuing=( + "stop" if exceeded_min_processed and exceeded_error_rate else "continue" + ), + request_processing=( + "stop_all" + if exceeded_min_processed and exceeded_error_rate + else "continue" + ), + metadata={ + "max_error_rate": max_error_rate, + "window_size": self.window_size, + "error_count": error_count, + "processed_count": state.processed_requests, + "current_window_size": len(self.error_window), + "current_error_rate": error_rate, + "exceeded_min_processed": exceeded_min_processed, + "exceeded_error_rate": exceeded_error_rate, + }, + ) + + @field_validator("max_error_rate") + @classmethod + def _validate_max_error_rate( + cls, value: int | float | list[int | float] + ) -> int | float | list[int | float]: + if not isinstance(value, list): + value = [value] + for val in value: + if not val: + raise ValueError( + "max_error_rate must be set and truthful, " + f"received {value} ({val} failed)" + ) + if not isinstance(val, (int, float)) or val <= 0 or val >= 1: + raise ValueError( + "max_error_rate must be a number between 0 and 1," + f"received {value} ({val} failed)" + ) + + return value[0] if isinstance(value, list) and len(value) == 1 else value + + +@ConstraintsInitializerFactory.register( # type: ignore[arg-type] + ["max_global_error_rate", "max_global_err_rate", "max_global_errors_rate"] +) +class MaxGlobalErrorRateConstraint(PydanticConstraintInitializer): + """ + Constraint that limits execution based on global error rate. + + Calculates error rate across all processed requests and stops all processing + when the rate exceeds the threshold. Only applies the constraint after + processing the minimum number of requests to ensure statistical significance + for global error rate calculations. + """ + + type_: Literal["max_global_error_rate"] = "max_global_error_rate" # type: ignore[assignment] + max_error_rate: int | float = Field( + description="Maximum error rate allowed (0.0 to 1.0)" + ) + min_processed: int | float | None = Field( + default=30, + gt=0, + description="Minimum requests processed before applying error rate constraint", + ) + current_index: int = Field( + default=-1, description="Current index for list-based max_error_rate values" + ) + + @classmethod + def validated_kwargs( + cls, max_error_rate: int | float | list[int | float], **kwargs + ) -> dict[str, Any]: + """ + Validate and process arguments for MaxGlobalErrorRateConstraint creation. 
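+
+        A minimal sketch of the alias handling (the 0.05 rate is illustrative;
+        min_processed falls back to settings.constraint_error_min_processed)::
+
+            MaxGlobalErrorRateConstraint.validated_kwargs(
+                None, max_global_err_rate=0.05
+            )
+            # -> {"max_error_rate": 0.05, "min_processed": <settings default>,
+            #     "current_index": -1}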
+ + :param max_error_rate: Maximum error rate to allow + :param kwargs: Supports max_global_error_rate, max_global_err_rate, + max_global_errors_rate, optional min_processed, and optional type_ + :return: Validated dictionary with max_error_rate, min_processed, + and type_ fields + """ + for alias in [ + "max_global_error_rate", + "max_global_err_rate", + "max_global_errors_rate", + ]: + if max_error_rate is None: + max_error_rate = kwargs.get(alias) + + return { + "max_error_rate": max_error_rate, + "min_processed": kwargs.get( + "min_processed", settings.constraint_error_min_processed + ), + "current_index": kwargs.get("current_index", -1), + } + + def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 + """ + Return self as the constraint instance. + + :param kwargs: Additional keyword arguments (unused) + :return: Self instance as the constraint + """ + self.current_index += 1 + + return self.model_copy() # type: ignore[return-value] + + def __call__( + self, + state: SchedulerState, + request_info: ScheduledRequestInfo, # noqa: ARG002 + ) -> SchedulerUpdateAction: + """ + Evaluate constraint against global error rate. + + :param state: Current scheduler state with global request and error counts + :param request_info: Individual request information (unused) + :return: Action indicating whether to continue or stop operations + """ + current_index = max(0, self.current_index) + max_error_rate = ( + self.max_error_rate + if isinstance(self.max_error_rate, (int, float)) + else self.max_error_rate[min(current_index, len(self.max_error_rate) - 1)] + ) + + exceeded_min_processed = ( + self.min_processed is None or state.processed_requests >= self.min_processed + ) + error_rate = ( + state.errored_requests / float(state.processed_requests) + if state.processed_requests > 0 + else 0.0 + ) + exceeded_error_rate = error_rate >= max_error_rate + should_stop = exceeded_min_processed and exceeded_error_rate + + return SchedulerUpdateAction( + request_queuing="stop" if should_stop else "continue", + request_processing="stop_all" if should_stop else "continue", + metadata={ + "max_error_rate": max_error_rate, + "min_processed": self.min_processed, + "processed_requests": state.processed_requests, + "errored_requests": state.errored_requests, + "error_rate": error_rate, + "exceeded_min_processed": exceeded_min_processed, + "exceeded_error_rate": exceeded_error_rate, + }, + ) + + @field_validator("max_error_rate") + @classmethod + def _validate_max_error_rate( + cls, value: int | float | list[int | float] + ) -> int | float | list[int | float]: + if not isinstance(value, list): + value = [value] + for val in value: + if not val: + raise ValueError( + "max_error_rate must be set and truthful, " + f"received {value} ({val} failed)" + ) + if not isinstance(val, (int, float)) or val <= 0 or val >= 1: + raise ValueError( + "max_error_rate must be a number between 0 and 1," + f"received {value} ({val} failed)" + ) + + return value[0] if isinstance(value, list) and len(value) == 1 else value + + +class RequestsExhaustedConstraint(StandardBaseModel, InfoMixin): + type_: Literal["requests_exhausted"] = "requests_exhausted" # type: ignore[assignment] + num_requests: int + + @property + def info(self) -> dict[str, Any]: + """ + Extract serializable information from this constraint initializer. 
+ + :return: Dictionary containing constraint configuration and metadata + """ + return self.model_dump() + + def __call__( + self, + state: SchedulerState, + request_info: ScheduledRequestInfo, # noqa: ARG002 + ) -> SchedulerUpdateAction: + create_exceeded = state.created_requests >= self.num_requests + processed_exceeded = state.processed_requests >= self.num_requests + remaining_fraction = min( + max(0.0, 1.0 - state.processed_requests / float(self.num_requests)), 1.0 + ) + remaining_requests = max(0, self.num_requests - state.processed_requests) + + return SchedulerUpdateAction( + request_queuing="stop" if create_exceeded else "continue", + request_processing="stop_local" if processed_exceeded else "continue", + metadata={ + "num_requests": self.num_requests, + "create_exceeded": create_exceeded, + "processed_exceeded": processed_exceeded, + "created_requests": state.created_requests, + "processed_requests": state.processed_requests, + "remaining_fraction": remaining_fraction, + "remaining_requests": remaining_requests, + }, + progress=SchedulerUpdateActionProgress( + remaining_fraction=remaining_fraction, + remaining_requests=remaining_requests, + ), + ) diff --git a/src/guidellm/scheduler/environment.py b/src/guidellm/scheduler/environment.py new file mode 100644 index 00000000..3bc29681 --- /dev/null +++ b/src/guidellm/scheduler/environment.py @@ -0,0 +1,273 @@ +""" +Environment abstractions for coordinating scheduler execution across distributed nodes. + +Provides environment abstractions that handle synchronization, timing coordination, +error propagation, and lifecycle management for scheduler execution across single +or multiple nodes. The Environment protocol defines the interface for distributed +coordination while NonDistributedEnvironment provides a minimal implementation +for single-node execution. + +Environment Execution Flow: +1. sync_run_params() - Distribute workload and synchronize parameters across nodes +2. sync_run_start() - Coordinate synchronized start time for all nodes +3. update_run_iteration() - Update state after each request (called per iteration) +4. sync_run_error() - Handle and propagate errors across nodes +5. sync_run_end() - Aggregate results and cleanup at completion +""" + +from __future__ import annotations + +import time +from abc import ABC, abstractmethod +from collections.abc import AsyncIterator, Iterable +from typing import ( + Generic, +) + +from guidellm.scheduler.constraints import Constraint +from guidellm.scheduler.objects import ( + MultiTurnRequestT, + RequestT, + ResponseT, + ScheduledRequestInfo, + SchedulerState, +) +from guidellm.scheduler.strategy import SchedulingStrategy +from guidellm.settings import settings +from guidellm.utils import InfoMixin + +__all__ = ["Environment", "NonDistributedEnvironment"] + + +class Environment(ABC, Generic[RequestT, ResponseT], InfoMixin): + """ + Abstract base for coordinating scheduler execution across distributed nodes. + + Defines the interface for managing distributed scheduler execution including + parameter synchronization, timing coordination, state updates, error propagation, + and result aggregation. Implementations handle the complexity of distributed + coordination while providing a unified interface for scheduler orchestration. 
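+
+    Example (a sketch of the expected call order; MyEnvironment stands in for an
+    illustrative concrete implementation)::
+
+        env: Environment = MyEnvironment()
+        requests, strategy, constraints = await env.sync_run_params(
+            requests, strategy, constraints
+        )
+        start_time = await env.sync_run_start()
+        # process requests, calling env.update_run_iteration(...) after each one
+        async for response, request, info, state in env.sync_run_end():
+            ...  # aggregate results from any remote nodes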
+ """ + + @abstractmethod + async def sync_run_params( + self, + requests: Iterable[RequestT | MultiTurnRequestT[RequestT]], + strategy: SchedulingStrategy, + constraints: dict[str, Constraint], + ) -> tuple[ + Iterable[RequestT | MultiTurnRequestT[RequestT]], + SchedulingStrategy, + dict[str, Constraint], + ]: + """ + Synchronize execution parameters across nodes and resolve local scope. + + Coordinates parameter distribution and validation across active nodes. + In distributed environments, handles node assignment and workload partitioning. + In non-distributed environments, typically returns parameters unchanged. + + :param requests: Complete set of requests to process across all nodes + :param strategy: Scheduling strategy to apply during execution + :param constraints: Runtime constraints to enforce during execution + :return: Tuple of (local_requests, strategy, constraints) for this node + :raises Exception: If parameter synchronization fails or nodes inconsistent + """ + ... + + @abstractmethod + async def sync_run_start(self) -> float: + """ + Coordinate synchronized start time across all nodes. + + Ensures all nodes begin processing simultaneously for accurate benchmarking + and consistent timing measurements across distributed execution. + + :return: Unix timestamp when all nodes should begin processing + :raises Exception: If startup synchronization fails across nodes + """ + ... + + @abstractmethod + async def update_run_iteration( + self, + response: ResponseT | None, + request: RequestT, + request_info: ScheduledRequestInfo, + state: SchedulerState, + ): + """ + Update environment state with completed request iteration results. + + Called after each request processing to update execution progress and + synchronize any required state across nodes in distributed environments. + Generally, distributed is expected to store the iteration updates until + all nodes have processed and sync_run_end is called to retrieve them. + + :param response: Response generated for the request, if successful + :param request: The processed request + :param request_info: Metadata about request processing including timings + :param state: Current scheduler state with metrics and progress + :raises Exception: If state update fails or indicates critical errors + """ + ... + + @abstractmethod + async def sync_run_error(self, err: list[Exception] | Exception): + """ + Handle and propagate errors across all active nodes. + + Coordinates error handling when failures occur, ensuring all nodes are + notified for appropriate cleanup or shutdown procedures. + + :param err: The exception(s) that occurred during execution + """ + ... + + @abstractmethod + async def sync_run_end( + self, + ) -> AsyncIterator[ + tuple[ + ResponseT, + RequestT | MultiTurnRequestT[RequestT], + ScheduledRequestInfo, + SchedulerState, + ] + ]: + """ + Finalize execution and aggregate results from all nodes. + + Handles cleanup, result synchronization, and error propagation at execution + completion. Collects and yields results from worker nodes in distributed + environments. + + :return: Iterator of (response, request, request_info, state) tuples from + remote nodes in distributed environments, empty for non-distributed + :raises Exception: Any errors that occurred during execution + """ + ... + + +class NonDistributedEnvironment(Environment): + """ + Single-node scheduler execution environment with minimal coordination overhead. 
+
+    Simplified environment for running schedulers on a single node without distributed
+    coordination requirements. Implements the Environment interface with no-op
+    synchronization for local testing, development, and single-machine benchmarking.
+
+    Example:
+    ::
+        from guidellm.scheduler import (
+            MaxNumberConstraint,
+            NonDistributedEnvironment,
+            ScheduledRequestInfo,
+            SchedulerState,
+            SynchronousStrategy,
+        )
+
+
+        # Definitions
+        requests = [f"req_{ind}" for ind in range(5)]
+        strategy = SynchronousStrategy()
+        constraints = {"max_num": MaxNumberConstraint(max_num=5)}
+        env = NonDistributedEnvironment()
+        state = SchedulerState()
+
+        # Run environment
+        local_req, local_strat, local_const = await env.sync_run_params(
+            requests, strategy, constraints
+        )
+        start_time = await env.sync_run_start()
+        for req in local_req:
+            state.processed_requests += 1
+            await env.update_run_iteration(
+                f"resp_{req}", req, ScheduledRequestInfo(), state
+            )
+        async for nonlocal_req in env.sync_run_end():
+            state.processed_requests += 1
+    """
+
+    def __init__(self):
+        """Initialize with empty error storage for single-node execution."""
+        self.run_errors: list[Exception] = []
+
+    async def sync_run_params(
+        self,
+        requests: Iterable[RequestT | MultiTurnRequestT[RequestT]],
+        strategy: SchedulingStrategy,
+        constraints: dict[str, Constraint],
+    ) -> tuple[
+        Iterable[RequestT | MultiTurnRequestT[RequestT]],
+        SchedulingStrategy,
+        dict[str, Constraint],
+    ]:
+        """
+        Return parameters unchanged for single-node execution.
+
+        :param requests: Requests to process locally
+        :param strategy: Scheduling strategy to apply during execution
+        :param constraints: Runtime constraints to enforce during execution
+        :return: Tuple containing the original (requests, strategy, constraints)
+        """
+        return requests, strategy, constraints
+
+    async def sync_run_start(self) -> float:
+        """
+        Return current time plus configured delay for single-node startup.
+
+        :return: Unix timestamp for when the run should start
+        """
+        return time.time() + settings.scheduler_start_delay_non_distributed
+
+    async def update_run_iteration(
+        self,
+        response: ResponseT | None,
+        request: RequestT,
+        request_info: ScheduledRequestInfo,
+        state: SchedulerState,
+    ):
+        """
+        No-op for single-node execution with no distributed state synchronization.
+
+        :param response: Response generated for the request, if successful
+        :param request: The request that was processed
+        :param request_info: Metadata about request processing including timings
+        :param state: Current scheduler state with metrics and progress
+        """
+
+    async def sync_run_error(self, err: list[Exception] | Exception):
+        """
+        Store error for later propagation during run finalization.
+
+        :param err: The exception(s) that occurred during execution
+        """
+        err = [err] if not isinstance(err, list) else err
+        self.run_errors.extend(err)
+
+    async def sync_run_end(
+        self,
+    ) -> AsyncIterator[
+        tuple[
+            ResponseT,
+            RequestT | MultiTurnRequestT[RequestT],
+            ScheduledRequestInfo,
+            SchedulerState,
+        ]
+    ]:
+        """
+        Finalize single-node execution and propagate any stored errors.
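+
+        A minimal behavioral sketch (the error value is illustrative)::
+
+            await env.sync_run_error(RuntimeError("worker failed"))
+            async for _ in env.sync_run_end():
+                ...  # never reached; the stored RuntimeError is re-raised here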
+ + :return: Empty iterator since there are no remote nodes + :raises Exception: Any error stored during execution via sync_run_error + """ + if self.run_errors: + if len(self.run_errors) == 1: + raise self.run_errors[0] + else: + raise RuntimeError( + f"Errors occurred during execution: {self.run_errors}" + ) + + return + yield # needed to force generator compilation diff --git a/src/guidellm/scheduler/objects.py b/src/guidellm/scheduler/objects.py new file mode 100644 index 00000000..b7f2efc3 --- /dev/null +++ b/src/guidellm/scheduler/objects.py @@ -0,0 +1,468 @@ +""" +Core data structures and interfaces for the GuideLLM scheduler system. + +Provides type-safe abstractions for distributed request processing, timing +measurements, and backend interfaces for benchmarking operations. Central to +the scheduler architecture, enabling request lifecycle tracking, backend +coordination, and state management across distributed worker processes. +""" + +from __future__ import annotations + +import time +import uuid +from collections.abc import AsyncIterator +from typing import ( + Any, + ClassVar, + Generic, + Literal, + Protocol, + TypeVar, + Union, +) + +from pydantic import Field, computed_field +from typing_extensions import TypeAliasType, TypedDict + +from guidellm.utils import ( + PydanticClassRegistryMixin, + RegistryMixin, + StandardBaseModel, +) +from guidellm.utils.registry import RegistryObjT + +__all__ = [ + "BackendInterface", + "BackendT", + "MeasuredRequestTimings", + "MultiTurnRequestT", + "RequestSchedulerTimings", + "RequestT", + "ResponseT", + "ScheduledRequestInfo", + "SchedulerMessagingPydanticRegistry", + "SchedulerState", + "SchedulerUpdateAction", + "SchedulerUpdateActionProgress", +] + +RequestT = TypeVar("RequestT") +"""Generic request object type for scheduler processing.""" + +ResponseT = TypeVar("ResponseT") +"""Generic response object type returned by backend processing.""" + +MultiTurnRequestT = TypeAliasType( + "MultiTurnRequestT", + Union[ + list[Union[RequestT, tuple[RequestT, float]]], + tuple[Union[RequestT, tuple[RequestT, float]]], + ], + type_params=(RequestT,), +) +"""Multi-turn request structure supporting conversation history with optional delays.""" + + +class SchedulerMessagingPydanticRegistry(RegistryMixin[RegistryObjT]): + """ + Registry for enabling a generic interface to define the pydantic class types used + for inter-process messaging within the scheduler. + """ + + +@SchedulerMessagingPydanticRegistry.register() +class RequestSchedulerTimings(StandardBaseModel): + """ + Scheduler-level timing measurements for request lifecycle tracking. + All timestamps are expected to be in Unix time (seconds since epoch). 
+ """ + + targeted_start: float | None = Field( + default=None, + description="When the request was initially targeted for execution", + ) + queued: float | None = Field( + default=None, + description="When the request was placed into the processing queue", + ) + dequeued: float | None = Field( + default=None, + description="When the request was removed from the queue for processing", + ) + scheduled_at: float | None = Field( + default=None, description="When the request was scheduled for processing" + ) + resolve_start: float | None = Field( + default=None, description="When backend resolution of the request began" + ) + resolve_end: float | None = Field( + default=None, description="When backend resolution of the request completed" + ) + finalized: float | None = Field( + default=None, + description="When the request was processed/acknowledged by the scheduler", + ) + + +@SchedulerMessagingPydanticRegistry.register() +class MeasuredRequestTimings(PydanticClassRegistryMixin["MeasuredRequestTimings"]): + """ + Base timing measurements for backend request processing. + All timestamps are expected to be in Unix time (seconds since epoch). + """ + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[MeasuredRequestTimings]: + if cls.__name__ == "MeasuredRequestTimings": + return cls + + return MeasuredRequestTimings + + schema_discriminator: ClassVar[str] = "timings_type" + + timings_type: Literal["measured_request_timings"] = Field( + default="measured_request_timings", + description="Type identifier for the timing measurement", + ) + request_start: float | None = Field( + default=None, description="When the backend began processing the request" + ) + request_end: float | None = Field( + default=None, description="When the backend completed processing the request" + ) + + +@SchedulerMessagingPydanticRegistry.register() +class ScheduledRequestInfo(StandardBaseModel): + """ + Complete request information including status, timings, and metadata. + + Central data structure for tracking request lifecycle from creation through + completion, containing scheduling metadata, timing measurements, and processing + status. Used by scheduler components to coordinate request processing across + distributed worker processes. 
+ + Example: + :: + from guidellm.scheduler.objects import ScheduledRequestInfo + + # Create request info with automatic ID generation + request_info = ScheduledRequestInfo() + request_info.status = "in_progress" + request_info.scheduler_timings.queued = time.time() + + # Check processing completion + if request_info.completed_at: + duration = request_info.completed_at - request_info.started_at + """ + + request_id: str = Field( + description="Unique identifier for the request", + default_factory=lambda: str(uuid.uuid4()), + ) + status: Literal[ + "queued", "pending", "in_progress", "completed", "errored", "cancelled" + ] = Field(description="Current processing status of the request", default="queued") + scheduler_node_id: int = Field( + description="ID/rank of the scheduler node handling the request", + default=-1, + ) + scheduler_process_id: int = Field( + description="ID/rank of the node's scheduler process handling the request", + default=-1, + ) + scheduler_start_time: float = Field( + description="Unix timestamp for the local time when scheduler processing began", + default=-1, + ) + + error: str | None = Field( + default=None, description="Error message if the request.status is 'errored'" + ) + scheduler_timings: RequestSchedulerTimings = Field( + default_factory=RequestSchedulerTimings, + description="Scheduler-level timing measurements for request lifecycle", + ) + request_timings: MeasuredRequestTimings | None = Field( + default=None, + description="Backend-specific timing measurements for request processing", + ) + + @computed_field # type: ignore[misc] + @property + def started_at(self) -> float | None: + """ + Get the effective request processing start time. + + :return: Unix timestamp when processing began, or None if not started. + """ + request_start = ( + self.request_timings.request_start if self.request_timings else None + ) + + return request_start or self.scheduler_timings.resolve_start + + @computed_field # type: ignore[misc] + @property + def completed_at(self) -> float | None: + """ + Get the effective request processing completion time. + + :return: Unix timestamp when processing completed, or None if not completed. + """ + request_end = self.request_timings.request_end if self.request_timings else None + + return request_end or self.scheduler_timings.resolve_end + + def model_copy(self, **kwargs) -> ScheduledRequestInfo: # type: ignore[override] # noqa: ARG002 + """ + Create a deep copy of the request info with copied timing objects. + + :return: New ScheduledRequestInfo instance with independent timing objects + """ + return super().model_copy( + update={ + "scheduler_timings": self.scheduler_timings.model_copy(), + "request_timings": ( + self.request_timings.model_copy() if self.request_timings else None + ), + }, + deep=False, + ) + + +class BackendInterface(Protocol, Generic[RequestT, ResponseT]): + """ + Abstract interface for request processing backends. + + Defines the contract for backend implementations that process requests within + the scheduler system. Backends handle initialization, validation, processing, + and shutdown lifecycle management. Must ensure all properties are pickleable + before process_startup is invoked for multi-process environments. 
+ + Example: + :: + from guidellm.scheduler.objects import BackendInterface + + class CustomBackend(BackendInterface): + @property + def processes_limit(self) -> int: + return 4 + + async def resolve(self, request, request_info, history=None): + # Process request and yield responses + yield response, updated_request_info + """ + + @property + def processes_limit(self) -> int | None: + """ + :return: Maximum worker processes supported, or None if unlimited + """ + + @property + def requests_limit(self) -> int | None: + """ + :return: Maximum concurrent requests supported, or None if unlimited + """ + + @property + def info(self) -> dict[str, Any]: + """ + :return: Backend metadata including model initialization and configuration + """ + + async def process_startup(self) -> None: + """ + Perform backend initialization and startup procedures. + + :raises: Implementation-specific exceptions for startup failures. + """ + + async def validate(self) -> None: + """ + Validate backend configuration and operational status. + + :raises: Implementation-specific exceptions for validation failures. + """ + + async def process_shutdown(self) -> None: + """ + Perform backend cleanup and shutdown procedures. + + :raises: Implementation-specific exceptions for shutdown failures. + """ + + async def resolve( + self, + request: RequestT, + request_info: ScheduledRequestInfo, + history: list[tuple[RequestT, ResponseT]] | None = None, + ) -> AsyncIterator[tuple[ResponseT, ScheduledRequestInfo]]: + """ + Process a request and yield incremental response updates. + + :param request: The request object to process + :param request_info: Scheduling metadata and timing information + :param history: Optional conversation history for multi-turn requests + :yield: Tuples of (response, updated_request_info) for each response chunk + :raises: Implementation-specific exceptions for processing failures + """ + + +BackendT = TypeVar("BackendT", bound=BackendInterface) +"""Generic backend interface type for request processing.""" + + +class SchedulerUpdateActionProgress(TypedDict, total=False): + """ + Progress information for a scheduler update action. + + Optional progress tracking data that provides estimates for remaining work + in scheduler operations. Used by constraints and monitoring systems to + track execution progress and make termination decisions. + """ + + remaining_fraction: float | None + remaining_requests: float | None + remaining_duration: float | None + + +class SchedulerUpdateAction(StandardBaseModel): + """ + Scheduler behavior control directives and actions. + + Encapsulates control signals for scheduler operations including request + queuing and processing directives. Used by constraints to communicate + termination conditions and progress information to scheduler components. 
+ + Example: + :: + from guidellm.scheduler.objects import SchedulerUpdateAction + + # Signal to stop queuing but continue processing + action = SchedulerUpdateAction( + request_queuing="stop", + request_processing="continue", + metadata={"reason": "max_requests_reached"} + ) + """ + + request_queuing: Literal["continue", "stop"] = Field( + default="continue", description="Action to take for request queuing operations" + ) + request_processing: Literal["continue", "stop_local", "stop_all"] = Field( + default="continue", + description="Action to take for request processing operations", + ) + metadata: dict[str, Any] = Field( + default_factory=dict, + description="Additional context and data for the scheduler action", + ) + progress: SchedulerUpdateActionProgress = Field( + default_factory=lambda: SchedulerUpdateActionProgress(), + description="Progress information for the scheduler action", + ) + + +class SchedulerState(StandardBaseModel): + """ + Scheduler operation state tracking and statistics. + + Comprehensive state container for tracking scheduler execution progress, + request counts, timing information, and constraint enforcement. Central + to scheduler coordination and provides real-time metrics for monitoring + and decision-making across distributed worker processes. + + Example: + :: + from guidellm.scheduler.objects import SchedulerState + + # Initialize scheduler state + state = SchedulerState(node_id=0, num_processes=4) + + # Track request processing + state.created_requests += 1 + state.queued_requests += 1 + + # Monitor completion progress + completion_rate = state.processed_requests / state.created_requests + """ + + node_id: int = Field( + description="Unique identifier for this scheduler node", default=-1 + ) + num_processes: int = Field( + description="Number of worker processes in this scheduler", default=-1 + ) + start_time: float = Field( + description="Unix timestamp when the scheduler started", + default_factory=time.time, + ) + end_time: float | None = Field( + default=None, description="Unix timestamp when the scheduler stopped" + ) + end_queuing_time: float | None = Field( + default=None, description="When request queuing stopped, if applicable" + ) + end_queuing_constraints: dict[str, SchedulerUpdateAction] = Field( + default_factory=dict, + description="Constraints that triggered queuing termination", + ) + end_processing_time: float | None = Field( + default=None, description="When request processing stopped, if applicable" + ) + end_processing_constraints: dict[str, SchedulerUpdateAction] = Field( + default_factory=dict, + description="Constraints that triggered process ing termination", + ) + scheduler_constraints: dict[str, SchedulerUpdateAction] = Field( + default_factory=dict, + description=( + "The latest state from all constraints applied during the scheduler run" + ), + ) + + remaining_fraction: float | None = Field( + default=None, + description=( + "Estimated fraction for the remaining progress of the run, if known" + ), + ) + remaining_requests: float | None = Field( + default=None, + description="Estimated number of requests remaining to be processed, if known", + ) + remaining_duration: float | None = Field( + default=None, + description=( + "Estimated time remaining in seconds for the scheduler run, if known" + ), + ) + + created_requests: int = Field( + default=0, description="Total number of requests created" + ) + queued_requests: int = Field( + default=0, description="Total number of requests queued for processing" + ) + pending_requests: int 
= Field( + default=0, + description="Total number of requests pending processing within a worker", + ) + processing_requests: int = Field( + default=0, description="Number of requests currently being processed" + ) + processed_requests: int = Field( + default=0, description="Total number of requests that completed processing" + ) + successful_requests: int = Field( + default=0, description="Number of requests that completed successfully" + ) + errored_requests: int = Field( + default=0, description="Number of requests that failed with errors" + ) + cancelled_requests: int = Field( + default=0, description="Number of requests that were cancelled" + ) diff --git a/src/guidellm/scheduler/queues.py b/src/guidellm/scheduler/queues.py deleted file mode 100644 index 6ccc6704..00000000 --- a/src/guidellm/scheduler/queues.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Helper module for importing the correct queue types. -""" - -from dataclasses import dataclass -from queue import Empty as QueueEmpty -from queue import Full as QueueFull -from queue import Queue -from typing import Generic - -from guidellm.request.types import RequestT, ResponseT -from guidellm.scheduler.result import WorkerProcessRequest, WorkerProcessResult - -__all__ = [ - "MPQueues", - "Queue", - "QueueEmpty", - "QueueFull", -] - - -@dataclass -class MPQueues(Generic[RequestT, ResponseT]): - requests: Queue[WorkerProcessRequest[RequestT, ResponseT]] - responses: Queue[WorkerProcessResult[RequestT, ResponseT]] diff --git a/src/guidellm/scheduler/result.py b/src/guidellm/scheduler/result.py deleted file mode 100644 index 04fbf931..00000000 --- a/src/guidellm/scheduler/result.py +++ /dev/null @@ -1,155 +0,0 @@ -from dataclasses import dataclass -from typing import ( - Generic, - Literal, - Optional, -) - -from guidellm.objects import StandardBaseModel -from guidellm.request.types import RequestT, ResponseT -from guidellm.scheduler.strategy import SchedulingStrategy - -__all__ = [ - "SchedulerRequestInfo", - "SchedulerRequestResult", - "SchedulerResult", - "SchedulerRunInfo", - "WorkerProcessRequest", - "WorkerProcessResult", -] - - -class SchedulerRunInfo(StandardBaseModel): - """ - Information about the current run of the scheduler. - This class holds metadata about the scheduling run, - including the start and end times, the number of processes, - and the scheduling strategy used. - It also tracks the number of requests created, queued, pending, - and completed during the run. - - :param start_time: The start time of the scheduling run. - :param end_time: The end time of the scheduling run; - if None, then this will be math.inf. - :param end_number: The maximum number of requests to be processed; - if None, then this will be math.inf. - :param processes: The number of processes used in the scheduling run. - :param strategy: The scheduling strategy used in the run. - This should be an instance of SchedulingStrategy. - :param created_requests: The number of requests created during the run. - :param queued_requests: The number of requests queued during the run. - :param scheduled_requests: The number of requests scheduled during the run. - (requests pending being sent to the worker but recieved by a process) - :param processing_requests: The number of requests actively being run. - :param completed_requests: The number of requests completed during the run. 
- """ - - start_time: float - end_time: float - end_number: float - processes: int - strategy: SchedulingStrategy - - created_requests: int = 0 - queued_requests: int = 0 - scheduled_requests: int = 0 - processing_requests: int = 0 - completed_requests: int = 0 - - -class SchedulerRequestInfo(StandardBaseModel): - """ - Information about a specific request run through the scheduler. - This class holds metadata about the request, including - the targeted start time, queued time, start time, end time, - and the process ID that handled the request. - - :param targeted_start_time: The targeted start time for the request (time.time()). - :param queued_time: The time the request was queued (time.time()). - :param scheduled_time: The time the request was scheduled (time.time()) - (any sleep time before the request was sent to the worker). - :param worker_start: The time the worker started processing request (time.time()). - :param worker_end: The time the worker finished processing request. (time.time()). - :param process_id: The ID of the underlying process that handled the request. - """ - - requested: bool = False - completed: bool = False - errored: bool = False - canceled: bool = False - - targeted_start_time: float = -1 - queued_time: float = -1 - dequeued_time: float = -1 - scheduled_time: float = -1 - worker_start: float = -1 - request_start: float = -1 - request_end: float = -1 - worker_end: float = -1 - process_id: int = -1 - - -class SchedulerResult(StandardBaseModel): - """ - The yielded, iterative result for a scheduler run. - These are triggered on the start and end of the run, - as well as on the start and end of each request. - Depending on the type, it will hold the request and response - along with information and statistics about the request and general run. - - :param type_: The type of the result, which can be one of: - - "run_start": Indicates the start of the run. - - "run_complete": Indicates the completion of the run (teardown happens after). - - "request_start": Indicates the start of a request. - - "request_complete": Indicates the completion of a request. - :param request: The request that was processed. - :param response: The response from the worker for the request. - :param request_info: Information about the request, including - the targeted start time, queued time, start time, end time, - and the process ID that handled the request. - :param run_info: Information about the current run of the scheduler, - including the start and end times, the number of processes, - and the scheduling strategy used. - It also tracks the number of requests created, queued, pending, - and completed during the run. 
- """ - - pydantic_type: Literal["scheduler_result"] = "scheduler_result" - type_: Literal[ - "run_start", - "run_complete", - "request_scheduled", - "request_start", - "request_complete", - ] - run_info: SchedulerRunInfo - - -class SchedulerRequestResult( - SchedulerResult, - Generic[RequestT, ResponseT], -): - pydantic_type: Literal["scheduler_request_result"] = "scheduler_request_result" # type: ignore[assignment] - type_: Literal[ - "request_scheduled", - "request_start", - "request_complete", - ] - request: RequestT - request_info: SchedulerRequestInfo - response: Optional[ResponseT] = None - - -@dataclass -class WorkerProcessRequest(Generic[RequestT, ResponseT]): - request: RequestT - timeout_time: float - queued_time: float - - -@dataclass -class WorkerProcessResult(Generic[RequestT, ResponseT]): - type_: Literal["request_scheduled", "request_start", "request_complete"] - request: RequestT - response: Optional[ResponseT] - info: SchedulerRequestInfo diff --git a/src/guidellm/scheduler/scheduler.py b/src/guidellm/scheduler/scheduler.py index 11e1102a..8089c64c 100644 --- a/src/guidellm/scheduler/scheduler.py +++ b/src/guidellm/scheduler/scheduler.py @@ -1,390 +1,165 @@ -import asyncio -import math -import time -from collections.abc import AsyncGenerator, Iterable, Iterator -from concurrent.futures import ProcessPoolExecutor -from multiprocessing import Manager -from threading import Event -from typing import ( - Any, - Generic, - Optional, - Union, -) +""" +Thread-safe singleton scheduler for distributed load generation workload coordination. + +Provides the core orchestration engine that coordinates request processing across +worker processes and distributed environments. Manages timing synchronization, +resource allocation, constraint enforcement, and result aggregation for +load generation operations. Integrates with backends, environments, and strategies +to enable scalable load testing across various scenarios including LLM inference. +""" -from loguru import logger +from __future__ import annotations -from guidellm.request.types import ( +from collections.abc import AsyncIterator, Iterable +from typing import Any, Generic + +from guidellm.scheduler.constraints import ( + Constraint, + ConstraintsInitializerFactory, +) +from guidellm.scheduler.environment import Environment, NonDistributedEnvironment +from guidellm.scheduler.objects import ( + BackendInterface, + MultiTurnRequestT, RequestT, ResponseT, -) -from guidellm.scheduler.queues import MPQueues, Queue, QueueEmpty -from guidellm.scheduler.result import ( - SchedulerRequestResult, - SchedulerResult, - SchedulerRunInfo, - WorkerProcessRequest, - WorkerProcessResult, + ScheduledRequestInfo, + SchedulerState, ) from guidellm.scheduler.strategy import SchedulingStrategy -from guidellm.scheduler.worker import ( - RequestsWorker, -) -from guidellm.settings import settings +from guidellm.scheduler.worker_group import WorkerProcessGroup +from guidellm.utils.singleton import ThreadSafeSingletonMixin __all__ = ["Scheduler"] -class Scheduler(Generic[RequestT, ResponseT]): +class Scheduler( + Generic[RequestT, ResponseT], + ThreadSafeSingletonMixin, +): """ - A class that handles the scheduling of requests to a worker. - This class is responsible for managing the lifecycle of the requests, - including their creation, queuing, and processing. - It uses a multiprocessing approach to handle requests concurrently - and efficiently, based on the specified scheduling strategy. 
- The Scheduler class is designed to work with a RequestsWorker, - which is an abstract base class that defines the interface for a worker - that can resolve requests asynchronously or synchronously. - The Scheduler class also supports different scheduling strategies, - including synchronous, throughput, and concurrent strategies. - - :param worker: The worker that will process the requests. - This should be an instance of RequestsWorker. - :param request_loader: An iterable that generates requests. - This can be a list, generator, or any other iterable. - The requests will be processed by the worker. + Thread-safe singleton scheduler for distributed benchmarking workload coordination. + + Orchestrates request processing across worker processes with distributed timing + coordination, constraint enforcement, and result aggregation. Provides a unified + interface for executing benchmarking operations while abstracting the complexity + of multi-process coordination, environment synchronization, and resource management. + Implements singleton pattern to ensure consistent execution state across concurrent + benchmark operations. + + Example: + :: + from guidellm.scheduler import Scheduler + from guidellm.backend import OpenAIBackend + from guidellm.scheduler import NonDistributedEnvironment, SynchronousStrategy + + scheduler = Scheduler() + async for response, request, info, state in scheduler.run( + requests=request_list, + backend=backend, + strategy=SynchronousStrategy(), + env=NonDistributedEnvironment(), + max_requests=1000 + ): + print(f"Processed: {request} with info: {info} and response: {response}") """ - def __init__( - self, - worker: RequestsWorker[RequestT, ResponseT], - request_loader: Iterable[RequestT], - ): - if not isinstance(worker, RequestsWorker): - raise ValueError(f"Invalid worker: {worker}") - - if not isinstance(request_loader, Iterable): - raise ValueError(f"Invalid request_loader: {request_loader}") - - self.worker = worker - self.request_loader = request_loader - async def run( self, - scheduling_strategy: SchedulingStrategy, - max_number: Optional[int] = None, - max_duration: Optional[float] = None, - ) -> AsyncGenerator[ - Union[SchedulerResult, SchedulerRequestResult[RequestT, ResponseT]], None + requests: Iterable[RequestT | MultiTurnRequestT[RequestT]], + backend: BackendInterface[RequestT, ResponseT], + strategy: SchedulingStrategy, + env: Environment | None, + **constraints: dict[str, Any | dict[str, Any] | Constraint], + ) -> AsyncIterator[ + tuple[ + ResponseT | None, + RequestT, + ScheduledRequestInfo, + SchedulerState, + ] ]: """ - The main method that runs the scheduler. - This method is a generator that yields SchedulerResult objects - at the start and end of the run, as well as at the start and end - of each request. - It uses multiprocessing to handle requests concurrently - and efficiently, based on the specified scheduling strategy. - The method also handles the lifecycle of the requests, - including their creation, queuing, and processing. - The method is designed to be used as an asynchronous generator, - allowing it to be used with asyncio and other asynchronous frameworks. - - :param scheduling_strategy: The scheduling strategy to use. - Specifies the times at which requests will be sent as well how many - worker processes are used and if requests are scheduled sync or async. - This can be one of the following: - - "synchronous": Requests are sent synchronously. - - "throughput": Requests are sent at the maximum rate possible. 
- - An instance of SchedulingStrategy. - :param max_number: The maximum number of requests to process. - If None, then no limit is set and either the iterator must be exhaustible - or the max_duration must be set. - :param max_duration: The maximum duration for the scheduling run. - If None, then no limit is set and either the iterator must be exhaustible - or the max_number must be set. - :return: An asynchronous generator that yields SchedulerResult objects. - Each SchedulerResult object contains information about the request, - the response, and the run information. + Execute distributed request processing with coordinated timing and constraints. + + Orchestrates the complete benchmarking workflow across worker processes with + environment synchronization, constraint enforcement, and error handling. + Manages resource lifecycle from initialization through cleanup while yielding + real-time processing updates for monitoring and aggregation. + + :param requests: Request collection to process. Supports single requests or + multi-turn sequences with optional inter-request delays + :param backend: Backend interface for request processing and response generation + :param strategy: Scheduling strategy controlling request timing and distribution + :param env: Environment interface for distributed coordination and + synchronization + :param constraints: Runtime constraints for execution control (max_requests, + max_duration, max_error_rate, etc.). Values can be primitives, dictionaries, + or constraint instances + :yields: Requests udpates as (response, request, request_info, scheduler_state) + tuples. Each request will generate three ordered updates: + queued, in_progress, completed | errored | cancelled. + :raises Exception: Worker process errors, environment synchronization failures, + or constraint evaluation errors are propagated after cleanup """ - if scheduling_strategy is None or not isinstance( - scheduling_strategy, SchedulingStrategy - ): - raise ValueError(f"Invalid scheduling strategy: {scheduling_strategy}") - - if max_number is not None and max_number < 1: - raise ValueError(f"Invalid max_number: {max_number}") + with self.thread_lock: + if env is None: + env = NonDistributedEnvironment() - if max_duration is not None and max_duration < 0: - raise ValueError(f"Invalid max_duration: {max_duration}") - - with ( - Manager() as manager, - ProcessPoolExecutor( - max_workers=scheduling_strategy.processes_limit - ) as executor, - ): - requests_iter: Optional[Iterator[Any]] = None - scheduling_strategy.start_time = ( - time.time() + settings.scheduler_start_delay - ) # Add a small delay to allow processes to start - futures, queues, stop_event = await self._start_processes( - manager, executor, scheduling_strategy - ) - run_info, requests_iter, times_iter = self._run_setup( - futures, scheduling_strategy, max_number, max_duration - ) - - # Add some initial requests to the queue - requests_iter = self._add_requests( - requests_iter, - queues.requests, - times_iter, - run_info, - ) - # Wait for the test to start - await asyncio.sleep(time.time() - scheduling_strategy.start_time) - yield SchedulerResult( - type_="run_start", - run_info=run_info, - ) + worker_group: WorkerProcessGroup[RequestT, ResponseT] | None = None + # Any issues during the run will raise an error (local or remote), + # be caught and passed to the environment, + # and will ensure clean up before raising the error. 
try: - while True: - # check errors and raise them - for future in futures: - if future.done() and (err := future.exception()) is not None: - raise err - - if ( - requests_iter is None - and run_info.processing_requests <= 0 - and ( # Ensure we have met one of the end conditions - time.time() >= run_info.end_time - or run_info.completed_requests >= run_info.end_number - ) - ): - # we've exhausted all requests we've wanted to run - # and yielded all responses - break - - requests_iter = self._add_requests( - requests_iter, - queues.requests, - times_iter, - run_info, - ) - await asyncio.sleep(0) # enable requests to start - - iter_result = self._check_result_ready( - queues.responses, - run_info, - ) - if iter_result is not None: - yield iter_result - - # yield control to the event loop - await asyncio.sleep(settings.default_async_loop_sleep) - except Exception as err: - raise RuntimeError(f"Scheduler run failed: {err}") from err - - yield SchedulerResult( - type_="run_complete", - run_info=run_info, - ) - - await self._stop_processes(futures, stop_event) - - async def _start_processes( - self, - manager, - executor: ProcessPoolExecutor, - scheduling_strategy: SchedulingStrategy, - ) -> tuple[ - list[asyncio.Future], - MPQueues[RequestT, ResponseT], - Event, - ]: - await self.worker.prepare_multiprocessing() - queues: MPQueues[RequestT, ResponseT] = MPQueues( - requests=manager.Queue( - maxsize=scheduling_strategy.processing_requests_limit - ), - responses=manager.Queue(), - ) - stop_event = manager.Event() - - num_processes = min( - scheduling_strategy.processes_limit, - scheduling_strategy.processing_requests_limit, - ) - requests_limit_split = ( - scheduling_strategy.processing_requests_limit - // scheduling_strategy.processes_limit - ) - requests_limit_remain = ( - scheduling_strategy.processing_requests_limit - % scheduling_strategy.processes_limit - ) - process_ids = (id_ for id_ in range(num_processes)) - process_requests_limits = ( - requests_limit_split + 1 - if i < requests_limit_remain - else requests_limit_split - for i in range(num_processes) - ) - - futures = [] - loop = asyncio.get_event_loop() - for id_, requests_limit in zip(process_ids, process_requests_limits): - futures.append( - loop.run_in_executor( - executor, - self.worker.process_loop_asynchronous, - queues, - scheduling_strategy, - stop_event, - requests_limit, - id_, - num_processes, + # Setup local run parameters, sync with the environment + constraints = ConstraintsInitializerFactory.resolve_constraints( + constraints ) - ) - - await asyncio.sleep(0.1) # give time for processes to start - - return futures, queues, stop_event - - def _run_setup( - self, - processes: list[asyncio.Future], - scheduling_strategy: SchedulingStrategy, - max_number: Optional[int], - max_duration: Optional[float], - ) -> tuple[SchedulerRunInfo, Iterator[Any], Iterator[float]]: - requests_iter = iter(self.request_loader) - times_iter = iter(scheduling_strategy.request_times()) - end_time = scheduling_strategy.start_time + (max_duration or math.inf) - end_number = max_number or math.inf - - try: - # update end number if the request loader is finite and less than max - iter_length = len(self.request_loader) # type: ignore[arg-type] - if 0 < iter_length < end_number: - end_number = iter_length - except Exception: # noqa: BLE001, S110 - pass - - if end_number == math.inf and end_time is None: - logger.warning( - "No end number or end time set, " - "scheduler will run indefinitely until the request loader is exhausted." 
- ) - - info = SchedulerRunInfo( - start_time=scheduling_strategy.start_time, - end_time=end_time, - end_number=end_number, - processes=len(processes), - strategy=scheduling_strategy, - ) - - return info, requests_iter, times_iter - - def _add_requests( - self, - requests_iter: Optional[Iterator[Any]], - requests_queue: Queue[WorkerProcessRequest[RequestT, ResponseT]], - times_iter: Iterator[float], - run_info: SchedulerRunInfo, - ) -> Optional[Iterator[Any]]: - if requests_iter is not None: - try: - added_count = 0 - - while not requests_queue.full() and added_count < ( - run_info.strategy.queued_requests_limit - or settings.min_queued_requests - ): - if run_info.created_requests >= run_info.end_number: - raise StopIteration - - if ( - next(times_iter) >= run_info.end_time - or time.time() >= run_info.end_time - ): - raise StopIteration - - work_req = WorkerProcessRequest[RequestT, ResponseT]( - request=next(requests_iter), - timeout_time=run_info.end_time, - queued_time=time.time(), + ( + local_requests, + local_strategy, + local_constraints, + ) = await env.sync_run_params(requests, strategy, constraints) + + # Setup the worker group, sync start with the environment + worker_group = WorkerProcessGroup[RequestT, ResponseT]( + requests=None, + cycle_requests=local_requests, + backend=backend, + strategy=local_strategy, + constraints=local_constraints, + ) + await worker_group.create_processes() + local_start_time = await env.sync_run_start() + await worker_group.start(local_start_time) + + # Yield any updates and sync with the environment for non-local updates + async for ( + response, + request, + request_info, + state, + ) in worker_group.request_updates(): + await env.update_run_iteration( + response, request, request_info, state ) - requests_queue.put(work_req) - - run_info.created_requests += 1 - run_info.queued_requests += 1 - added_count += 1 - except StopIteration: - # we've reached the limit number, limit time, or exhausted the requests - # set to None to stop adding more and tell the loop no more requests - requests_iter = None - - return requests_iter - - def _check_result_ready( - self, - responses_queue: Queue[WorkerProcessResult[RequestT, ResponseT]], - run_info: SchedulerRunInfo, - ) -> Optional[SchedulerRequestResult[RequestT, ResponseT]]: - try: - process_response: WorkerProcessResult[RequestT, ResponseT] = ( - responses_queue.get_nowait() - ) - except QueueEmpty: - return None - - if process_response.type_ == "request_scheduled": - run_info.queued_requests -= 1 - run_info.scheduled_requests += 1 - - return SchedulerRequestResult( - type_="request_scheduled", - run_info=run_info, - request=process_response.request, - request_info=process_response.info, - response=None, - ) - - if process_response.type_ == "request_start": - run_info.scheduled_requests -= 1 - run_info.processing_requests += 1 - - return SchedulerRequestResult( - type_="request_start", - run_info=run_info, - request=process_response.request, - request_info=process_response.info, - response=None, - ) - - if process_response.type_ == "request_complete": - run_info.processing_requests -= 1 - run_info.completed_requests += 1 - - return SchedulerRequestResult( - type_="request_complete", - run_info=run_info, - request=process_response.request, - request_info=process_response.info, - response=process_response.response, - ) - raise ValueError(f"Invalid process response type: {process_response}") - - async def _stop_processes( - self, - futures: list[asyncio.Future], - stop_event: Event, - ): - # stop all processes 
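
# Illustrative sketch (not part of this patch): the run() flow above touches the
# environment through five hooks - sync_run_params, sync_run_start,
# update_run_iteration, sync_run_error, and sync_run_end. A hypothetical
# single-node environment that mirrors those call sites could look roughly like
# the class below; the real Environment base class and NonDistributedEnvironment
# live elsewhere in guidellm and may differ.
import time


class SingleNodeEnvironment:
    def __init__(self):
        self.error = None

    async def sync_run_params(self, requests, strategy, constraints):
        # Nothing to negotiate on a single node: run everything locally.
        return requests, strategy, constraints

    async def sync_run_start(self):
        # Start almost immediately; a distributed setup would agree on a time.
        return time.time() + 0.1

    async def update_run_iteration(self, response, request, request_info, state):
        # No peers to notify; a distributed setup would publish progress here.
        pass

    async def sync_run_error(self, err):
        self.error = err

    async def sync_run_end(self):
        # Surface any recorded error; nothing extra to yield on a single node.
        if self.error is not None:
            raise self.error
        return
        yield  # marks this method as an (empty) async generator
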
- stop_event.set() - - await asyncio.gather(*futures) + yield response, request, request_info, state + except Exception as err: # noqa: BLE001 + await env.sync_run_error(err) + finally: + # Ensure all worker processes are cleaned up for error or completion + if worker_group is not None: + err = await worker_group.shutdown() + if err is not None: + await env.sync_run_error(err) + + # Ensure any errors are raised and all responses + # are yielded for aggregation on the primary node + async for ( + response, + request, + request_info, + state, + ) in env.sync_run_end(): + yield response, request, request_info, state diff --git a/src/guidellm/scheduler/strategy.py b/src/guidellm/scheduler/strategy.py index 81ff6558..8c791671 100644 --- a/src/guidellm/scheduler/strategy.py +++ b/src/guidellm/scheduler/strategy.py @@ -1,495 +1,700 @@ +""" +Request scheduling strategies for controlling how benchmark requests are processed. + +This module provides timing implementations and concrete strategies that control request +concurrency, timing patterns, and throughput characteristics to simulate real-world +usage scenarios. The scheduling system separates timing logic from strategy constraints, +enabling flexible combination of timing behaviors with process and concurrency limits. +""" + +from __future__ import annotations + import math import random import time -from collections.abc import Generator -from typing import ( - Literal, - Optional, - Union, -) +from abc import ABC, abstractmethod +from typing import Annotated, ClassVar, Literal, TypeVar -from pydantic import Field +from pydantic import Field, PrivateAttr -from guidellm.objects import StandardBaseModel -from guidellm.settings import settings +from guidellm.scheduler.objects import ScheduledRequestInfo +from guidellm.utils import InfoMixin, PydanticClassRegistryMixin, StandardBaseModel __all__ = [ "AsyncConstantStrategy", "AsyncPoissonStrategy", "ConcurrentStrategy", + "ConstantRateRequestTimings", + "LastCompletionRequestTimings", + "NoDelayRequestTimings", + "PoissonRateRequestTimings", + "ScheduledRequestTimings", "SchedulingStrategy", + "StrategyT", "StrategyType", "SynchronousStrategy", "ThroughputStrategy", - "strategy_display_str", ] -StrategyType = Literal["synchronous", "concurrent", "throughput", "constant", "poisson"] +StrategyType = Annotated[ + Literal["synchronous", "concurrent", "throughput", "constant", "poisson"], + "Valid strategy type identifiers for scheduling request patterns", +] + +def _exponential_decay_tau(max_progress: float, convergence: float = 0.99) -> float: + """ + Calculate tau value for exponential decay to reach target progress level. -class SchedulingStrategy(StandardBaseModel): + :param max_progress: The max progress value to reach + :param convergence: The target convergence level for reaching max_progress + :return: The calculated tau value for the given max_progress and convergence """ - An abstract base class for scheduling strategies. - This class defines the interface for scheduling requests and provides - a common structure for all scheduling strategies. - Subclasses should implement the `request_times` method to provide - specific scheduling behavior. - - :param type_: The type of scheduling strategy to use. - This should be one of the predefined strategy types. + return max_progress / (-math.log(1 - convergence)) + + +def _exponential_decay_fraction(progress: float, tau: float = 1.0) -> float: """ + Calculate completion fraction based on exponential decay curve. 
- type_: Literal["strategy"] = Field( - description="The type of scheduling strategy schedule requests with.", + :param progress: The current progress value (>=0) + :param tau: The scale factor for the exponential decay + :return: The fraction of completion based on exponential decay (0 -> 1) + """ + return 1 - math.exp(-progress / tau) + + +class ScheduledRequestTimings(StandardBaseModel, ABC): + """ + Abstract base class for controlling when requests are scheduled. + + Defines the interface for timing implementations that determine request scheduling + behavior. Different implementations provide various patterns like synchronous, + constant-rate, or stochastic scheduling to simulate real-world scenarios. + """ + + @abstractmethod + def next_offset(self) -> float: + """ + Calculate the time offset for the next request to be scheduled. + + :return: The offset in seconds from scheduler start time for next request + """ + + @abstractmethod + def request_completed(self, request_info: ScheduledRequestInfo): + """ + Handle request completion and update internal timing state. + + :param request_info: Information about the completed request including + timing details and completion status + """ + + +class LastCompletionRequestTimings(ScheduledRequestTimings): + """ + Timing implementation for synchronous and concurrent scheduling strategies. + + Schedules the next request immediately after the last request completes, enabling + sequential or limited concurrent processing with completion-based timing control. + """ + + offset: float = Field( + default=0.0, + description="Current time offset in seconds from scheduler start time", + ) + startup_requests: int = Field( + default=0, + description="Number of initial requests to schedule with equal spacing", + ge=0, ) - start_time: float = Field( - default_factory=time.time, - description="The start time for the scheduling strategy.", + startup_requests_delay: float = Field( + default=0.0, + description="Delay in seconds between startup requests", + ge=0, ) + _requests_count: int = PrivateAttr(0) - @property - def processing_mode(self) -> Literal["sync", "async"]: + def next_offset(self) -> float: """ - The processing mode for the scheduling strategy, either 'sync' or 'async'. - This property determines how the worker processes are setup: - either to run synchronously with one request at a time or asynchronously. - This property should be implemented by subclasses to return - the appropriate processing mode. + Get the current offset value and apply startup delay if applicable. - :return: The processing mode for the scheduling strategy, - either 'sync' or 'async'. + :return: The current offset value in seconds from scheduler start time """ - return "async" + self._requests_count += 1 - @property - def processes_limit(self) -> int: + if self._requests_count <= self.startup_requests: + self.offset += self.startup_requests_delay + + return self.offset + + def request_completed(self, request_info: ScheduledRequestInfo): """ - The limit on the number of worker processes for the scheduling strategy. - It determines how many worker processes are created - for the scheduling strategy and must be implemented by subclasses. + Update timing state based on the completed request. - :return: The number of processes for the scheduling strategy. 
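
# Worked example (not part of this patch): the two helpers above shape the
# startup ramp used by NoDelayRequestTimings further down. With a target of
# 10 requests and the default 0.99 convergence, the ramp reaches about 60%
# after two requests and 99% once the tenth request has been issued.
import math

target_requests, convergence = 10, 0.99
tau = target_requests / (-math.log(1 - convergence))  # ~2.17

for sent in (1, 2, 5, 10):
    fraction = 1 - math.exp(-sent / tau)
    print(f"{sent:>2} requests sent -> startup fraction {fraction:.2f}")
# 1 -> 0.37, 2 -> 0.60, 5 -> 0.90, 10 -> 0.99
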
+ :param request_info: Information about the completed request """ - return settings.max_worker_processes + if ( + self._requests_count > self.startup_requests + and request_info.completed_at is not None + ): + # set the next sync offset to the time when the previous request completed + self.offset = request_info.completed_at - request_info.scheduler_start_time - @property - def queued_requests_limit(self) -> Optional[int]: + +class NoDelayRequestTimings(ScheduledRequestTimings): + """ + Timing implementation for throughput-maximizing scheduling strategies. + + Schedules requests with minimal delay to achieve maximum throughput, with optional + startup ramping to gradually increase request processing during initialization. + """ + + offset: float = Field( + default=0.0, + description="Base time offset in seconds from scheduler start time", + ge=0, + ) + startup_duration: float = Field( + default=0.0, + description="Duration in seconds for gradual startup ramp", + ge=0, + ) + startup_target_requests: int = Field( + default=1, + description="Target number of requests to converge to during startup", + gt=0, + ) + startup_convergence: float = Field( + default=0.99, + description="Target convergence rate during startup phase", + ) + _start_time: float | None = PrivateAttr(None) + _requests_count: int = PrivateAttr(0) + + def next_offset(self) -> float: """ - The maximum number of queued requests for the scheduling strategy. - It determines how many requests can be queued at one time - for the scheduling strategy and must be implemented by subclasses. + Calculate offset with optional startup adjustment. - :return: The maximum number of queued requests for the scheduling strategy. + :return: Static offset plus any startup adjustment """ - return settings.max_concurrency + if self._start_time is None: + self._start_time = time.time() - @property - def processing_requests_limit(self) -> int: + self._requests_count += 1 + elapsed = time.time() - self._start_time + + if self.startup_duration > 0 and elapsed < self.startup_duration: + startup_percent = _exponential_decay_fraction( + self._requests_count, + _exponential_decay_tau( + self.startup_target_requests, self.startup_convergence + ), + ) + else: + startup_percent = 1.0 + + return self.offset + startup_percent * self.startup_duration + + def request_completed(self, request_info: ScheduledRequestInfo): """ - The maximum number of processing requests for the scheduling strategy. - It determines how many requests can be processed at one time - for the scheduling strategy and must be implemented by subclasses. + Handle request completion (no action needed for throughput strategy). - :return: The maximum number of processing requests for the scheduling strategy. + :param request_info: Information about the completed request (unused) """ - return settings.max_concurrency - def request_times(self) -> Generator[float, None, None]: + +class ConstantRateRequestTimings(ScheduledRequestTimings): + """ + Timing implementation for constant-rate scheduling strategies. + + Schedules requests at a fixed rate with evenly spaced intervals to provide + predictable timing behavior for steady-state load simulation. 
+ """ + + rate: float = Field( + description="Target rate in requests per second", + gt=0, + ) + offset: float = Field( + default=0.0, + description="Base time offset in seconds from scheduler start time", + ge=0, + ) + _requests_count: int = PrivateAttr(0) + + def next_offset(self) -> float: """ - A generator that yields timestamps for when requests should be sent. - This method should be implemented by subclasses to provide specific - scheduling behavior. + Calculate the offset for the next request at a constant rate. - :return: A generator that yields timestamps for request scheduling - or -1 for requests that should be sent immediately. + :return: The offset in seconds for the next request """ - raise NotImplementedError("Subclasses must implement request_times() method.") + num_requests = self._requests_count + self._requests_count += 1 + interval = 1.0 / self.rate + return self.offset + interval * num_requests -class SynchronousStrategy(SchedulingStrategy): + def request_completed(self, request_info: ScheduledRequestInfo): + """ + Handle request completion (no action needed for constant rate strategy). + + :param request_info: Information about the completed request (unused) + """ + + +class PoissonRateRequestTimings(ScheduledRequestTimings): """ - A class representing a synchronous scheduling strategy. - This strategy schedules requests synchronously, one at a time, - with the maximum rate possible. - It inherits from the `SchedulingStrategy` base class and - implements the `request_times` method to provide the specific - behavior for synchronous scheduling. - - :param type_: The synchronous StrategyType to schedule requests synchronously. + Timing implementation for Poisson-distributed scheduling strategies. + + Schedules requests following a Poisson process with exponentially distributed + inter-arrival times to simulate realistic traffic patterns with random variance. """ - type_: Literal["synchronous"] = "synchronous" # type: ignore[assignment] + rate: float = Field( + description="Target average rate in requests per second", + gt=0, + ) + random_seed: int = Field( + default=42, + description="Seed for random number generator for reproducible behavior", + ) + offset: float = Field( + default=0.0, + description="Base time offset in seconds from scheduler start time", + ) + _requests_count: int = PrivateAttr(0) + _random: random.Random | None = PrivateAttr(None) + + def next_offset(self) -> float: + """ + Calculate the offset for the next request using Poisson distribution. + + :return: The cumulative offset in seconds for the next request + """ + self._requests_count += 1 + + if self._random is None: + self._random = random.Random(self.random_seed) + else: + next_delay = self._random.expovariate(self.rate) + self.offset += next_delay + + return self.offset + + def request_completed(self, request_info: ScheduledRequestInfo): + """ + Handle request completion (no action needed for Poisson rate strategy). + + :param request_info: Information about the completed request (unused) + """ + + +class SchedulingStrategy(PydanticClassRegistryMixin["SchedulingStrategy"], InfoMixin): + """ + Abstract base class for scheduling strategies controlling request processing. + + Defines the interface for strategies that combine timing implementations with + process and concurrency constraints to enable various benchmark scenarios. 
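
# Illustrative sketch (not part of this patch): comparing the first few offsets
# produced by the two rate-based timings defined above. Constant spacing is
# exactly 1/rate apart; Poisson spacing varies around the same average and
# depends on the seed, so only its first value is predictable.
from guidellm.scheduler.strategy import (
    ConstantRateRequestTimings,
    PoissonRateRequestTimings,
)

constant = ConstantRateRequestTimings(rate=2.0)  # 2 requests/second
poisson = PoissonRateRequestTimings(rate=2.0, random_seed=42)

constant_offsets = [round(constant.next_offset(), 2) for _ in range(4)]
poisson_offsets = [round(poisson.next_offset(), 2) for _ in range(4)]

print(constant_offsets)  # [0.0, 0.5, 1.0, 1.5]
print(poisson_offsets)   # first value is 0.0, then random ~0.5 s gaps
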
+ """ + + schema_discriminator: ClassVar[str] = "type_" + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[SchedulingStrategy]: + if cls.__name__ == "SchedulingStrategy": + return cls + + return SchedulingStrategy + + type_: Literal["strategy"] = Field( + description="The type of scheduling strategy to schedule requests with", + ) @property - def processing_mode(self) -> Literal["sync"]: + def processes_limit(self) -> int | None: """ - The processing mode for the scheduling strategy, either 'sync' or 'async'. - This property determines how the worker processes are setup: - either to run synchronously with one request at a time or asynchronously. + Get the maximum number of worker processes supported by this strategy. - :return: 'sync' for synchronous scheduling strategy - for the single worker process. + :return: Maximum number of worker processes, None if unlimited """ - return "sync" + return None @property - def processes_limit(self) -> int: + def requests_limit(self) -> int | None: """ - The limit on the number of worker processes for the scheduling strategy. - It determines how many worker processes are created - for the scheduling strategy and must be implemented by subclasses. + Get the maximum number of concurrent requests supported by this strategy. - :return: 1 for the synchronous scheduling strategy to limit - the worker processes to one. + :return: Maximum number of concurrent requests, None if unlimited """ - return 1 + return None + + def create_request_timings( + self, local_rank: int, local_world_size: int, local_max_concurrency: int + ) -> ScheduledRequestTimings: + """ + Create a timing instance to define scheduling behavior for a worker process. + + :param local_rank: The rank of the worker process within local world size + :param local_world_size: Total number of worker processes in local world + :param local_max_concurrency: Maximum concurrent requests for the worker + :return: A ScheduledRequestTimings instance for the worker process + :raises NotImplementedError: Must be implemented by subclasses + """ + raise NotImplementedError( + "create_worker_timings method must be implemented by subclasses." + ) + + +StrategyT = TypeVar("StrategyT", bound=SchedulingStrategy) + + +@SchedulingStrategy.register("synchronous") +class SynchronousStrategy(SchedulingStrategy): + """ + Sequential request processing strategy with single-process constraint. + + Processes requests one at a time in strict sequential order, providing predictable + timing behavior ideal for measuring maximum sequential throughput and ensuring + request isolation. + """ + + type_: Literal["synchronous"] = "synchronous" # type: ignore[assignment] + + def __str__(self) -> str: + """ + Return string representation of the strategy. + + :return: String identifier for synchronous strategy + """ + return "synchronous" @property - def queued_requests_limit(self) -> int: + def processes_limit(self) -> int | None: """ - The maximum number of queued requests for the scheduling strategy. - It determines how many requests can be queued at one time - for the scheduling strategy and must be implemented by subclasses. + Get maximum number of worker processes for synchronous scheduling. - :return: 1 for the synchronous scheduling strategy to limit - the queued requests to one that is ready to be processed. 
+ :return: Always returns 1 to enforce single-process constraint """ return 1 @property - def processing_requests_limit(self) -> int: + def requests_limit(self) -> int | None: """ - The maximum number of processing requests for the scheduling strategy. - It determines how many requests can be processed at one time - for the scheduling strategy and must be implemented by subclasses. + Get maximum number of concurrent requests for synchronous scheduling. - :return: 1 for the synchronous scheduling strategy to limit - the processing requests to one that is ready to be processed. + :return: Always returns 1 to enforce single-request constraint """ return 1 - def request_times(self) -> Generator[float, None, None]: + def create_request_timings( + self, + local_rank: int, + local_world_size: int, + local_max_concurrency: int, # noqa: ARG002 + ) -> ScheduledRequestTimings: """ - A generator that yields time.time() so requests are sent immediately, - while scheduling them synchronously. + Create timing implementation for synchronous request scheduling. - :return: A generator that yields time.time() for immediate request scheduling. + :param local_rank: The rank of the worker process (must be 0) + :param local_world_size: Total number of worker processes (must be 1) + :param local_max_concurrency: Maximum concurrent requests (unused) + :return: LastCompletionRequestTimings instance for sequential processing + :raises ValueError: If multiple workers or non-zero rank specified """ - init_time = self.start_time - while True: - yield max(init_time, time.time()) + if local_world_size > 1 or local_rank != 0: + raise ValueError( + "SynchronousStrategy can only be used with a single worker process." + ) + return LastCompletionRequestTimings() + +@SchedulingStrategy.register("concurrent") class ConcurrentStrategy(SchedulingStrategy): """ - A class representing a concurrent scheduling strategy. - This strategy schedules requests concurrently with the specified - number of streams. - It inherits from the `SchedulingStrategy` base class and - implements the `request_times` method to provide the specific - behavior for concurrent scheduling. - - :param type_: The concurrent StrategyType to schedule requests concurrently. - :param streams: The number of concurrent streams to use for scheduling requests. - Each stream runs synchronously with the maximum rate possible. - This must be a positive integer. + Parallel request processing strategy with controlled concurrency limits. + + Enables concurrent request processing up to a specified number of streams, + providing balanced throughput while maintaining predictable resource usage + and completion-based timing coordination. """ type_: Literal["concurrent"] = "concurrent" # type: ignore[assignment] streams: int = Field( - description=( - "The number of concurrent streams to use for scheduling requests. " - "Each stream runs sychronously with the maximum rate possible. " - "This must be a positive integer." - ), + description="Number of concurrent streams for scheduling requests", gt=0, ) + startup_duration: float = Field( + default=0.0, + description="Duration in seconds for distributing startup requests", + ge=0, + ) - @property - def processing_mode(self) -> Literal["sync"]: + def __str__(self) -> str: """ - The processing mode for the scheduling strategy, either 'sync' or 'async'. - This property determines how the worker processes are setup: - either to run synchronously with one request at a time or asynchronously. 
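
# Illustrative sketch (not part of this patch): the registry pattern above also
# allows plugging in additional strategies. The class below is hypothetical and
# not part of guidellm - a strategy that gives every worker its own fixed rate
# rather than dividing one global rate - shown only to demonstrate the
# register() hook and the create_request_timings() contract.
from typing import Literal

from pydantic import Field

from guidellm.scheduler.strategy import (
    ConstantRateRequestTimings,
    ScheduledRequestTimings,
    SchedulingStrategy,
)


@SchedulingStrategy.register("per_worker_constant")
class PerWorkerConstantStrategy(SchedulingStrategy):
    """Hypothetical strategy: each worker schedules at `rate` requests/second."""

    type_: Literal["per_worker_constant"] = "per_worker_constant"  # type: ignore[assignment]
    rate: float = Field(gt=0, description="Per-worker rate in requests/second")

    def create_request_timings(
        self, local_rank: int, local_world_size: int, local_max_concurrency: int
    ) -> ScheduledRequestTimings:
        # Stagger workers slightly so their first requests do not collide.
        return ConstantRateRequestTimings(
            rate=self.rate,
            offset=local_rank * (1 / self.rate) / local_world_size,
        )
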
+ Return string representation of the strategy. - :return: 'sync' for synchronous scheduling strategy - for the multiple worker processes equal to streams. + :return: String identifier with stream count """ - return "sync" + return f"concurrent@{self.streams}" @property def processes_limit(self) -> int: """ - The limit on the number of worker processes for the scheduling strategy. - It determines how many worker processes are created - for the scheduling strategy and must be implemented by subclasses. + Get maximum number of worker processes for concurrent scheduling. - :return: {self.streams} for the concurrent scheduling strategy to limit - the worker processes to the number of streams. - """ - - return min(self.streams, settings.max_worker_processes) - - @property - def queued_requests_limit(self) -> int: - """ - The maximum number of queued requests for the scheduling strategy. - It determines how many requests can be queued at one time - for the scheduling strategy and must be implemented by subclasses. - - :return: {self.streams} for the concurrent scheduling strategy to limit - the queued requests to the number of streams that are ready to be processed. + :return: Number of streams as maximum worker processes """ return self.streams @property - def processing_requests_limit(self) -> int: + def requests_limit(self) -> int: """ - The maximum number of processing requests for the scheduling strategy. - It determines how many requests can be processed at one time - for the scheduling strategy and must be implemented by subclasses. + Get maximum number of concurrent requests for concurrent scheduling. - :return: {self.streams} for the concurrent scheduling strategy to limit - the processing requests to the number of streams that ready to be processed. + :return: Number of streams as maximum concurrent requests """ return self.streams - def request_times(self) -> Generator[float, None, None]: - """ - A generator that yields time.time() so requests are sent - immediately, while scheduling them concurrently with the specified - number of streams. + def create_request_timings( + self, + local_rank: int, + local_world_size: int, + local_max_concurrency: int, # noqa: ARG002 + ) -> LastCompletionRequestTimings: + """ + Create timing implementation for concurrent request scheduling. + + :param local_rank: The rank of the worker process (must be < streams) + :param local_world_size: Total worker processes (must not exceed streams) + :param local_max_concurrency: Maximum concurrent requests (unused) + :return: LastCompletionRequestTimings instance for stream-based processing + :raises ValueError: If worker configuration exceeds stream limits + """ + if local_world_size > self.streams: + raise ValueError( + "ConcurrentStrategy can only be used with up to " + f"{self.streams} worker processes." + ) + + if local_rank >= self.streams: + raise ValueError( + f"Local rank {local_rank} exceeds the number of streams {self.streams}." 
+ ) + + if self.startup_duration > 0: + # Ensure equal global distribution of the start up for concurrent streams + # Ex: for 10 streams, 2 workers, and 8 seconds start up duration, + # the first worker should start at 0.0, 1.6, 3.2, 4.8, 6.4 + # and the second worker should start at 0.8, 2.4, 4.0, 5.6, 7.2 + delay_per_stream = self.startup_duration / self.streams + streams_per_worker = self.streams // local_world_size + + offset = local_rank * streams_per_worker * delay_per_stream + startup_requests = streams_per_worker + ( + 1 + if local_world_size > 1 and local_rank < self.streams % local_world_size + else 0 + ) + startup_requests_delay = delay_per_stream * local_world_size + else: + offset = 0.0 + startup_requests = 0 + startup_requests_delay = 0.0 - :return: A generator that yields time.time() for immediate request scheduling. - """ - init_time = self.start_time - while True: - yield max(init_time, time.time()) + return LastCompletionRequestTimings( + offset=offset, + startup_requests=startup_requests, + startup_requests_delay=startup_requests_delay, + ) +@SchedulingStrategy.register("throughput") class ThroughputStrategy(SchedulingStrategy): """ - A class representing a throughput scheduling strategy. - This strategy schedules as many requests asynchronously as possible, - with the maximum rate possible. - It inherits from the `SchedulingStrategy` base class and - implements the `request_times` method to provide the specific - behavior for throughput scheduling. - - :param type_: The throughput StrategyType to schedule requests asynchronously. + Maximum throughput strategy with optional concurrency limits. + + Schedules requests to maximize system throughput by allowing unlimited concurrent + processing with optional constraints and startup ramping for controlled ramp-up. """ type_: Literal["throughput"] = "throughput" # type: ignore[assignment] - max_concurrency: Optional[int] = Field( + max_concurrency: int | None = Field( default=None, - description=( - "The maximum number of concurrent requests to schedule. " - "If set to None, the concurrency value from settings will be used. " - "This must be a positive integer greater than 0." - ), + description="Maximum number of concurrent requests to schedule", gt=0, ) + startup_duration: float = Field( + default=0.0, + description="Duration in seconds for startup request distribution", + ge=0, + ) - @property - def processing_mode(self) -> Literal["async"]: + def __str__(self) -> str: """ - The processing mode for the scheduling strategy, either 'sync' or 'async'. - This property determines how the worker processes are setup: - either to run synchronously with one request at a time or asynchronously. + Return string representation of the strategy. - :return: 'async' for asynchronous scheduling strategy - for the multiple worker processes handling requests. + :return: String identifier for throughput strategy """ - return "async" + return "throughput" @property - def queued_requests_limit(self) -> int: + def processes_limit(self) -> int | None: """ - The maximum number of queued requests for the scheduling strategy. - It determines how many requests can be queued at one time - for the scheduling strategy and must be implemented by subclasses. + Get maximum number of worker processes for throughput scheduling. - :return: The processing requests limit to ensure that there are enough - requests even for the worst case scenario where the max concurrent - requests are pulled at once for processing. 
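
# Small sketch (not part of this patch) exercising the two strategies defined so
# far: each one reports its worker and concurrency caps through the
# processes_limit and requests_limit properties and a human-readable label
# through __str__.
from guidellm.scheduler.strategy import ConcurrentStrategy, SynchronousStrategy

for strategy in (SynchronousStrategy(), ConcurrentStrategy(streams=4)):
    print(str(strategy), strategy.processes_limit, strategy.requests_limit)
# synchronous 1 1
# concurrent@4 4 4
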
+ :return: The max_concurrency value if set, otherwise None for unlimited """ - return self.processing_requests_limit + return self.max_concurrency @property - def processing_requests_limit(self) -> int: + def requests_limit(self) -> int | None: """ - The maximum number of processing requests for the scheduling strategy. - It determines how many requests can be processed at one time - for the scheduling strategy and must be implemented by subclasses. + Get maximum number of concurrent requests for throughput scheduling. - :return: {self.max_concurrency} for the throughput scheduling strategy to limit - the processing requests to the maximum concurrency. - If max_concurrency is None, then the default processing requests limit - will be used. + :return: The max_concurrency value if set, otherwise None for unlimited """ - return self.max_concurrency or super().processing_requests_limit + return self.max_concurrency - def request_times(self) -> Generator[float, None, None]: + def create_request_timings( + self, local_rank: int, local_world_size: int, local_max_concurrency: int + ) -> ScheduledRequestTimings: """ - A generator that yields the start time.time() so requests are sent - immediately, while scheduling as many asynchronously as possible. + Create timing implementation for throughput request scheduling. - :return: A generator that yields the start time.time() - for immediate request scheduling. + :param local_rank: The rank of the worker process + :param local_world_size: Total number of worker processes + :param local_max_concurrency: Maximum concurrent requests for the worker + :return: NoDelayRequestTimings instance for immediate request scheduling """ - init_time = self.start_time - while True: - yield init_time + if self.startup_duration > 0: + # Vary offset by up to 5% of the startup duration for a bit of variance + offset = 0.05 * self.startup_duration * (local_rank / local_world_size) + # Use local_max_concurrency as the target requests for startup convergence + startup_target_requests = local_max_concurrency + else: + offset = 0.0 + startup_target_requests = 1 + + return NoDelayRequestTimings( + startup_duration=self.startup_duration, + startup_target_requests=startup_target_requests, + offset=offset, + ) +@SchedulingStrategy.register("constant") class AsyncConstantStrategy(ThroughputStrategy): """ - A class representing an asynchronous constant scheduling strategy. - This strategy schedules requests asynchronously at a constant request rate - in requests per second. - If initial_burst is set, it will send an initial burst of math.floor(rate) - requests to reach the target rate. - This is useful to ensure that the target rate is reached quickly - and then maintained. - It inherits from the `SchedulingStrategy` base class and - implements the `request_times` method to provide the specific - behavior for asynchronous constant scheduling. - - :param type_: The constant StrategyType to schedule requests asynchronously. - :param rate: The rate at which to schedule requests asynchronously in - requests per second. This must be a positive float. - :param initial_burst: True to send an initial burst of requests - (math.floor(self.rate)) to reach target rate. - False to not send an initial burst. + Asynchronous constant-rate scheduling strategy for predictable load patterns. + + Schedules requests at a fixed rate distributed evenly across worker processes, + providing predictable timing behavior for steady-state load simulation and + consistent system performance measurement. 
""" type_: Literal["constant"] = "constant" # type: ignore[assignment] rate: float = Field( - description=( - "The rate at which to schedule requests asynchronously in " - "requests per second. This must be a positive float." - ), + description="Rate for scheduling requests asynchronously in requests/second", gt=0, ) - initial_burst: bool = Field( - default=True, - description=( - "True to send an initial burst of requests (math.floor(self.rate)) " - "to reach target rate. False to not send an initial burst." - ), + startup_duration: float = Field( + default=0.0, + description="Duration in seconds for startup request distribution", + ge=0, ) - def request_times(self) -> Generator[float, None, None]: + def __str__(self) -> str: """ - A generator that yields timestamps for when requests should be sent. - This method schedules requests asynchronously at a constant rate - in requests per second. - If burst_time is set, it will send an initial burst of requests - to reach the target rate. - This is useful to ensure that the target rate is reached quickly - and then maintained. + Return string representation of the strategy. - :return: A generator that yields timestamps for request scheduling. + :return: String identifier with rate value """ - constant_increment = 1.0 / self.rate - - init_time = self.start_time - # handle bursts first to get to the desired rate - if self.initial_burst is not None: - # send an initial burst equal to the rate - # to reach the target rate - burst_count = math.floor(self.rate) - for _ in range(burst_count): - yield init_time + return f"constant@{self.rate:.2f}" - init_time += constant_increment + def create_request_timings( + self, + local_rank: int, + local_world_size: int, + local_max_concurrency: int, # noqa: ARG002 + ) -> ScheduledRequestTimings: + """ + Create timing implementation for constant-rate request scheduling. - counter = 0 + :param local_rank: The rank of the worker process + :param local_world_size: Total number of worker processes for rate division + :param local_max_concurrency: Maximum concurrent requests for the worker + :return: ConstantRateRequestTimings instance with per-worker rate + """ + # Divide the rate evenly across all worker processes + worker_rate = self.rate / local_world_size + # Start each worker with an offset to interleave rates + worker_offset = (1 / self.rate) * local_rank - # continue with constant rate after bursting - while True: - yield init_time + constant_increment * counter - counter += 1 + return ConstantRateRequestTimings( + rate=worker_rate, + offset=worker_offset, + ) +@SchedulingStrategy.register("poisson") class AsyncPoissonStrategy(ThroughputStrategy): """ - A class representing an asynchronous Poisson scheduling strategy. - This strategy schedules requests asynchronously at a Poisson request rate - in requests per second. - If initial_burst is set, it will send an initial burst of math.floor(rate) - requests to reach the target rate. - It inherits from the `SchedulingStrategy` base class and - implements the `request_times` method to provide the specific - behavior for asynchronous Poisson scheduling. - - :param type_: The Poisson StrategyType to schedule requests asynchronously. - :param rate: The rate at which to schedule requests asynchronously in - requests per second. This must be a positive float. - :param initial_burst: True to send an initial burst of requests - (math.floor(self.rate)) to reach target rate. - False to not send an initial burst. 
+ Asynchronous Poisson-distributed scheduling strategy for realistic load simulation. + + Schedules requests following a Poisson process with exponentially distributed + inter-arrival times, providing realistic simulation of user behavior and network + traffic patterns with random variance around the target rate. """ type_: Literal["poisson"] = "poisson" # type: ignore[assignment] rate: float = Field( - description=( - "The rate at which to schedule requests asynchronously in " - "requests per second. This must be a positive float." - ), + description="Rate for scheduling requests asynchronously in requests/second", gt=0, ) - initial_burst: bool = Field( - default=True, - description=( - "True to send an initial burst of requests (math.floor(self.rate)) " - "to reach target rate. False to not send an initial burst." - ), + startup_duration: float = Field( + default=0.0, + description="Duration in seconds for startup request distribution", + ge=0, ) random_seed: int = Field( default=42, - description=("The random seed to use for the Poisson distribution. "), + description="Random seed to use for Poisson distribution", ) - def request_times(self) -> Generator[float, None, None]: + def __str__(self) -> str: """ - A generator that yields timestamps for when requests should be sent. - This method schedules requests asynchronously at a Poisson rate - in requests per second. - The inter arrival time between requests is exponentially distributed - based on the rate. + Return string representation of the strategy. - :return: A generator that yields timestamps for request scheduling. + :return: String identifier with rate value """ - init_time = self.start_time - if self.initial_burst is not None: - # send an initial burst equal to the rate - # to reach the target rate - burst_count = math.floor(self.rate) - for _ in range(burst_count): - yield init_time - else: - yield init_time - - # set the random seed for reproducibility - rand = random.Random(self.random_seed) # noqa: S311 + return f"poisson@{self.rate:.2f}" - while True: - inter_arrival_time = rand.expovariate(self.rate) - init_time += inter_arrival_time - yield init_time - - -def strategy_display_str(strategy: Union[StrategyType, SchedulingStrategy]) -> str: - strategy_type = strategy if isinstance(strategy, str) else strategy.type_ - strategy_instance = strategy if isinstance(strategy, SchedulingStrategy) else None + def create_request_timings( + self, + local_rank: int, + local_world_size: int, + local_max_concurrency: int, # noqa: ARG002 + ) -> ScheduledRequestTimings: + """ + Create timing implementation for Poisson-distributed request scheduling. 
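
# Worked example (not part of this patch): how the constant strategy above
# splits a 10 req/s target across two worker processes. Each worker runs at
# 5 req/s and the second worker is offset by 1/10 s, so the combined schedule
# interleaves to one request every 0.1 s. The Poisson strategy below divides
# its rate across workers the same way.
from guidellm.scheduler.strategy import AsyncConstantStrategy

strategy = AsyncConstantStrategy(rate=10.0)

for rank in (0, 1):
    timings = strategy.create_request_timings(
        local_rank=rank, local_world_size=2, local_max_concurrency=8
    )
    offsets = [round(timings.next_offset(), 2) for _ in range(3)]
    print(f"worker {rank}: {offsets}")
# worker 0: [0.0, 0.2, 0.4]
# worker 1: [0.1, 0.3, 0.5]
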
- if strategy_type == "concurrent": - rate = f"@{strategy_instance.streams}" if strategy_instance else "@##" # type: ignore[attr-defined] - elif strategy_type in ("constant", "poisson"): - rate = f"@{strategy_instance.rate:.2f}" if strategy_instance else "@#.##" # type: ignore[attr-defined] - else: - rate = "" + :param local_rank: The rank of the worker process for seed generation + :param local_world_size: Total number of worker processes for rate division + :param local_max_concurrency: Maximum concurrent requests for the worker + :return: PoissonRateRequestTimings instance with per-worker rate and unique seed + """ + # Divide the rate evenly across all worker processes + worker_rate = self.rate / local_world_size + # Use a different seed for each worker to ensure different sequences + worker_seed = self.random_seed + local_rank + # Start each worker with an offset to interleave rates + worker_offset = (1 / self.rate) * local_rank - return f"{strategy_type}{rate}" + return PoissonRateRequestTimings( + rate=worker_rate, + random_seed=worker_seed, + offset=worker_offset, + ) diff --git a/src/guidellm/scheduler/worker.py b/src/guidellm/scheduler/worker.py index ba36559e..834c0921 100644 --- a/src/guidellm/scheduler/worker.py +++ b/src/guidellm/scheduler/worker.py @@ -1,472 +1,389 @@ -import asyncio -import math -import time -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator -from dataclasses import dataclass -from itertools import islice -from threading import Event -from typing import ( - Any, - Generic, - Literal, - Optional, - Union, -) +""" +Individual worker process management for multi-process request execution. -from loguru import logger -from pydantic import Field +Manages worker processes that handle request scheduling, backend processing, and +coordination in distributed benchmark environments. Workers consume requests from +queues, apply timing strategies, process requests through backends, and publish +status updates while maintaining synchronization across the process group. 
+""" -from guidellm.backend import ( - Backend, - BackendType, - RequestArgs, - ResponseSummary, - StreamingTextResponse, +from __future__ import annotations + +import asyncio +import time +from multiprocessing.synchronize import Barrier as ProcessingBarrier +from multiprocessing.synchronize import Event as ProcessingEvent +from typing import Annotated, Generic, Literal + +try: + import uvloop + + HAS_UVLOOP: Annotated[ + bool, "Flag indicating if uvloop is available for event loop optimization" + ] = True +except ImportError: + uvloop = None + + HAS_UVLOOP: Annotated[ + bool, "Flag indicating if uvloop is available for event loop optimization" + ] = False + + +from guidellm.scheduler.objects import ( + BackendInterface, + MultiTurnRequestT, + RequestT, + ResponseT, + ScheduledRequestInfo, + SchedulerMessagingPydanticRegistry, ) -from guidellm.objects import StandardBaseModel -from guidellm.request import GenerationRequest -from guidellm.request.types import RequestT, ResponseT -from guidellm.scheduler.queues import MPQueues, Queue, QueueEmpty -from guidellm.scheduler.result import ( - SchedulerRequestInfo, - WorkerProcessRequest, - WorkerProcessResult, +from guidellm.scheduler.strategy import ScheduledRequestTimings +from guidellm.utils import ( + InterProcessMessaging, + wait_for_sync_barrier, + wait_for_sync_event, + wait_for_sync_objects, ) -from guidellm.scheduler.strategy import SchedulingStrategy - -__all__ = [ - "GenerativeRequestsWorker", - "GenerativeRequestsWorkerDescription", - "RequestsWorker", - "ResolveStatus", - "WorkerDescription", -] - - -@dataclass -class ResolveStatus: - requested: bool - completed: bool - errored: bool - canceled: bool - request_start: float - request_end: float +__all__ = ["WorkerProcess"] -class WorkerDescription(StandardBaseModel): - type_: Literal["worker"] = "worker" - - -class RequestsWorker(ABC, Generic[RequestT, ResponseT]): +class WorkerProcess(Generic[RequestT, ResponseT]): """ - An abstract base class for a worker that processes requests. - This class defines the interface for a worker that can resolve requests - asynchronously or synchronously within the Scheduler class. - Subclasses must implement the `resolve` method, - which takes a request directly given from the load generator, - along with the desired start_time for the request and a timeout_time. - The `resolve` method should return the response from the backend. + Individual worker process for distributed request execution and coordination. + + Manages the complete request lifecycle from queue consumption through backend + processing and status publication. Coordinates with other workers through + barriers and events while maintaining configurable concurrency limits and + timing strategies for request scheduling. 
+ + Example: + :: + worker = WorkerProcess( + messaging=messaging_interface, + async_limit=10, + startup_barrier=barrier, + shutdown_event=shutdown, + error_event=error, + backend=backend_instance, + request_timings=timing_strategy + ) + worker.run() """ - @property - @abstractmethod - def description(self) -> WorkerDescription: + def __init__( + self, + messaging: InterProcessMessaging[ + tuple[ + ResponseT | None, + RequestT | MultiTurnRequestT[RequestT], + ScheduledRequestInfo, + ], + ], + backend: BackendInterface[RequestT, ResponseT], + request_timings: ScheduledRequestTimings, + async_limit: int, + startup_barrier: ProcessingBarrier, + requests_generated_event: ProcessingEvent, + constraint_reached_event: ProcessingEvent, + shutdown_event: ProcessingEvent, + error_event: ProcessingEvent, + ): """ - An abstract property that must be implemented by subclasses. - This property should return a Serializable class representing the information - about the worker instance. + Initialize worker process instance. + + :param messaging: Inter-process communication interface for request coordination + :param backend: Backend instance for processing requests + :param request_timings: Timing strategy for request scheduling + :param async_limit: Maximum concurrent requests this worker can handle + :param startup_barrier: Multiprocessing barrier for coordinated startup + :param requests_generated_event: Event signaling when request generation is + complete + :param constraint_reached_event: Event signaling when processing constraints + are met + :param shutdown_event: Event for signaling graceful shutdown + :param error_event: Event for signaling error conditions across processes """ - ... + self.messaging = messaging + self.backend = backend + self.request_timings = request_timings + self.async_limit = async_limit + self.startup_barrier = startup_barrier + self.requests_generated_event = requests_generated_event + self.constraint_reached_event = constraint_reached_event + self.shutdown_event = shutdown_event + self.error_event = error_event + + # Internal states + self.startup_completed = False + self.backend_started = False + self.messaging_started = False + + def run(self): + """ + Main entry point for worker process execution. - @abstractmethod - async def prepare_multiprocessing(self): + Initializes asyncio event loop with optional uvloop optimization and starts + worker async operations. Handles event loop cleanup for forked processes. + + :raises RuntimeError: If worker encounters unrecoverable error during execution """ - An abstract method that must be implemented by subclasses. - This is useful for workers that have instance state that can not - be shared across processes and should be cleared out and re-initialized - for each new process. + try: + if HAS_UVLOOP: + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) + asyncio.run(self.run_async()) + except Exception as err: + self.error_event.set() + raise RuntimeError( + f"Worker process {self.messaging.worker_index} encountered an " + f"error: {err}" + ) from err + + async def run_async(self): """ - ... + Execute main asynchronous worker process logic. - @abstractmethod - async def resolve( - self, - request: RequestT, - timeout_time: float, - ) -> tuple[ResolveStatus, ResponseT]: - """ - An abstract method that must be implemented by subclasses. - This method should handle the resolution of a request through asyncio, - including any necessary backend processing and response handling. 
- - :param request: The request to be resolved generated by the load generator. - :param timeout_time: The timeout time for the request, if there is no timeout - given, then this will be math.inf. - :return: The response from the worker. + Orchestrates concurrent execution of request processing and shutdown monitoring + tasks. Handles task cleanup, error propagation, and cancellation coordination + when any task completes or fails. + + :raises RuntimeError: If worker tasks encounter unrecoverable errors + :raises asyncio.CancelledError: If worker process was cancelled """ - ... + stop_task = asyncio.create_task(self._stop_monitor()) + request_proc_task = asyncio.create_task(self._process_requests()) + caller_cancelled = False - async def send_result( - self, - results_queue: Queue[WorkerProcessResult[RequestT, ResponseT]], - result: WorkerProcessResult[RequestT, ResponseT], - ): - await asyncio.to_thread(results_queue.put, result) # type: ignore[attr-defined] + try: + await asyncio.wait( + [stop_task, request_proc_task], + return_when=asyncio.FIRST_COMPLETED, + ) + except asyncio.CancelledError: + caller_cancelled = True - async def resolve_scheduler_request( - self, - process_request: WorkerProcessRequest[RequestT, ResponseT], - dequeued_time: float, - start_time: float, - results_queue: Queue[WorkerProcessResult[RequestT, ResponseT]], - process_id: int, - ): - request = process_request.request - timeout_time = process_request.timeout_time - queued_time = process_request.queued_time - - info = SchedulerRequestInfo( - targeted_start_time=start_time, - queued_time=queued_time, - dequeued_time=dequeued_time, - scheduled_time=time.time(), - process_id=process_id, - ) - result: WorkerProcessResult[RequestT, ResponseT] = WorkerProcessResult( - type_="request_scheduled", - request=request, - response=None, - info=info, - ) - asyncio.create_task(self.send_result(results_queue, result)) + stop_task.cancel() + request_proc_task.cancel() - if (wait_time := start_time - time.time()) > 0: - await asyncio.sleep(wait_time) + try: + # Ensure all child tasks cancel correctly + await asyncio.wait( + [stop_task, request_proc_task], return_when=asyncio.ALL_COMPLETED + ) + except asyncio.CancelledError: + caller_cancelled = True + + if ( + task_err := ( + request_proc_task.exception() + if not request_proc_task.cancelled() + else stop_task.exception() + if not stop_task.cancelled() + else None + ) + ) is not None: + raise RuntimeError( + f"Worker process {self.messaging.worker_index} encountered an " + f"error: {task_err}" + ) from task_err - info.worker_start = time.time() - result = WorkerProcessResult( - type_="request_start", - request=request, - response=None, - info=info, - ) - asyncio.create_task(self.send_result(results_queue, result)) - - status, response = await self.resolve(request, timeout_time) - info.worker_end = time.time() - info.requested = status.requested - info.completed = status.completed - info.errored = status.errored - info.canceled = status.canceled - info.request_start = status.request_start - info.request_end = status.request_end - result = WorkerProcessResult( - type_="request_complete", - request=request, - response=response, - info=info, - ) - asyncio.create_task(self.send_result(results_queue, result)) + if caller_cancelled: + raise asyncio.CancelledError("Worker process was cancelled") - def process_loop_asynchronous( + async def _stop_monitor( self, - queues: MPQueues[RequestT, ResponseT], - strategy: SchedulingStrategy, - stop_event: Event, - max_concurrency: int, - 
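
# Illustrative sketch (not part of this patch): run_async() above follows a
# common asyncio idiom - race a "work" task against a "stop signal" task, then
# cancel and drain whichever is still pending before surfacing any error. A
# simplified, self-contained version of the same pattern:
import asyncio


async def race_work_against_stop(work_coro, stop_coro):
    work = asyncio.create_task(work_coro)
    stop = asyncio.create_task(stop_coro)

    # Wake up as soon as either the work finishes or the stop signal fires.
    await asyncio.wait({work, stop}, return_when=asyncio.FIRST_COMPLETED)

    # Cancel both and wait for them to unwind before checking for errors.
    work.cancel()
    stop.cancel()
    await asyncio.wait({work, stop}, return_when=asyncio.ALL_COMPLETED)

    for task in (work, stop):
        if not task.cancelled() and task.exception() is not None:
            raise task.exception()


async def _demo():
    # The "stop" fires first here, so the longer-running work gets cancelled.
    await race_work_against_stop(asyncio.sleep(0.5), asyncio.sleep(0.1))


asyncio.run(_demo())
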
process_id: int, - num_processes: int, - ): - async def _process_runner(): - lock = asyncio.Semaphore(max_concurrency) - times_iter = islice( - strategy.request_times(), - process_id, - None, - num_processes, - ) + ) -> Literal["error_event", "shutdown_event"]: + exit_key = await wait_for_sync_objects( + { + "error_event": self.error_event, + "shutdown_event": self.shutdown_event, + }, + poll_interval=self.messaging.poll_interval, + ) - start_time = None - while not stop_event.is_set(): - if start_time is None: - start_time = next(times_iter) - - # Yield control to the event loop. Sleep if we are way ahead - await asyncio.sleep(start_time - time.time() - 1) - await lock.acquire() - - try: - process_request = queues.requests.get_nowait() - dequeued_time = time.time() - except QueueEmpty: - lock.release() - continue - - def _request_callback( - _: asyncio.Future[WorkerProcessRequest[RequestT, ResponseT]], - ): - nonlocal lock - lock.release() - - task = asyncio.create_task( - self.resolve_scheduler_request( - process_request=process_request, - dequeued_time=dequeued_time, - start_time=start_time, - results_queue=queues.responses, - process_id=process_id, - ) - ) - task.add_done_callback(_request_callback) - start_time = None + if exit_key == "error_event": + raise RuntimeError( + f"Worker process {self.messaging.worker_index} received error signal." + ) + async def _process_requests(self): try: - asyncio.run(_process_runner()) - except Exception as exc: # noqa: BLE001 - logger.error( - f"Error in worker process {process_id}: {exc}", - exc_info=True, - stack_info=True, + # 1. Start up synchronization (backend, messaging, and other processes) + # 2. Messaging startup, receive requests until requests_generated event + await self._processing_startup() + + # 3. Run process requests loop until constraint_reached event + processing_task = asyncio.create_task(self._process_requests_loop()) + await wait_for_sync_event( + self.constraint_reached_event, + poll_interval=self.messaging.poll_interval, ) + processing_task.cancel() + + # 4. Cancel pending requests until proc canceled (manual, shutdown, error) + await self._cancel_requests_loop() + finally: + # 5. On cancel, shut down event, error event, or internal error: + # attempt to shut down this worker cleanly (stop backend and messaging) + await self._processing_shutdown() + + async def _processing_startup(self): + # Get backend ready + await self.backend.process_startup() + self.backend_started = True + await self.backend.validate() + + # Get messaging system ready + await self.messaging.start( + receive_stop_criteria=[self.requests_generated_event], + pydantic_models=list(SchedulerMessagingPydanticRegistry.registry.values()), + ) + self.messaging_started = True + # Wait for all processes to be ready + await wait_for_sync_barrier( + self.startup_barrier, + poll_interval=self.messaging.poll_interval, + ) -class GenerativeRequestsWorkerDescription(WorkerDescription): - type_: Literal["generative_requests_worker"] = "generative_requests_worker" # type: ignore[assignment] - backend_type: BackendType - backend_target: str - backend_model: str - backend_info: dict[str, Any] = Field( - default_factory=dict, - ) - - -class GenerativeRequestsWorker(RequestsWorker[GenerationRequest, ResponseSummary]): - """ - A class that handles the execution of requests using a backend. - This class is responsible for sending requests to the backend, - handling responses, and managing errors. 
+ self.startup_completed = True - :param backend: The backend to use for handling requests. - This should be an instance of Backend such as an OpenAIHTTPBackend. - """ + async def _processing_shutdown(self): + if self.backend_started: + await self.backend.process_shutdown() + self.backend_started = False - def __init__(self, backend: Backend): - self.backend = backend + if self.messaging_started: + await self.messaging.stop() + self.messaging_started = False - @property - def description(self) -> GenerativeRequestsWorkerDescription: - """ - Get the description of the worker. - :return: The description of the worker. - """ - return GenerativeRequestsWorkerDescription( - backend_type=self.backend.type_, - backend_target=self.backend.target, - backend_model=self.backend.model or "None", - backend_info=self.backend.info, - ) + self.startup_completed = False - async def prepare_multiprocessing(self): - """ - Prepare the worker for multiprocessing. - This is useful for workers that have instance state that can not - be shared across processes and should be cleared out and re-initialized - for each new process. - """ - await self.backend.prepare_multiprocessing() + async def _process_requests_loop(self): + try: + # Run request processing + async_semaphore = asyncio.Semaphore(self.async_limit) + pending_tasks: set[asyncio.Task] = set() + + def _task_done(task): + pending_tasks.discard(task) + async_semaphore.release() + + if not task.cancelled() and (exception := task.exception()): + raise exception + + # Main loop; loop until canceled + while True: + await async_semaphore.acquire() + request_task = asyncio.create_task(self._process_next_request()) + pending_tasks.add(request_task) + request_task.add_done_callback(_task_done) + except asyncio.CancelledError as err: + for task in pending_tasks: + task.cancel() + await asyncio.gather(*pending_tasks, return_exceptions=True) + + raise err + + async def _cancel_requests_loop(self): + while True: + try: + request: RequestT + request_info: ScheduledRequestInfo + request, request_info = await self.messaging.get( + timeout=self.messaging.poll_interval + ) + except asyncio.TimeoutError: + continue - def process_loop_asynchronous( - self, - queues: MPQueues[GenerationRequest, ResponseSummary], - strategy: SchedulingStrategy, - stop_event: Event, - max_concurrency: int, - process_id: int, - num_processes: int, - ): - asyncio.run(self.backend.validate()) - super().process_loop_asynchronous( - queues=queues, - strategy=strategy, - stop_event=stop_event, - max_concurrency=max_concurrency, - process_id=process_id, - num_processes=num_processes, - ) + request_info.scheduler_node_id = self.messaging.worker_index + request_info.error = "Request was cancelled" + request_info.scheduler_timings.resolve_end = time.time() + self._send_update("cancelled", None, request, request_info) - async def resolve( - self, - request: GenerationRequest, - timeout_time: float, - ) -> tuple[ResolveStatus, ResponseSummary]: - """ - Resolve a request by sending it to the backend and handling the response. - This method sends the request to the backend, waits for a response, - and handles any errors that may occur during the process. - - :param request: The request to resolve. - :param timeout_time: The time to wait for a response before timing out. - If timeout_time is math.inf, the request will not timeout. - :return: A ResponseSummary object containing the response from the backend. - If an error occurs, the ResponseSummary will contain the error message. 
- """ - resolve_start_time = time.time() - response = None - error: Optional[str] = None - status = ResolveStatus( - requested=False, - completed=False, - errored=False, - canceled=False, - request_start=-1, - request_end=-1, - ) + async def _process_next_request(self): + request: RequestT | MultiTurnRequestT[RequestT] | None = None + request_info: ScheduledRequestInfo | None = None + response: ResponseT | None = None try: - if timeout_time < time.time(): - raise asyncio.TimeoutError( - "The timeout time has already passed." - ) # exit early - - status.requested = True - request_func, request_kwargs = self._create_request_func_kwargs(request) - - async def _runner(): - # wrap function so we can enforce timeout and - # still return the latest state from the backend - async for resp in request_func(**request_kwargs): # type: ignore[operator] - nonlocal response - response = resp - - await asyncio.wait_for( - _runner(), - timeout=timeout_time - time.time() if timeout_time < math.inf else None, - ) + # Pull request from the queue + request, request_info = await self.messaging.get() - if not response: - raise ValueError( - f"No response received for request: {request} " - f"and backend: {self.backend}" - ) - if not isinstance(response, ResponseSummary): - raise ValueError( - f"Received no ResponseSummary for request: {request} " - f"and backend: {self.backend}, received: {response}" - ) + if isinstance(request, (list, tuple)): + raise NotImplementedError("Multi-turn requests are not yet supported") - status.completed = True - except asyncio.TimeoutError: - error = "TimeoutError: The request timed out before completing." - status.errored = True - status.canceled = True + # Calculate targeted start and set pending state for request + request_info.scheduler_node_id = self.messaging.worker_index + request_info.scheduler_timings.dequeued = time.time() + target_start = ( + request_info.scheduler_start_time + self.request_timings.next_offset() + ) + request_info.scheduler_timings.targeted_start = target_start + self._send_update("pending", response, request, request_info) + + # Schedule the request + current_time = time.time() + request_info.scheduler_timings.scheduled_at = current_time + if target_start > current_time: + await asyncio.sleep(target_start - current_time) + # Adapt delay so that scheduled at reflects the sleep time + request_info.scheduler_timings.scheduled_at = target_start + + # Process the request with the backend + request_info.scheduler_timings.resolve_start = time.time() + self._send_update("in_progress", response, request, request_info) + async for resp, info in self.backend.resolve(request, request_info, None): + response = resp + request_info = info + + # Complete the request + request_info.scheduler_timings.resolve_end = time.time() + self._send_update("completed", response, request, request_info) + + response = request = request_info = None + except asyncio.CancelledError: + # Handle cancellation + if request is not None and request_info is not None: + request_info.error = "Request was cancelled" + request_info.scheduler_timings.resolve_end = time.time() + self._send_update("cancelled", response, request, request_info) + raise except Exception as exc: # noqa: BLE001 - error = str(exc) - status.errored = True - - return self._handle_response( - status=status, - request=request, - response=response, - error=error, - resolve_start_time=resolve_start_time, - ) + if request is not None and request_info is not None: + request_info.error = str(exc) + 
request_info.scheduler_timings.resolve_end = time.time() + self._send_update("errored", response, request, request_info) - def _create_request_func_kwargs( + def _send_update( self, - request: GenerationRequest, - ) -> tuple[ - AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None], - dict[str, Any], - ]: - request_func: AsyncGenerator[ - Union[StreamingTextResponse, ResponseSummary], None - ] - request_kwargs: dict[str, Any] - - if request.request_type == "text_completions": - request_func = self.backend.text_completions # type: ignore[assignment] - request_kwargs = { - "prompt": request.content, - "request_id": request.request_id, - "prompt_token_count": request.stats.get("prompt_tokens", None), - "output_token_count": request.constraints.get("output_tokens", None), - **request.params, - } - elif request.request_type == "chat_completions": - request_func = self.backend.chat_completions # type: ignore[assignment] - request_kwargs = { - "content": request.content, - "request_id": request.request_id, - "prompt_token_count": request.stats.get("prompt_tokens", None), - "output_token_count": request.constraints.get("output_tokens", None), - **request.params, - } - else: - raise ValueError( - f"Invalid request type: {request.request_type} for {request}" - ) + new_status: Literal[ + "pending", "in_progress", "completed", "errored", "cancelled" + ], + response: ResponseT | None, + request: RequestT | MultiTurnRequestT[RequestT], + request_info: ScheduledRequestInfo, + ): + prev_status = request_info.status - return request_func, request_kwargs + if new_status == prev_status: + # already sent this update, don't send again + return - def _handle_response( - self, - status: ResolveStatus, - request: GenerationRequest, - response: Any, - error: Optional[str], - resolve_start_time: float, - ) -> tuple[ResolveStatus, ResponseSummary]: - if response is None or not isinstance( - response, (ResponseSummary, StreamingTextResponse) - ): - # nothing received or invalid response, fill in defaults for error - if response: - error = str( - ValueError( - f"Invalid response: {type(response)} for request: {request}; " - ) - ) + (error or "") - - response = ResponseSummary( - value="", - request_args=RequestArgs( - target=self.backend.target, - headers={}, - params={}, - payload={}, - ), - start_time=resolve_start_time, - end_time=status.request_end, - first_iter_time=None, - last_iter_time=None, - request_id=request.request_id, - error=error or "Unknown error", + try: + request_info.status = new_status + request_info = ( + request_info.model_copy() + if new_status not in {"completed", "errored", "cancelled"} + else request_info # last update, don't need to copy ) - elif isinstance(response, StreamingTextResponse): - response = ResponseSummary( - value=response.value, - request_args=RequestArgs( - target=self.backend.target, - headers={}, - params={}, - payload={}, - ), - start_time=response.start_time, - end_time=time.time(), - first_iter_time=response.first_iter_time, - last_iter_time=response.time if response.iter_count > 0 else None, - request_prompt_tokens=request.stats.get("prompt_tokens", None), - request_output_tokens=request.constraints.get("output_tokens", None), - response_prompt_tokens=None, - response_output_tokens=response.iter_count, - request_id=request.request_id, - error=error or "Unknown error", + self.messaging.put_sync( + (response, request, request_info), + timeout=-1, ) - - response.error = error - status.request_start = response.start_time - status.request_end = 
response.end_time - - return status, response + prev_status = new_status + except Exception as exc: + # Reset status to last one that succeeded or started function with + # Calling logic can retry after handling error, if possible + request_info.status = prev_status + raise exc diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py new file mode 100644 index 00000000..7369e5af --- /dev/null +++ b/src/guidellm/scheduler/worker_group.py @@ -0,0 +1,681 @@ +""" +Multi-process worker group orchestration for distributed request scheduling. + +Provides infrastructure for coordinating worker processes with shared state +management, inter-process communication, and lifecycle coordination. Handles +dynamic scaling, load balancing, constraint evaluation, and graceful shutdown +across distributed workers processing concurrent requests. +""" + +from __future__ import annotations + +import asyncio +import math +import threading +import time +import uuid +from collections.abc import AsyncIterator, Generator, Iterable, Iterator +from multiprocessing import get_context +from multiprocessing.context import BaseContext +from multiprocessing.managers import BaseManager +from multiprocessing.process import BaseProcess +from multiprocessing.synchronize import Barrier, Event +from typing import Generic, NamedTuple + +from guidellm.scheduler.constraints import Constraint, RequestsExhaustedConstraint +from guidellm.scheduler.objects import ( + BackendInterface, + MultiTurnRequestT, + RequestT, + ResponseT, + ScheduledRequestInfo, + SchedulerMessagingPydanticRegistry, + SchedulerState, + SchedulerUpdateAction, +) +from guidellm.scheduler.strategy import SchedulingStrategy +from guidellm.scheduler.worker import WorkerProcess +from guidellm.settings import settings +from guidellm.utils import ( + InterProcessMessaging, + InterProcessMessagingManagerQueue, + InterProcessMessagingPipe, + InterProcessMessagingQueue, + wait_for_sync_objects, +) + +__all__ = ["WorkerGroupState", "WorkerProcessGroup"] + + +class WorkerProcessGroup(Generic[RequestT, ResponseT]): + """ + Orchestrates multiple worker processes for distributed request processing. + + Manages process lifecycle, request distribution, response collection, and state + synchronization across workers. Handles dynamic scaling, load balancing, and + constraint evaluation with graceful shutdown coordination for high-throughput + request processing workloads. + + Example: + :: + from guidellm.scheduler.worker_group import WorkerProcessGroup + + group = WorkerProcessGroup( + requests=request_iterable, + cycle_requests=None, + backend=backend_instance, + strategy=scheduling_strategy, + constraints={"max_time": time_constraint} + ) + + await group.create_processes() + await group.start(time.time()) + + async for response, request, info, state in group.request_updates(): + if response is not None: + # Process completed request + handle_response(response) + + await group.shutdown() + """ + + def __init__( + self, + requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, + cycle_requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, + backend: BackendInterface[RequestT, ResponseT], + strategy: SchedulingStrategy, + constraints: dict[str, Constraint], + ): + """ + Initialize a worker process group for distributed request processing. 
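+
+        Illustrative sketch (hypothetical request objects; assumes backend,
+        strategy, and a stopping constraint are already constructed). Passing
+        cycle_requests repeats the same requests until a constraint halts
+        queuing:
+        ::
+            group = WorkerProcessGroup(
+                requests=None,
+                cycle_requests=[request_a, request_b],  # cycled indefinitely
+                backend=backend,
+                strategy=strategy,
+                constraints={"max_duration": max_duration_constraint},
+            )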
+ + :param requests: Finite iterable of requests to process sequentially + :param cycle_requests: Iterable of requests to cycle through indefinitely + :param backend: Backend interface for processing requests + :param strategy: Scheduling strategy for request timing and distribution + :param constraints: Named constraints for controlling execution behavior + :raises ValueError: If neither requests nor cycle_requests are provided, + or if cycle_requests is an Iterator rather than Iterable + """ + if not requests and not cycle_requests: + raise ValueError( + "At least one of 'requests' or 'cycle_requests' must be provided. " + f"Got requests: {requests}, cycle_requests: {cycle_requests}" + ) + + if isinstance(cycle_requests, Iterator): + raise ValueError( + f"cycle_requests must be an Iterable or None, not an Iterator. " + f"Got {type(cycle_requests)}" + ) + + self.requests = requests + self.cycle_requests = cycle_requests + self.backend = backend + self.strategy = strategy + self.constraints = constraints + + # Multiprocessing contexts and primitives, created in create_processes + self.mp_context: BaseContext = None + self.mp_manager: BaseManager = None + self.processes: list[BaseProcess] = None + self.startup_barrier: Barrier = None + self.requests_generated_event: Event = None + self.constraint_reached_event: Event = None + self.shutdown_event: Event = None + self.error_event: Event = None + + # Scheduler and messaging state, created in start + self.state: WorkerGroupState[ResponseT, RequestT] = None + self.messaging: InterProcessMessaging[ + tuple[ + RequestT | MultiTurnRequestT[RequestT], + ScheduledRequestInfo, + ], + tuple[ + ResponseT | None, + RequestT | MultiTurnRequestT[RequestT], + ScheduledRequestInfo, + SchedulerState, + ], + ] = None + + async def create_processes(self): + """ + Create and initialize worker processes for distributed request processing. + + Sets up multiprocessing infrastructure and worker processes based on + strategy constraints, backend capabilities, and system configuration. + Determines optimal process count and concurrency limits, then spawns + worker processes with distributed request handling capabilities. 
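+
+        Illustrative sizing (hypothetical limits, not defaults): if the resolved
+        max concurrency is 10 and the process cap is 4, the group spawns 4
+        workers with 10 // 4 = 2 concurrent requests each, and the remainder
+        10 % 4 = 2 raises the async limit of the first two ranks to 3.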
+ + :raises RuntimeError: If process initialization or startup fails + """ + # Processes limits and params + max_conc: int = min( + self.strategy.requests_limit or math.inf, + self.backend.requests_limit or math.inf, + ) + if max_conc == math.inf: + # if concurrency not specified, use settings + max_conc = settings.max_concurrency + if max_conc <= 0: + raise RuntimeError("max_concurrency resolved to 0; increase limits/config") + + # Calculate number of processes, ensure we don't exceed the max concurrency, + # or limits from the backend, strategy, or user settings + num_processes = int( + min( + max_conc, + self.strategy.processes_limit or math.inf, + self.backend.processes_limit or math.inf, + settings.max_worker_processes, + ) + ) + if num_processes <= 0: + raise RuntimeError("num_processes resolved to 0; increase limits/config") + + per_proc_max_conc = max_conc // num_processes + max_pending_size = max( + 1, math.floor(max_conc * settings.mp_max_pending_buffer_percent) + ) + per_proc_max_buffer_size = max( + 1, math.floor(per_proc_max_conc * settings.mp_max_worker_buffer_percent) + ) + + # Initialize multiprocessing components + self.mp_context: BaseContext = get_context(settings.mp_context_type) + self.mp_manager = self.mp_context.Manager() + self.startup_barrier = self.mp_context.Barrier(num_processes + 1) + self.requests_generated_event = self.mp_context.Event() + self.constraint_reached_event = self.mp_context.Event() + self.shutdown_event = self.mp_context.Event() + self.error_event = self.mp_context.Event() + + if settings.mp_messaging_object == "queue": + self.messaging = InterProcessMessagingQueue( + mp_context=self.mp_context, + serialization=settings.mp_serialization, + encoding=settings.mp_encoding, + max_pending_size=max_pending_size, + max_buffer_send_size=settings.mp_requests_send_buffer_size, + poll_interval=settings.mp_poll_interval, + ) + elif settings.mp_messaging_object == "manager_queue": + self.messaging = InterProcessMessagingManagerQueue( + manager=self.mp_manager, + mp_context=self.mp_context, + serialization=settings.mp_serialization, + encoding=settings.mp_encoding, + max_pending_size=max_pending_size, + max_buffer_send_size=settings.mp_requests_send_buffer_size, + poll_interval=settings.mp_poll_interval, + ) + elif settings.mp_messaging_object == "pipe": + self.messaging = InterProcessMessagingPipe( + num_workers=num_processes, + mp_context=self.mp_context, + serialization=settings.mp_serialization, + encoding=settings.mp_encoding, + max_pending_size=max_pending_size, + max_buffer_send_size=settings.mp_requests_send_buffer_size, + poll_interval=settings.mp_poll_interval, + ) + + # Initialize worker processes + self.processes = [] + for rank in range(num_processes): + # Distribute any remainder across the first N ranks + async_limit = per_proc_max_conc + ( + 1 if rank < (max_conc % num_processes) else 0 + ) + + worker = WorkerProcess[RequestT, ResponseT]( + messaging=self.messaging.create_worker_copy( + worker_index=rank, + max_buffer_send_size=None, + max_buffer_receive_size=per_proc_max_buffer_size, + ), + backend=self.backend, + request_timings=self.strategy.create_request_timings( + local_rank=rank, + local_world_size=num_processes, + local_max_concurrency=async_limit, + ), + async_limit=async_limit, + startup_barrier=self.startup_barrier, + requests_generated_event=self.requests_generated_event, + constraint_reached_event=self.constraint_reached_event, + shutdown_event=self.shutdown_event, + error_event=self.error_event, + ) + proc = 
self.mp_context.Process(target=worker.run, daemon=False) + proc.start() + self.processes.append(proc) + + wait_key = await wait_for_sync_objects( + { + "startup_barrier": self.startup_barrier, + "shutdown_event": self.shutdown_event, + "error_event": self.error_event, + }, + poll_interval=settings.mp_poll_interval, + ) + + if wait_key == "error_event": + raise RuntimeError( + "Worker process group startup failed: error_event is set" + ) + + async def start(self, start_time: float): + """ + Begin request processing at the specified start time. + + Initializes scheduler state and background tasks, then waits until the + specified start time before beginning operations. Sets up inter-process + communication and coordinates synchronized startup across all workers. + + :param start_time: Unix timestamp when processing should begin + :raises RuntimeError: If workers encounter errors during startup or + if create_processes() was not called first + """ + if not self.processes: + raise RuntimeError("create_processes() must be called before start()") + + stop_send_requests_event = threading.Event() + send_requests_stopped_event = threading.Event() + self.state = WorkerGroupState[RequestT, ResponseT]( + start_time=start_time, + processes=self.processes, + constraints=self.constraints, + stop_send_requests_event=stop_send_requests_event, + send_requests_stopped_event=send_requests_stopped_event, + requests_generated_event=self.requests_generated_event, + constraint_reached_event=self.constraint_reached_event, + shutdown_event=self.shutdown_event, + error_event=self.error_event, + ) + await self.messaging.start( + send_items=self.state.requests_generator( + self.requests, self.cycle_requests + ), + receive_callback=self.state.received_callback, + send_stopped_event=send_requests_stopped_event, + send_stop_criteria=[stop_send_requests_event], + receive_stop_criteria=[self.shutdown_event], + pydantic_models=list(SchedulerMessagingPydanticRegistry.registry.values()), + ) + + if (wait_time := start_time - time.time()) > 0: + await asyncio.sleep(wait_time) + if self.error_event.is_set(): + raise RuntimeError( + "error_event is set in WorkerProcessGroup, " + "indicating an error occurred in one of the worker processes." + ) + + async def request_updates( + self, + ) -> AsyncIterator[ + tuple[ + ResponseT | None, + RequestT, + ScheduledRequestInfo, + SchedulerState, + ] + ]: + """ + Yield request processing updates as they become available. + + Returns an async iterator of request updates including response, request, + request scheduling info, and scheduler state. Updates occur on request queued, + processing start, and completion. Response is None until processing completes. + + :return: Async iterator yielding (response, request, request_info, state) + tuples where response is None until processing is complete + :raises RuntimeError: If workers encounter unrecoverable errors + """ + while True: + if self.error_event.is_set(): + raise RuntimeError( + "error_event is set in WorkerProcessGroup, " + "indicating an error occurred in one of the worker processes." + ) + + try: + ( + response, + request, + request_info, + scheduler_state, + ) = await self.messaging.get(timeout=settings.mp_poll_interval) + + yield response, request, request_info, scheduler_state + except asyncio.TimeoutError: + if self.shutdown_event.is_set(): + # Everything yielded, exit + break + + async def shutdown(self) -> list[Exception]: # noqa: C901 + """ + Gracefully shut down the worker process group and clean up resources. 
+ + Performs safe shutdown of worker processes, background tasks, and + multiprocessing resources. Coordinates orderly termination across + all workers and collects any exceptions encountered during shutdown. + + :return: List of exceptions encountered during shutdown; empty if no errors + """ + exceptions: list[Exception] = [] + if self.shutdown_event is not None: + self.shutdown_event.set() + + # Clear out start values + if self.messaging is not None: + try: + await asyncio.wait_for(self.messaging.stop(), timeout=5.0) + except Exception as err: # noqa: BLE001 + exceptions.append(err) + self.messaging = None + self.state = None + + # Clear out create processes values + if self.processes is not None: + for proc in self.processes: + try: + await asyncio.to_thread(proc.join, timeout=5.0) + if proc.exitcode is not None and proc.exitcode > 0: + exceptions.append( + RuntimeError( + f"Worker {proc.pid} exited with code {proc.exitcode}" + ) + ) + except Exception as err: # noqa: BLE001 + exceptions.append(err) + self.processes = None + self.startup_barrier = None + self.requests_generated_event = None + self.constraint_reached_event = None + self.shutdown_event = None + self.error_event = None + if self.mp_manager is not None: + try: + self.mp_manager.shutdown() + except Exception as err: # noqa: BLE001 + exceptions.append(err) + self.mp_manager = None + self.mp_context = None + + return exceptions + + +class _StateUpdate(NamedTuple): + state: SchedulerState + stop_queueing: bool + stop_processing: bool + + +class WorkerGroupState(Generic[RequestT, ResponseT]): + """ + Manages scheduler state and synchronization for worker process groups. + + Handles request generation, state updates, constraint evaluation, and + coordination between worker processes. Provides thread-safe state management + with request lifecycle tracking and constraint-based termination logic. + """ + + def __init__( + self, + start_time: float, + processes: list[BaseProcess], + constraints: dict[str, Constraint], + stop_send_requests_event: threading.Event, + send_requests_stopped_event: threading.Event, + requests_generated_event: Event, + constraint_reached_event: Event, + shutdown_event: Event, + error_event: Event, + ): + """ + Initialize worker group state management. 
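+
+        :param stop_send_requests_event: Threading event set by the request
+            generator to stop the send loop once constraints end queuing or all
+            requests have been generated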
+ + :param start_time: Unix timestamp when processing should begin + :param processes: List of worker process instances + :param constraints: Named constraints for controlling execution behavior + :param send_requests_stopped_event: Threading event for request coordination + :param requests_generated_event: Multiprocessing event for generation completion + :param constraint_reached_event: Multiprocessing event for constraint stopping + :param shutdown_event: Multiprocessing event for coordinated shutdown + :param error_event: Multiprocessing event for error condition signaling + """ + self.start_time = start_time + self.processes = processes + self.constraints = constraints + self.stop_send_requests_event = stop_send_requests_event + self.send_requests_stopped_event = send_requests_stopped_event + self.requests_generated_event = requests_generated_event + self.constraint_reached_event = constraint_reached_event + self.shutdown_event = shutdown_event + self.error_event = error_event + + self._update_lock: threading.Lock = threading.Lock() + self._state: SchedulerState = SchedulerState( + node_id=0, + num_processes=len(processes), + start_time=start_time, + ) + self._queued_requests = set() + self._pending_requests = set() + self._processing_requests = set() + + def requests_generator( + self, + requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, + cycle_requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, + ) -> Generator[tuple[RequestT | MultiTurnRequestT[RequestT],], None, None]: + """ + Generate request-info pairs for worker processing with constraint evaluation. + + Processes finite requests sequentially then cycles through repeating requests + indefinitely. Creates scheduling metadata for each request and evaluates + constraints to determine when to stop request generation. + + :param requests: Finite iterable of requests to process sequentially + :param cycle_requests: Iterable of requests to cycle through indefinitely + :return: Generator yielding (request, request_info) tuples + """ + + def _iter(): + if requests: + yield from requests + + if cycle_requests: + while True: + yield from cycle_requests + + count = 0 + request_info: ScheduledRequestInfo = None + for request in _iter(): + count += 1 + + if hasattr(request, "request_id"): + request_id = request.request_id + elif hasattr(request, "id"): + request_id = request.id + else: + request_id = str(uuid.uuid4()) + request_info: ScheduledRequestInfo = ScheduledRequestInfo( + request_id=request_id, + status="queued", + scheduler_process_id=0, + scheduler_start_time=self.start_time, + ) + state_update = self._locked_update(request_info) + yield (request, request_info) + + if state_update.stop_queueing: + self.stop_send_requests_event.set() + return + + # Reached the end, inject a RequestsExhaustedConstraint to record + self._locked_update( + info=None, + requests_exhausted=RequestsExhaustedConstraint(num_requests=count), + ) + self.stop_send_requests_event.set() + + def received_callback( + self, + update: tuple[ + ResponseT | None, + RequestT | MultiTurnRequestT, + ScheduledRequestInfo, + ], + ) -> tuple[ + ResponseT | None, + RequestT | MultiTurnRequestT, + ScheduledRequestInfo, + SchedulerState, + ]: + """ + Process received request updates and inject current scheduler state. + + Updates internal state tracking based on request status changes and + evaluates constraints to determine if processing should be terminated. + Triggers shutdown when stop conditions are met. 
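+
+        Signal ordering, as implemented below: requests_generated_event is set
+        once sending has stopped and the queue drains, constraint_reached_event
+        once a constraint halts processing, and shutdown_event once every
+        created request has been processed and the earlier events are set.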
+ + :param update: Tuple containing response, request, and request info + :return: Updated tuple with injected scheduler state + """ + response, request, request_info = update + state_update = self._locked_update(info=request_info) + + # Check if we need to tell workers to stop pulling new requests + # based on no more requests sent and all requests removed from queue + if ( + state_update.state.queued_requests == 0 + and self.send_requests_stopped_event.is_set() + and not self.requests_generated_event.is_set() + ): + self.requests_generated_event.set() + + # Check if we need to tell workers to stop processing requests (constraints) + if state_update.stop_processing and not self.constraint_reached_event.is_set(): + self.constraint_reached_event.set() + + # Check if all requests have been processed and can shutdown + if ( + state_update.state.processed_requests == state_update.state.created_requests + and self.send_requests_stopped_event.is_set() + and self.requests_generated_event.is_set() + and self.constraint_reached_event.is_set() + and not self.shutdown_event.is_set() + ): + self.shutdown_event.set() + + return ( + response, + request, + request_info, + state_update.state, # inject state for updates to be yielded back + ) + + def _locked_update( + self, + info: ScheduledRequestInfo | None = None, + **add_constraints: dict[str, Constraint], + ) -> _StateUpdate: + with self._update_lock: + if add_constraints: + self.constraints.update(add_constraints) + + if info is not None: + self._state.end_time = time.time() # Always update in case last update + self._update_state_request_counts(info) + self._update_with_constraints(info) + + state_copy: SchedulerState = self._state.model_copy() + + return _StateUpdate( + state_copy, + state_copy.end_queuing_time is not None, + state_copy.end_processing_time is not None, + ) + + def _update_state_request_counts(self, info: ScheduledRequestInfo): + if info.status == "queued": + self._queued_requests.add(info.request_id) + self._state.queued_requests = len(self._queued_requests) + self._state.created_requests += 1 + elif info.status == "pending": + self._queued_requests.remove(info.request_id) + self._state.queued_requests = len(self._queued_requests) + self._pending_requests.add(info.request_id) + self._state.pending_requests = len(self._pending_requests) + elif info.status == "in_progress": + self._pending_requests.remove(info.request_id) + self._state.pending_requests = len(self._pending_requests) + self._processing_requests.add(info.request_id) + self._state.processing_requests = len(self._processing_requests) + elif info.status == "completed": + self._processing_requests.remove(info.request_id) + self._state.processing_requests = len(self._processing_requests) + self._state.processed_requests += 1 + self._state.successful_requests += 1 + elif info.status in ("errored", "cancelled"): + if info.request_id in self._queued_requests: + self._queued_requests.remove(info.request_id) + self._state.queued_requests = len(self._queued_requests) + elif info.request_id in self._pending_requests: + self._pending_requests.remove(info.request_id) + self._state.pending_requests = len(self._pending_requests) + elif info.request_id in self._processing_requests: + self._processing_requests.remove(info.request_id) + self._state.processing_requests = len(self._processing_requests) + + self._state.processed_requests += 1 + self._state.errored_requests += 1 if info.status == "errored" else 0 + self._state.cancelled_requests += 1 if info.status == "cancelled" else 0 + 
else: + raise ValueError(f"Unknown request_info status {info.status} for {info}") + + def _update_with_constraints(self, info: ScheduledRequestInfo): + actions: dict[str, SchedulerUpdateAction] = { + name: const(self._state, info) for name, const in self.constraints.items() + } + self._state.scheduler_constraints = actions + stop_queuing_actions = {} + stop_processing_actions = {} + + for key, action in actions.items(): + # Action updates + if ( + self._state.end_queuing_time is None + and action.request_queuing == "stop" + ): + stop_queuing_actions[key] = action + if ( + self._state.end_processing_time is None + and action.request_processing in ("stop_local", "stop_all") + ): + stop_processing_actions[key] = action + + for progress_key in ( + "remaining_fraction", + "remaining_requests", + "remaining_duration", + ): + if (new_val := action.progress.get(progress_key)) is not None and ( + getattr(self._state, progress_key) is None + or new_val < getattr(self._state, progress_key) + ): + setattr(self._state, progress_key, new_val) + + if stop_queuing_actions: + self._state.end_queuing_constraints = stop_queuing_actions + self._state.end_queuing_time = time.time() + + if stop_processing_actions: + self._state.end_processing_constraints = stop_processing_actions + self._state.end_processing_time = time.time() diff --git a/tests/unit/scheduler/__init__.py b/tests/unit/scheduler/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/scheduler/test_constraints.py b/tests/unit/scheduler/test_constraints.py new file mode 100644 index 00000000..0cdec5e2 --- /dev/null +++ b/tests/unit/scheduler/test_constraints.py @@ -0,0 +1,1412 @@ +import inspect +import random +import time +from abc import ABC +from typing import Protocol + +import pytest +from pydantic import ValidationError + +from guidellm.scheduler import ( + Constraint, + ConstraintInitializer, + ConstraintsInitializerFactory, + MaxDurationConstraint, + MaxErrorRateConstraint, + MaxErrorsConstraint, + MaxGlobalErrorRateConstraint, + MaxNumberConstraint, + PydanticConstraintInitializer, + ScheduledRequestInfo, + SchedulerState, + SchedulerUpdateAction, + SerializableConstraintInitializer, + UnserializableConstraintInitializer, +) +from guidellm.utils import InfoMixin, StandardBaseModel + + +class TestConstraint: + """Test the Constraint protocol.""" + + @pytest.mark.smoke + def test_is_protocol(self): + """Test that Constraint is a protocol and runtime checkable.""" + assert issubclass(Constraint, Protocol) + assert hasattr(Constraint, "_is_protocol") + assert Constraint._is_protocol is True + assert hasattr(Constraint, "_is_runtime_protocol") + assert Constraint._is_runtime_protocol is True + + @pytest.mark.smoke + def test_protocol_method_signature(self): + """Test that the Constraint protocol has the correct method signature.""" + call_method = Constraint.__call__ + sig = inspect.signature(call_method) + + expected_params = ["self", "state", "request"] + assert list(sig.parameters.keys()) == expected_params + + params = sig.parameters + assert "state" in params + assert "request" in params + + @pytest.mark.smoke + def test_runtime_is_constraint(self): + """Test that Constraint can be checked at runtime using isinstance.""" + + class ValidConstraint: + def __call__( + self, + state: SchedulerState, + request: ScheduledRequestInfo, + ) -> SchedulerUpdateAction: + return SchedulerUpdateAction() + + valid_instance = ValidConstraint() + assert isinstance(valid_instance, Constraint) + + class InvalidConstraint: + pass + 
+ invalid_instance = InvalidConstraint() + assert not isinstance(invalid_instance, Constraint) + + @pytest.mark.smoke + def test_runtime_is_not_intializer(self): + """ + Test that a class not implementing the ConstraintInitializer + protocol is not recognized as such. + """ + + class ValidConstraint: + def __call__( + self, + state: SchedulerState, + request: ScheduledRequestInfo, + ) -> SchedulerUpdateAction: + return SchedulerUpdateAction() + + not_initializer_instance = ValidConstraint() + assert not isinstance(not_initializer_instance, ConstraintInitializer) + + +class TestConstraintInitializer: + """Test the ConstraintInitializer protocol.""" + + @pytest.mark.smoke + def test_is_protocol(self): + """Test that ConstraintInitializer is a protocol and runtime checkable.""" + assert issubclass(ConstraintInitializer, Protocol) + assert hasattr(ConstraintInitializer, "_is_protocol") + assert ConstraintInitializer._is_protocol is True + assert hasattr(ConstraintInitializer, "_is_runtime_protocol") + assert ConstraintInitializer._is_runtime_protocol is True + + @pytest.mark.smoke + def test_protocol_method_signature(self): + """Test that ConstraintInitializer protocol has correct method signature.""" + create_constraint_method = ConstraintInitializer.create_constraint + sig = inspect.signature(create_constraint_method) + + expected_params = ["self", "kwargs"] + assert list(sig.parameters.keys()) == expected_params + kwargs_param = sig.parameters["kwargs"] + assert kwargs_param.kind == kwargs_param.VAR_KEYWORD + + @pytest.mark.smoke + def test_runtime_is_initializer(self): + """Test that ConstraintInitializer can be checked at runtime.""" + + class ValidInitializer: + def create_constraint(self, **kwargs) -> Constraint: + class SimpleConstraint: + def __call__( + self, + state: SchedulerState, + request: ScheduledRequestInfo, + ) -> SchedulerUpdateAction: + return SchedulerUpdateAction() + + return SimpleConstraint() + + valid_instance = ValidInitializer() + assert isinstance(valid_instance, ConstraintInitializer) + + @pytest.mark.smoke + def test_runtime_is_not_constraint(self): + """ + Test that a class not implementing the Constraint protocol + is not recognized as such. 
+ """ + + class ValidInitializer: + def create_constraint(self, **kwargs) -> Constraint: + class SimpleConstraint: + def __call__( + self, + state: SchedulerState, + request: ScheduledRequestInfo, + ) -> SchedulerUpdateAction: + return SchedulerUpdateAction() + + return SimpleConstraint() + + not_constraint_instance = ValidInitializer() + assert not isinstance(not_constraint_instance, Constraint) + + +class TestSerializableConstraintInitializer: + """Test the SerializableConstraintInitializer protocol.""" + + @pytest.mark.smoke + def test_is_protocol(self): + """Test SerializableConstraintInitializer is a protocol and checkable.""" + assert issubclass(SerializableConstraintInitializer, Protocol) + assert hasattr(SerializableConstraintInitializer, "_is_protocol") + assert SerializableConstraintInitializer._is_protocol is True + assert hasattr(SerializableConstraintInitializer, "_is_runtime_protocol") + assert SerializableConstraintInitializer._is_runtime_protocol is True + + @pytest.mark.smoke + def test_protocol_method_signatures(self): + """Test SerializableConstraintInitializer protocol has correct signatures.""" + methods = [ + "validated_kwargs", + "model_validate", + "model_dump", + "create_constraint", + ] + + for method_name in methods: + assert hasattr(SerializableConstraintInitializer, method_name) + + @pytest.mark.smoke + def test_runtime_is_serializable_initializer(self): + """Test that SerializableConstraintInitializer can be checked at runtime.""" + + class ValidSerializableInitializer: + @classmethod + def validated_kwargs(cls, *args, **kwargs): + return kwargs + + @classmethod + def model_validate(cls, **kwargs): + return cls() + + def model_dump(self): + return {} + + def create_constraint(self, **kwargs): + class SimpleConstraint: + def __call__(self, state, request): + return SchedulerUpdateAction() + + return SimpleConstraint() + + valid_instance = ValidSerializableInitializer() + assert isinstance(valid_instance, SerializableConstraintInitializer) + + +class TestPydanticConstraintInitializer: + """Test the PydanticConstraintInitializer implementation.""" + + @pytest.mark.smoke + def test_class_signatures(self): + """Test PydanticConstraintInitializer inheritance and abstract methods.""" + assert issubclass(PydanticConstraintInitializer, StandardBaseModel) + assert issubclass(PydanticConstraintInitializer, ABC) + assert issubclass(PydanticConstraintInitializer, InfoMixin) + + @pytest.mark.smoke + def test_abstract_methods(self): + """Test that PydanticConstraintInitializer has required abstract methods.""" + abstract_methods = PydanticConstraintInitializer.__abstractmethods__ + expected_methods = {"validated_kwargs", "create_constraint"} + assert abstract_methods == expected_methods + + @pytest.mark.sanity + def test_cannot_instantiate_directly(self): + """Test that PydanticConstraintInitializer cannot be instantiated directly.""" + with pytest.raises(TypeError): + PydanticConstraintInitializer(type_="test") + + +class TestUnserializableConstraintInitializer: + """Test the UnserializableConstraintInitializer implementation.""" + + @pytest.fixture( + params=[ + {"orig_info": {}}, + {"orig_info": {"class": "SomeClass", "module": "some.module"}}, + ] + ) + def valid_instances(self, request): + """Fixture providing test data for UnserializableConstraintInitializer.""" + constructor_args = request.param + instance = UnserializableConstraintInitializer(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test 
UnserializableConstraintInitializer inheritance.""" + assert issubclass( + UnserializableConstraintInitializer, PydanticConstraintInitializer + ) + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test UnserializableConstraintInitializer initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, UnserializableConstraintInitializer) + assert instance.type_ == "unserializable" + assert instance.orig_info == constructor_args["orig_info"] + + @pytest.mark.smoke + def test_validated_kwargs(self): + """Test validated_kwargs class method.""" + result = UnserializableConstraintInitializer.validated_kwargs( + orig_info={"test": "data"} + ) + assert result == {"orig_info": {"test": "data"}} + + result = UnserializableConstraintInitializer.validated_kwargs() + assert result == {"orig_info": {}} + + @pytest.mark.sanity + def test_create_constraint_raises(self, valid_instances): + """Test that create_constraint raises RuntimeError.""" + instance, _ = valid_instances + with pytest.raises( + RuntimeError, match="Cannot create constraint from unserializable" + ): + instance.create_constraint() + + @pytest.mark.sanity + def test_call_raises(self, valid_instances): + """Test that calling constraint raises RuntimeError.""" + instance, _ = valid_instances + state = SchedulerState() + request = ScheduledRequestInfo() + + with pytest.raises( + RuntimeError, match="Cannot invoke unserializable constraint" + ): + instance(state, request) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test UnserializableConstraintInitializer serialization/deserialization.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert data["type_"] == "unserializable" + assert data["orig_info"] == constructor_args["orig_info"] + + reconstructed = UnserializableConstraintInitializer.model_validate(data) + assert reconstructed.type_ == instance.type_ + assert reconstructed.orig_info == instance.orig_info + + +class TestMaxNumberConstraint: + """Test the MaxNumberConstraint implementation.""" + + @pytest.fixture(params=[{"max_num": 100}, {"max_num": 50.5}, {"max_num": 1}]) + def valid_instances(self, request): + constructor_args = request.param + instance = MaxNumberConstraint(**constructor_args) + + return instance, constructor_args + + @pytest.mark.smoke + def test_is_constraint_protocol(self, valid_instances): + """Test that MaxNumberConstraint satisfies the Constraint protocol.""" + constraint, _ = valid_instances + assert isinstance(constraint, Constraint) + + @pytest.mark.smoke + def test_is_constraint_initializer_protocol(self, valid_instances): + """Test MaxNumberConstraint satisfies the ConstraintInitializer protocol.""" + constraint, _ = valid_instances + assert isinstance(constraint, ConstraintInitializer) + + @pytest.mark.smoke + def test_initialization_valid(self, valid_instances): + """Test that MaxNumberConstraint can be initialized with valid parameters.""" + instance, constructor_args = valid_instances + + for key, value in constructor_args.items(): + assert hasattr(instance, key) + assert getattr(instance, key) == value + + @pytest.mark.sanity + def test_initialization_invalid(self): + """Test that MaxNumberConstraint rejects invalid parameters.""" + with pytest.raises(ValidationError): + MaxNumberConstraint() + with pytest.raises(ValidationError): + MaxNumberConstraint(max_num=-1) + with pytest.raises(ValidationError): + MaxNumberConstraint(max_num=0) + with pytest.raises(ValidationError): + 
MaxNumberConstraint(max_num="invalid") + + @pytest.mark.smoke + def test_constraint_functionality(self, valid_instances): + """Test constraint returns correct actions and progress""" + instance, constructor_args = valid_instances + start_time = time.time() + + for num_requests in range(0, int(constructor_args["max_num"]) * 2 + 1, 1): + state = SchedulerState( + start_time=start_time, + created_requests=num_requests, + processed_requests=num_requests, + errored_requests=0, + ) + request_info = ScheduledRequestInfo( + request_id="test", status="completed", created_at=start_time + ) + + action = instance(state, request_info) + assert isinstance(action, SchedulerUpdateAction) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test that MaxNumberConstraint can be serialized and deserialized.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = MaxNumberConstraint.model_validate(data) + assert reconstructed.max_num == instance.max_num + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + @pytest.mark.smoke + def test_create_constraint_functionality(self, valid_instances): + """Test the constraint initializer functionality.""" + instance, constructor_args = valid_instances + + constraint = instance.create_constraint() + assert isinstance(constraint, MaxNumberConstraint) + assert constraint.max_num == constructor_args["max_num"] + + @pytest.mark.smoke + def test_validated_kwargs(self): + """Test MaxNumberConstraint.validated_kwargs class method.""" + result = MaxNumberConstraint.validated_kwargs(max_num=100) + assert result == {"max_num": 100, "current_index": -1} + + result = MaxNumberConstraint.validated_kwargs(50.5) + assert result == {"max_num": 50.5, "current_index": -1} + + @pytest.mark.smoke + def test_create_constraint(self, valid_instances): + """Test MaxNumberConstraint.create_constraint method.""" + instance, constructor_args = valid_instances + original_index = instance.current_index + constraint = instance.create_constraint() + + assert isinstance(constraint, MaxNumberConstraint) + assert constraint is not instance # Should return a copy + assert constraint.max_num == instance.max_num + assert instance.current_index == original_index + 1 # Original is incremented + assert constraint.current_index == original_index + 1 # Copy has incremented + + @pytest.mark.smoke + def test_factory_registration(self): + """Test MaxNumberConstraint is properly registered with expected aliases.""" + expected_aliases = ["max_number", "max_num", "max_requests", "max_req"] + + for alias in expected_aliases: + assert ConstraintsInitializerFactory.is_registered(alias) + registered_class = ConstraintsInitializerFactory.get_registered_object( + alias + ) + assert registered_class == MaxNumberConstraint + + @pytest.mark.smoke + @pytest.mark.parametrize( + "alias", ["max_number", "max_num", "max_requests", "max_req"] + ) + def test_factory_creation_with_aliases(self, alias): + """Test factory creation using different aliases.""" + # Test with dict configuration + constraint = ConstraintsInitializerFactory.create_constraint(alias, max_num=100) + assert isinstance(constraint, MaxNumberConstraint) + assert constraint.max_num == 100 + + # Test with simple value + constraint = ConstraintsInitializerFactory.create_constraint(alias, 50) + assert isinstance(constraint, MaxNumberConstraint) + assert constraint.max_num == 50 + + 
@pytest.mark.smoke + def test_factory_resolve_methods(self): + """Test factory resolve methods with various input formats.""" + # Test with dict config + resolved = ConstraintsInitializerFactory.resolve( + {"max_number": {"max_num": 200}} + ) + assert isinstance(resolved["max_number"], MaxNumberConstraint) + assert resolved["max_number"].max_num == 200 + + # Test with simple value + resolved = ConstraintsInitializerFactory.resolve({"max_num": 150}) + assert isinstance(resolved["max_num"], MaxNumberConstraint) + assert resolved["max_num"].max_num == 150 + + # Test with instance + instance = MaxNumberConstraint(max_num=75) + resolved = ConstraintsInitializerFactory.resolve({"max_requests": instance}) + assert resolved["max_requests"] is instance + + +class TestMaxDurationConstraint: + """Test the MaxDurationConstraint implementation.""" + + @pytest.fixture( + params=[{"max_duration": 2.0}, {"max_duration": 1}, {"max_duration": 0.5}] + ) + def valid_instances(self, request): + constructor_args = request.param + instance = MaxDurationConstraint(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_is_constraint_protocol(self, valid_instances): + """Test that MaxDurationConstraint satisfies the Constraint protocol.""" + constraint, _ = valid_instances + assert isinstance(constraint, Constraint) + + @pytest.mark.smoke + def test_is_constraint_initializer_protocol(self, valid_instances): + """ + Test that MaxDurationConstraint also satisfies + the ConstraintInitializer protocol. + """ + constraint, _ = valid_instances + assert isinstance(constraint, ConstraintInitializer) + + @pytest.mark.smoke + def test_initialization_valid(self, valid_instances): + """Test that MaxDurationConstraint can be initialized with valid parameters.""" + instance, constructor_args = valid_instances + + for key, value in constructor_args.items(): + assert hasattr(instance, key) + assert getattr(instance, key) == value + + @pytest.mark.sanity + def test_initialization_invalid(self): + """Test that MaxDurationConstraint rejects invalid parameters.""" + with pytest.raises(ValidationError): + MaxDurationConstraint() + with pytest.raises(ValidationError): + MaxDurationConstraint(max_duration=-1) + with pytest.raises(ValidationError): + MaxDurationConstraint(max_duration=0) + with pytest.raises(ValidationError): + MaxDurationConstraint(max_duration="invalid") + + @pytest.mark.smoke + def test_constraint_functionality(self, valid_instances): + """Test constraint returns correct actions and progress through a time loop""" + instance, constructor_args = valid_instances + start_time = time.time() + + max_duration = constructor_args["max_duration"] + sleep_interval = max_duration * 0.05 + target_duration = max_duration * 1.5 + + elapsed = 0.0 + step = 0 + + while elapsed <= target_duration: + state = SchedulerState( + node_id=0, + num_processes=1, + start_time=start_time, + created_requests=step + 1, + processed_requests=step, + ) + request = ScheduledRequestInfo( + request_id=f"test-{step}", + status="completed", + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=start_time, + ) + + action = instance(state, request) + assert isinstance(action, SchedulerUpdateAction) + + duration_exceeded = elapsed >= max_duration + + if not duration_exceeded: + assert action.request_queuing == "continue" + assert action.request_processing == "continue" + else: + assert action.request_queuing == "stop" + assert action.request_processing == "stop_local" + assert isinstance(action.metadata, 
dict) + assert action.metadata["max_duration"] == max_duration + assert action.metadata["elapsed_time"] == pytest.approx(elapsed, abs=0.01) + assert action.metadata["duration_exceeded"] == duration_exceeded + assert action.metadata["start_time"] == start_time + assert isinstance(action.progress, dict) + expected_remaining_fraction = max(0.0, 1.0 - elapsed / max_duration) + expected_remaining_duration = max(0.0, max_duration - elapsed) + assert action.progress["remaining_fraction"] == pytest.approx( + expected_remaining_fraction, abs=0.1 + ) + assert action.progress["remaining_duration"] == pytest.approx( + expected_remaining_duration, abs=0.1 + ) + time.sleep(sleep_interval) + elapsed = time.time() - start_time + step += 1 + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test that MaxDurationConstraint can be serialized and deserialized.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = MaxDurationConstraint.model_validate(data) + assert reconstructed.max_duration == instance.max_duration + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + @pytest.mark.smoke + def test_create_constraint_functionality(self, valid_instances): + """Test the constraint initializer functionality.""" + instance, constructor_args = valid_instances + + constraint = instance.create_constraint() + assert isinstance(constraint, MaxDurationConstraint) + assert constraint.max_duration == constructor_args["max_duration"] + + @pytest.mark.smoke + def test_validated_kwargs(self): + """Test MaxDurationConstraint.validated_kwargs class method.""" + result = MaxDurationConstraint.validated_kwargs(max_duration=60.0) + assert result == {"max_duration": 60.0, "current_index": -1} + + result = MaxDurationConstraint.validated_kwargs(30) + assert result == {"max_duration": 30, "current_index": -1} + + @pytest.mark.smoke + def test_create_constraint(self, valid_instances): + """Test MaxDurationConstraint.create_constraint method.""" + instance, constructor_args = valid_instances + original_index = instance.current_index + constraint = instance.create_constraint() + + assert isinstance(constraint, MaxDurationConstraint) + assert constraint is not instance # Should return a copy + assert constraint.max_duration == instance.max_duration + assert instance.current_index == original_index + 1 # Original is incremented + assert constraint.current_index == original_index + 1 # Copy has incremented + + @pytest.mark.smoke + def test_factory_registration(self): + """Test MaxDurationConstraint is properly registered with expected aliases.""" + expected_aliases = [ + "max_duration", + "max_dur", + "max_sec", + "max_seconds", + "max_min", + "max_minutes", + ] + + for alias in expected_aliases: + assert ConstraintsInitializerFactory.is_registered(alias) + registered_class = ConstraintsInitializerFactory.get_registered_object( + alias + ) + assert registered_class == MaxDurationConstraint + + @pytest.mark.smoke + @pytest.mark.parametrize( + "alias", + ["max_duration", "max_dur", "max_sec", "max_seconds", "max_min", "max_minutes"], + ) + def test_factory_creation_with_aliases(self, alias): + """Test factory creation using different aliases.""" + # Test with dict configuration + constraint = ConstraintsInitializerFactory.create_constraint( + alias, max_duration=60.0 + ) + assert isinstance(constraint, MaxDurationConstraint) + assert 
constraint.max_duration == 60.0 + + # Test with simple value + constraint = ConstraintsInitializerFactory.create_constraint(alias, 30.0) + assert isinstance(constraint, MaxDurationConstraint) + assert constraint.max_duration == 30.0 + + @pytest.mark.smoke + def test_factory_resolve_methods(self): + """Test factory resolve methods with various input formats.""" + # Test with dict config + resolved = ConstraintsInitializerFactory.resolve( + {"max_duration": {"max_duration": 120.0}} + ) + assert isinstance(resolved["max_duration"], MaxDurationConstraint) + assert resolved["max_duration"].max_duration == 120.0 + + # Test with simple value + resolved = ConstraintsInitializerFactory.resolve({"max_sec": 90.0}) + assert isinstance(resolved["max_sec"], MaxDurationConstraint) + assert resolved["max_sec"].max_duration == 90.0 + + # Test with instance + instance = MaxDurationConstraint(max_duration=45.0) + resolved = ConstraintsInitializerFactory.resolve({"max_minutes": instance}) + assert resolved["max_minutes"] is instance + + +class TestMaxErrorsConstraint: + """Test the MaxErrorsConstraint implementation.""" + + @pytest.fixture(params=[{"max_errors": 10}, {"max_errors": 5.5}, {"max_errors": 1}]) + def valid_instances(self, request): + constructor_args = request.param + instance = MaxErrorsConstraint(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_is_constraint_protocol(self, valid_instances): + """Test that MaxErrorsConstraint satisfies the Constraint protocol.""" + constraint, _ = valid_instances + assert isinstance(constraint, Constraint) + + @pytest.mark.smoke + def test_is_constraint_initializer_protocol(self, valid_instances): + """ + Test that MaxErrorsConstraint also satisfies + the ConstraintInitializer protocol. 
+ """ + constraint, _ = valid_instances + assert isinstance(constraint, ConstraintInitializer) + + @pytest.mark.smoke + def test_initialization_valid(self, valid_instances): + """Test that MaxErrorsConstraint can be initialized with valid parameters.""" + instance, constructor_args = valid_instances + + for key, value in constructor_args.items(): + assert hasattr(instance, key) + assert getattr(instance, key) == value + + @pytest.mark.sanity + def test_initialization_invalid(self): + """Test that MaxErrorsConstraint rejects invalid parameters.""" + with pytest.raises(ValidationError): + MaxErrorsConstraint() + with pytest.raises(ValidationError): + MaxErrorsConstraint(max_errors=-1) + with pytest.raises(ValidationError): + MaxErrorsConstraint(max_errors=0) + with pytest.raises(ValidationError): + MaxErrorsConstraint(max_errors="invalid") + + @pytest.mark.smoke + def test_constraint_functionality(self, valid_instances): + """Test constraint returns correct actions""" + instance, constructor_args = valid_instances + start_time = time.time() + + for num_errors in range(int(constructor_args["max_errors"] * 2)): + created_requests = (num_errors + 1) * 2 + processed_requests = num_errors + 1 + state = SchedulerState( + node_id=0, + num_processes=1, + start_time=start_time, + created_requests=created_requests, + processed_requests=processed_requests, + errored_requests=num_errors, + ) + request = ScheduledRequestInfo( + request_id=f"test-{num_errors}", + status="completed", + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=start_time, + ) + action = instance(state, request) + assert isinstance(action, SchedulerUpdateAction) + errors_exceeded = num_errors >= constructor_args["max_errors"] + if not errors_exceeded: + assert action.request_queuing == "continue" + assert action.request_processing == "continue" + else: + assert action.request_queuing == "stop" + assert action.request_processing == "stop_all" + + assert isinstance(action.metadata, dict) + assert action.metadata == { + "max_errors": constructor_args["max_errors"], + "errors_exceeded": errors_exceeded, + "current_errors": num_errors, + } + assert action.progress == {} + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test that MaxErrorsConstraint can be serialized and deserialized.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = MaxErrorsConstraint.model_validate(data) + assert reconstructed.max_errors == instance.max_errors + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + @pytest.mark.smoke + def test_validated_kwargs(self): + """Test MaxErrorsConstraint.validated_kwargs class method.""" + result = MaxErrorsConstraint.validated_kwargs(max_errors=10) + assert result == {"max_errors": 10, "current_index": -1} + + result = MaxErrorsConstraint.validated_kwargs(5.5) + assert result == {"max_errors": 5.5, "current_index": -1} + + @pytest.mark.smoke + def test_create_constraint(self, valid_instances): + """Test MaxErrorsConstraint.create_constraint method.""" + instance, constructor_args = valid_instances + original_index = instance.current_index + constraint = instance.create_constraint() + + assert isinstance(constraint, MaxErrorsConstraint) + assert constraint is not instance + assert constraint.max_errors == instance.max_errors + assert instance.current_index == original_index + 1 + assert constraint.current_index == 
original_index + 1 + + @pytest.mark.smoke + def test_factory_registration(self): + """Test MaxErrorsConstraint is properly registered with expected aliases.""" + expected_aliases = ["max_errors", "max_err", "max_error", "max_errs"] + + for alias in expected_aliases: + assert ConstraintsInitializerFactory.is_registered(alias) + registered_class = ConstraintsInitializerFactory.get_registered_object( + alias + ) + assert registered_class == MaxErrorsConstraint + + @pytest.mark.smoke + @pytest.mark.parametrize( + "alias", ["max_errors", "max_err", "max_error", "max_errs"] + ) + def test_factory_creation_with_aliases(self, alias): + """Test factory creation using different aliases.""" + # Test with dict configuration + constraint = ConstraintsInitializerFactory.create_constraint( + alias, max_errors=10 + ) + assert isinstance(constraint, MaxErrorsConstraint) + assert constraint.max_errors == 10 + + # Test with simple value + constraint = ConstraintsInitializerFactory.create_constraint(alias, 5) + assert isinstance(constraint, MaxErrorsConstraint) + assert constraint.max_errors == 5 + + @pytest.mark.smoke + def test_factory_resolve_methods(self): + """Test factory resolve methods with various input formats.""" + # Test with dict config + resolved = ConstraintsInitializerFactory.resolve( + {"max_errors": {"max_errors": 15}} + ) + assert isinstance(resolved["max_errors"], MaxErrorsConstraint) + assert resolved["max_errors"].max_errors == 15 + + # Test with simple value + resolved = ConstraintsInitializerFactory.resolve({"max_err": 8}) + assert isinstance(resolved["max_err"], MaxErrorsConstraint) + assert resolved["max_err"].max_errors == 8 + + # Test with instance + instance = MaxErrorsConstraint(max_errors=3) + resolved = ConstraintsInitializerFactory.resolve({"max_error": instance}) + assert resolved["max_error"] is instance + + +class TestMaxErrorRateConstraint: + """Test the MaxErrorRateConstraint implementation.""" + + @pytest.fixture( + params=[ + {"max_error_rate": 0.1, "window_size": 40}, + {"max_error_rate": 0.5, "window_size": 50}, + {"max_error_rate": 0.05, "window_size": 55}, + ] + ) + def valid_instances(self, request): + constructor_args = request.param + instance = MaxErrorRateConstraint(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_is_constraint_protocol(self, valid_instances): + """Test that MaxErrorRateConstraint satisfies the Constraint protocol.""" + constraint, _ = valid_instances + assert isinstance(constraint, Constraint) + + @pytest.mark.smoke + def test_is_constraint_initializer_protocol(self, valid_instances): + """ + Test that MaxErrorRateConstraint also satisfies + the ConstraintInitializer protocol. 
+ """ + constraint, _ = valid_instances + assert isinstance(constraint, ConstraintInitializer) + + @pytest.mark.smoke + def test_initialization_valid(self, valid_instances): + """Test that MaxErrorRateConstraint can be initialized with valid parameters.""" + instance, constructor_args = valid_instances + + for key, value in constructor_args.items(): + assert hasattr(instance, key) + assert getattr(instance, key) == value + + @pytest.mark.sanity + def test_initialization_invalid(self): + """Test that MaxErrorRateConstraint rejects invalid parameters.""" + with pytest.raises(ValidationError): + MaxErrorRateConstraint() + with pytest.raises(ValidationError): + MaxErrorRateConstraint(max_error_rate=0) + with pytest.raises(ValidationError): + MaxErrorRateConstraint(max_error_rate=-1) + with pytest.raises(ValidationError): + MaxErrorRateConstraint(max_error_rate=1.5) + with pytest.raises(ValidationError): + MaxErrorRateConstraint(max_error_rate=0.5, window_size=0) + with pytest.raises(ValidationError): + MaxErrorRateConstraint(max_error_rate="invalid") + + @pytest.mark.smoke + def test_constraint_functionality(self, valid_instances): + """Test constraint returns correct actions with sliding window behavior""" + instance, constructor_args = valid_instances + start_time = time.time() + + max_error_rate = constructor_args["max_error_rate"] + window_size = constructor_args["window_size"] + safety_factor = 1.5 + total_errors = 0 + error_window = [] + + for request_num in range(window_size * 2): + error_probability = max_error_rate * safety_factor + + if random.random() < error_probability: + total_errors += 1 + status = "errored" + error_window.append(1) + else: + status = "completed" + error_window.append(0) + error_window = ( + error_window[-window_size:] + if len(error_window) > window_size + else error_window + ) + + state = SchedulerState( + node_id=0, + num_processes=1, + start_time=start_time, + created_requests=request_num + 1, + processed_requests=request_num + 1, + ) + request = ScheduledRequestInfo( + request_id=f"test-{request_num}", + status=status, + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=start_time, + ) + + action = instance(state, request) + assert isinstance(action, SchedulerUpdateAction) + error_count = sum(instance.error_window) + processed_requests = state.processed_requests + exceeded_min_processed = processed_requests >= window_size + current_error_rate = ( + error_count / float(min(processed_requests, window_size)) + if processed_requests > 0 + else 0.0 + ) + exceeded_error_rate = current_error_rate >= max_error_rate + should_stop = exceeded_min_processed and exceeded_error_rate + expected_queuing = "stop" if should_stop else "continue" + expected_processing = "stop_all" if should_stop else "continue" + + assert action.request_queuing == expected_queuing + assert action.request_processing == expected_processing + assert isinstance(action.metadata, dict) + assert action.metadata["max_error_rate"] == max_error_rate + assert action.metadata["window_size"] == window_size + assert action.metadata["error_count"] == error_count + assert action.metadata["current_error_rate"] == current_error_rate + assert action.metadata["exceeded_error_rate"] == exceeded_error_rate + assert action.progress == {} + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test that MaxErrorRateConstraint can be serialized and deserialized.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + for key, value in 
constructor_args.items(): + assert data[key] == value + + reconstructed = MaxErrorRateConstraint.model_validate(data) + assert reconstructed.max_error_rate == instance.max_error_rate + assert reconstructed.window_size == instance.window_size + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + @pytest.mark.smoke + def test_validated_kwargs(self): + """Test MaxErrorRateConstraint.validated_kwargs class method.""" + result = MaxErrorRateConstraint.validated_kwargs( + max_error_rate=0.1, window_size=50 + ) + assert result == { + "max_error_rate": 0.1, + "window_size": 50, + "error_window": [], + "current_index": -1, + } + + result = MaxErrorRateConstraint.validated_kwargs(0.05) + assert result == { + "max_error_rate": 0.05, + "window_size": 30, + "error_window": [], + "current_index": -1, + } + + @pytest.mark.smoke + def test_create_constraint(self, valid_instances): + """Test MaxErrorRateConstraint.create_constraint method.""" + instance, constructor_args = valid_instances + original_index = instance.current_index + constraint = instance.create_constraint() + + assert isinstance(constraint, MaxErrorRateConstraint) + assert constraint is not instance # Should return a copy + assert constraint.max_error_rate == instance.max_error_rate + assert constraint.window_size == instance.window_size + assert instance.current_index == original_index + 1 # Original is incremented + assert constraint.current_index == original_index + 1 # Copy has incremented + + @pytest.mark.smoke + def test_factory_registration(self): + """Test MaxErrorRateConstraint is properly registered with expected aliases.""" + expected_aliases = ["max_error_rate", "max_err_rate", "max_errors_rate"] + + for alias in expected_aliases: + assert ConstraintsInitializerFactory.is_registered(alias) + registered_class = ConstraintsInitializerFactory.get_registered_object( + alias + ) + assert registered_class == MaxErrorRateConstraint + + @pytest.mark.smoke + @pytest.mark.parametrize( + "alias", ["max_error_rate", "max_err_rate", "max_errors_rate"] + ) + def test_factory_creation_with_aliases(self, alias): + """Test factory creation using different aliases.""" + # Test with dict configuration + constraint = ConstraintsInitializerFactory.create_constraint( + alias, max_error_rate=0.1, window_size=50 + ) + assert isinstance(constraint, MaxErrorRateConstraint) + assert constraint.max_error_rate == 0.1 + assert constraint.window_size == 50 + + # Test with simple value + constraint = ConstraintsInitializerFactory.create_constraint(alias, 0.05) + assert isinstance(constraint, MaxErrorRateConstraint) + assert constraint.max_error_rate == 0.05 + + @pytest.mark.smoke + def test_factory_resolve_methods(self): + """Test factory resolve methods with various input formats.""" + # Test with dict config + resolved = ConstraintsInitializerFactory.resolve( + {"max_error_rate": {"max_error_rate": 0.15, "window_size": 100}} + ) + assert isinstance(resolved["max_error_rate"], MaxErrorRateConstraint) + assert resolved["max_error_rate"].max_error_rate == 0.15 + assert resolved["max_error_rate"].window_size == 100 + + # Test with simple value + resolved = ConstraintsInitializerFactory.resolve({"max_err_rate": 0.08}) + assert isinstance(resolved["max_err_rate"], MaxErrorRateConstraint) + assert resolved["max_err_rate"].max_error_rate == 0.08 + + # Test with instance + instance = MaxErrorRateConstraint(max_error_rate=0.2, window_size=25) + resolved = ConstraintsInitializerFactory.resolve({"max_errors_rate": instance}) + 
assert resolved["max_errors_rate"] is instance + + +class TestMaxGlobalErrorRateConstraint: + """Test the MaxGlobalErrorRateConstraint implementation.""" + + @pytest.fixture( + params=[ + {"max_error_rate": 0.1, "min_processed": 50}, + {"max_error_rate": 0.2, "min_processed": 100}, + {"max_error_rate": 0.05, "min_processed": 31}, + ] + ) + def valid_instances(self, request): + constructor_args = request.param + instance = MaxGlobalErrorRateConstraint(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_is_constraint_protocol(self, valid_instances): + """Test that MaxGlobalErrorRateConstraint satisfies the Constraint protocol.""" + constraint, _ = valid_instances + assert isinstance(constraint, Constraint) + + @pytest.mark.smoke + def test_is_constraint_initializer_protocol(self, valid_instances): + """ + Test that MaxGlobalErrorRateConstraint also satisfies + the ConstraintInitializer protocol. + """ + constraint, _ = valid_instances + assert isinstance(constraint, ConstraintInitializer) + + @pytest.mark.smoke + def test_initialization_valid(self, valid_instances): + """ + Test that MaxGlobalErrorRateConstraint can be initialized + with valid parameters. + """ + instance, constructor_args = valid_instances + + for key, value in constructor_args.items(): + assert hasattr(instance, key) + assert getattr(instance, key) == value + + @pytest.mark.sanity + def test_initialization_invalid(self): + """Test that MaxGlobalErrorRateConstraint rejects invalid parameters.""" + with pytest.raises(ValidationError): + MaxGlobalErrorRateConstraint() + with pytest.raises(ValidationError): + MaxGlobalErrorRateConstraint(max_error_rate=0) + with pytest.raises(ValidationError): + MaxGlobalErrorRateConstraint(max_error_rate=-1) + with pytest.raises(ValidationError): + MaxGlobalErrorRateConstraint(max_error_rate=1.5) + with pytest.raises(ValidationError): + MaxGlobalErrorRateConstraint(max_error_rate=0.5, min_processed=0) + with pytest.raises(ValidationError): + MaxGlobalErrorRateConstraint(max_error_rate="invalid") + + @pytest.mark.smoke + def test_constraint_functionality(self, valid_instances): + """Test constraint returns correct actions based on global error rate""" + instance, constructor_args = valid_instances + start_time = time.time() + + max_error_rate = constructor_args["max_error_rate"] + min_processed = constructor_args["min_processed"] + safety_factor = 1.5 + total_requests = min_processed * 2 + total_errors = 0 + + for request_num in range(total_requests): + error_probability = max_error_rate * safety_factor + + if random.random() < error_probability: + total_errors += 1 + status = "errored" + else: + status = "completed" + + processed_requests = request_num + 1 + + state = SchedulerState( + node_id=0, + num_processes=1, + start_time=start_time, + created_requests=processed_requests + 10, + processed_requests=processed_requests, + errored_requests=total_errors, + ) + request = ScheduledRequestInfo( + request_id=f"test-{request_num}", + status=status, + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=start_time, + ) + + action = instance(state, request) + assert isinstance(action, SchedulerUpdateAction) + + exceeded_min_processed = processed_requests >= min_processed + error_rate = ( + total_errors / float(processed_requests) + if processed_requests > 0 + else 0.0 + ) + exceeded_error_rate = error_rate >= max_error_rate + should_stop = exceeded_min_processed and exceeded_error_rate + + expected_queuing = "stop" if should_stop else 
"continue" + expected_processing = "stop_all" if should_stop else "continue" + + assert action.request_queuing == expected_queuing + assert action.request_processing == expected_processing + + assert isinstance(action.metadata, dict) + assert action.metadata == { + "max_error_rate": max_error_rate, + "min_processed": min_processed, + "processed_requests": processed_requests, + "errored_requests": total_errors, + "error_rate": error_rate, + "exceeded_min_processed": exceeded_min_processed, + "exceeded_error_rate": exceeded_error_rate, + } + + # Error constraints don't provide progress information + assert action.progress == {} + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test that MaxGlobalErrorRateConstraint can be serialized and deserialized.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = MaxGlobalErrorRateConstraint.model_validate(data) + assert reconstructed.max_error_rate == instance.max_error_rate + assert reconstructed.min_processed == instance.min_processed + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + @pytest.mark.smoke + def test_validated_kwargs(self): + """Test MaxGlobalErrorRateConstraint.validated_kwargs class method.""" + result = MaxGlobalErrorRateConstraint.validated_kwargs( + max_error_rate=0.1, min_processed=50 + ) + assert result == { + "max_error_rate": 0.1, + "min_processed": 50, + "current_index": -1, + } + + result = MaxGlobalErrorRateConstraint.validated_kwargs(0.05) + assert result == { + "max_error_rate": 0.05, + "min_processed": 30, + "current_index": -1, + } + + @pytest.mark.smoke + def test_create_constraint(self, valid_instances): + """Test MaxGlobalErrorRateConstraint.create_constraint method.""" + instance, constructor_args = valid_instances + original_index = instance.current_index + constraint = instance.create_constraint() + + assert isinstance(constraint, MaxGlobalErrorRateConstraint) + assert constraint is not instance # Should return a copy + assert constraint.max_error_rate == instance.max_error_rate + assert constraint.min_processed == instance.min_processed + assert instance.current_index == original_index + 1 # Original is incremented + assert constraint.current_index == original_index + 1 # Copy has incremented + + @pytest.mark.smoke + def test_factory_registration(self): + """Test MaxGlobalErrorRateConstraint is properly registered with aliases.""" + expected_aliases = [ + "max_global_error_rate", + "max_global_err_rate", + "max_global_errors_rate", + ] + + for alias in expected_aliases: + assert ConstraintsInitializerFactory.is_registered(alias) + registered_class = ConstraintsInitializerFactory.get_registered_object( + alias + ) + assert registered_class == MaxGlobalErrorRateConstraint + + @pytest.mark.smoke + @pytest.mark.parametrize( + "alias", + ["max_global_error_rate", "max_global_err_rate", "max_global_errors_rate"], + ) + def test_factory_creation_with_aliases(self, alias): + """Test factory creation using different aliases.""" + # Test with dict configuration + constraint = ConstraintsInitializerFactory.create_constraint( + alias, max_error_rate=0.1, min_processed=50 + ) + assert isinstance(constraint, MaxGlobalErrorRateConstraint) + assert constraint.max_error_rate == 0.1 + assert constraint.min_processed == 50 + + # Test with simple value + constraint = ConstraintsInitializerFactory.create_constraint(alias, 0.05) + assert 
isinstance(constraint, MaxGlobalErrorRateConstraint) + assert constraint.max_error_rate == 0.05 + + @pytest.mark.smoke + def test_factory_resolve_methods(self): + """Test factory resolve methods with various input formats.""" + # Test with dict config + resolved = ConstraintsInitializerFactory.resolve( + {"max_global_error_rate": {"max_error_rate": 0.12, "min_processed": 100}} + ) + assert isinstance( + resolved["max_global_error_rate"], MaxGlobalErrorRateConstraint + ) + assert resolved["max_global_error_rate"].max_error_rate == 0.12 + assert resolved["max_global_error_rate"].min_processed == 100 + + # Test with simple value + resolved = ConstraintsInitializerFactory.resolve({"max_global_err_rate": 0.08}) + assert isinstance(resolved["max_global_err_rate"], MaxGlobalErrorRateConstraint) + assert resolved["max_global_err_rate"].max_error_rate == 0.08 + + # Test with instance + instance = MaxGlobalErrorRateConstraint(max_error_rate=0.15, min_processed=75) + resolved = ConstraintsInitializerFactory.resolve( + {"max_global_errors_rate": instance} + ) + assert resolved["max_global_errors_rate"] is instance + + +class TestConstraintsInitializerFactory: + """Test the ConstraintsInitializerFactory implementation.""" + + @pytest.mark.sanity + def test_unregistered_key_fails(self): + """Test that unregistered keys raise ValueError.""" + unregistered_key = "nonexistent_constraint" + assert not ConstraintsInitializerFactory.is_registered(unregistered_key) + + with pytest.raises( + ValueError, match=f"Unknown constraint initializer key: {unregistered_key}" + ): + ConstraintsInitializerFactory.create(unregistered_key) + + with pytest.raises( + ValueError, match=f"Unknown constraint initializer key: {unregistered_key}" + ): + ConstraintsInitializerFactory.create_constraint(unregistered_key) + + @pytest.mark.smoke + def test_resolve_mixed_types(self): + """Test resolve method with mixed constraint types.""" + max_num_constraint = MaxNumberConstraint(max_num=25) + max_duration_initializer = MaxDurationConstraint(max_duration=120.0) + + mixed_spec = { + "max_number": max_num_constraint, + "max_duration": max_duration_initializer, + "max_errors": {"max_errors": 15}, + "max_error_rate": 0.08, + } + + resolved = ConstraintsInitializerFactory.resolve(mixed_spec) + + assert len(resolved) == 4 + assert all(isinstance(c, Constraint) for c in resolved.values()) + assert resolved["max_number"] is max_num_constraint + assert isinstance(resolved["max_duration"], MaxDurationConstraint) + assert isinstance(resolved["max_errors"], MaxErrorsConstraint) + assert isinstance(resolved["max_error_rate"], MaxErrorRateConstraint) + assert resolved["max_error_rate"].max_error_rate == 0.08 + + @pytest.mark.sanity + def test_resolve_with_invalid_key(self): + """Test that resolve raises ValueError for unregistered keys.""" + invalid_spec = { + "max_number": {"max_num": 100}, + "invalid_constraint": {"some_param": 42}, + } + + with pytest.raises( + ValueError, match="Unknown constraint initializer key: invalid_constraint" + ): + ConstraintsInitializerFactory.resolve(invalid_spec) + + @pytest.mark.smoke + def test_functional_constraint_creation(self): + """Test that created constraints are functionally correct.""" + constraint = ConstraintsInitializerFactory.create_constraint( + "max_number", max_num=10 + ) + start_time = time.time() + state = SchedulerState( + node_id=0, + num_processes=1, + start_time=start_time, + created_requests=5, + processed_requests=5, + ) + request = ScheduledRequestInfo( + request_id="test-request", + 
status="completed", + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=start_time, + ) + + action = constraint(state, request) + assert isinstance(action, SchedulerUpdateAction) + assert action.request_queuing == "continue" + assert action.request_processing == "continue" + + state_exceeded = SchedulerState( + node_id=0, + num_processes=1, + start_time=start_time, + created_requests=15, + processed_requests=15, + ) + action_exceeded = constraint(state_exceeded, request) + assert action_exceeded.request_queuing == "stop" + assert action_exceeded.request_processing == "stop_local" diff --git a/tests/unit/scheduler/test_environment.py b/tests/unit/scheduler/test_environment.py new file mode 100644 index 00000000..c73abe42 --- /dev/null +++ b/tests/unit/scheduler/test_environment.py @@ -0,0 +1,329 @@ +import inspect +import time +from abc import ABC +from typing import Generic +from unittest.mock import patch + +import pytest + +from guidellm.scheduler import ( + Environment, + MaxNumberConstraint, + NonDistributedEnvironment, + RequestT, + ResponseT, + ScheduledRequestInfo, + SchedulerState, + SynchronousStrategy, +) +from guidellm.utils import InfoMixin + + +class TestEnvironment: + @pytest.mark.smoke + def test_class_signatures(self): + """Test Environment inheritance and type relationships.""" + # Inheritance and abstract class properties + assert issubclass(Environment, ABC) + assert issubclass(Environment, Generic) + assert issubclass(Environment, InfoMixin) + assert inspect.isabstract(Environment) + assert hasattr(Environment, "info") + + # Abstract methods validation + expected_abstract_methods = { + "sync_run_params", + "sync_run_start", + "update_run_iteration", + "sync_run_error", + "sync_run_end", + } + assert Environment.__abstractmethods__ == expected_abstract_methods + + # Method signatures and async properties + method_signatures = { + "sync_run_params": ["self", "requests", "strategy", "constraints"], + "sync_run_start": ["self"], + "update_run_iteration": [ + "self", + "response", + "request", + "request_info", + "state", + ], + "sync_run_error": ["self", "err"], + "sync_run_end": ["self"], + } + + for method_name, expected_params in method_signatures.items(): + method = getattr(Environment, method_name) + sig = inspect.signature(method) + + # Check parameter names and count + param_names = list(sig.parameters.keys()) + assert param_names == expected_params + + # Check async nature + assert inspect.iscoroutinefunction(method) or inspect.isasyncgenfunction( + method + ) + + # Generic type parameters + orig_bases = getattr(Environment, "__orig_bases__", ()) + generic_base = next( + ( + base + for base in orig_bases + if hasattr(base, "__origin__") and base.__origin__ is Generic + ), + None, + ) + assert generic_base is not None + type_args = getattr(generic_base, "__args__", ()) + assert RequestT in type_args + assert ResponseT in type_args + + @pytest.mark.sanity + def test_invalid_implementation(self): + """Test that invalid implementations raise TypeError.""" + + class InvalidImplementation(Environment): + pass + + with pytest.raises(TypeError): + InvalidImplementation() + + @pytest.mark.sanity + def test_partial_invalid_implementation(self): + """Test that partial implementations raise TypeError.""" + + class PartialImplementation(Environment): + async def sync_run_params(self, requests, strategy, constraints): + return requests, strategy, constraints + + async def sync_run_start(self): + return 0.0 + + # Missing other required methods + + with 
pytest.raises(TypeError): + PartialImplementation() + + @pytest.mark.smoke + def test_implementation_construction(self): + """Test that concrete implementations can be constructed.""" + + class TestEnvironment(Environment): + async def sync_run_params(self, requests, strategy, constraints): + return requests, strategy, constraints + + async def sync_run_start(self): + return 0.0 + + async def update_run_iteration(self, response, request, request_info): + pass + + async def sync_run_error(self, err): + pass + + async def sync_run_end(self): + yield + + env = TestEnvironment() + assert isinstance(env, Environment) + + +class TestNonDistributedEnvironment: + @pytest.fixture + def valid_instances(self): + """Fixture providing test data for NonDistributedEnvironment.""" + instance = NonDistributedEnvironment() + return instance, {} + + @pytest.mark.smoke + def test_class_signatures(self, valid_instances): + """Test NonDistributedEnvironment inheritance and type relationships.""" + instance, constructor_args = valid_instances + assert issubclass(NonDistributedEnvironment, Environment) + assert issubclass(NonDistributedEnvironment, InfoMixin) + assert not inspect.isabstract(NonDistributedEnvironment) + + # Should inherit from Environment + assert isinstance(instance, Environment) + assert issubclass(NonDistributedEnvironment, Environment) + + # Should implement all required methods + required_methods = [ + "sync_run_params", + "sync_run_start", + "update_run_iteration", + "sync_run_error", + "sync_run_end", + ] + + for method_name in required_methods: + assert hasattr(instance, method_name) + assert callable(getattr(instance, method_name)) + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test NonDistributedEnvironment initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, NonDistributedEnvironment) + assert isinstance(instance, Environment) + assert instance.run_errors == [] + + @pytest.mark.sanity + def test_invalid_initialization(self): + """Test that initialization doesn't accept invalid arguments.""" + with pytest.raises(TypeError): + NonDistributedEnvironment("invalid_arg") + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + ("requests", "strategy", "constraints"), + [ + ( + ["request1", "request2"], + SynchronousStrategy(), + {"max_requests": MaxNumberConstraint(max_num=10)}, + ), + ( + [], + SynchronousStrategy(), + {}, + ), + ( + ["single_request"], + SynchronousStrategy(), + {"max_requests": MaxNumberConstraint(max_num=1)}, + ), + ( + range(5), + SynchronousStrategy(), + {"max_requests": MaxNumberConstraint(max_num=5)}, + ), + ], + ids=[ + "multiple_requests", + "empty_requests", + "single_request", + "range_requests", + ], + ) + async def test_sync_run_params( + self, valid_instances, requests, strategy, constraints + ): + """Test sync_run_params returns parameters unchanged.""" + instance, constructor_args = valid_instances + + ( + returned_requests, + returned_strategy, + returned_constraints, + ) = await instance.sync_run_params(requests, strategy, constraints) + + assert returned_requests is requests + assert returned_strategy is strategy + assert returned_constraints is constraints + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + ("mock_time", "delay", "expected"), + [ + (1000.0, 0.0, 1000.0), + (500.0, 1.5, 501.5), + (100.0, 10.0, 110.0), + (0.0, 2.5, 2.5), + ], + ids=["no_delay", "small_delay", "large_delay", "zero_time"], + ) + async def 
test_sync_run_start(self, valid_instances, mock_time, delay, expected): + """Test sync_run_start uses configuration value correctly.""" + instance, constructor_args = valid_instances + + with ( + patch("time.time", return_value=mock_time), + patch("guidellm.scheduler.environment.settings") as mock_settings, + ): + mock_settings.scheduler_start_delay_non_distributed = delay + start_time = await instance.sync_run_start() + assert start_time == expected + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + ("response", "req"), + [ + ("mock_response", "mock_request"), + (None, "mock_request"), + ("mock_response", None), + (None, None), + ], + ids=["both_present", "no_response", "no_request", "both_none"], + ) + async def test_update_run_iteration(self, valid_instances, response, req): + """Test update_run_iteration no-op behavior.""" + instance, constructor_args = valid_instances + + mock_request_info = ScheduledRequestInfo( + request_id="test-123", + status="completed", + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=time.time(), + ) + mock_state = SchedulerState( + node_id=0, + num_processes=1, + start_time=time.time(), + ) + + # Should not raise any errors and is a no-op + await instance.update_run_iteration( + response, req, mock_request_info, mock_state + ) + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_sync_run_error(self, valid_instances): + """Test sync_run_error stores errors correctly.""" + instance, constructor_args = valid_instances + + error1 = RuntimeError("First error") + error2 = ValueError("Second error") + + await instance.sync_run_error(error1) + assert error1 in instance.run_errors + assert len(instance.run_errors) == 1 + + await instance.sync_run_error(error2) + assert len(instance.run_errors) == 2 + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_sync_run_end(self, valid_instances): + """Test sync_run_end behavior with no errors and multiple errors.""" + instance, constructor_args = valid_instances + + # No errors - empty iterator + results = [] + async for result in instance.sync_run_end(): + results.append(result) + assert results == [] + + # Single error - raises original error + error = RuntimeError("Test error") + await instance.sync_run_error(error) + with pytest.raises(RuntimeError): + async for _ in instance.sync_run_end(): + pass + + # Multiple errors - raises RuntimeError with combined message + await instance.sync_run_error(ValueError("Second error")) + with pytest.raises(RuntimeError) as exc_info: + async for _ in instance.sync_run_end(): + pass + assert "Errors occurred during execution" in str(exc_info.value) diff --git a/tests/unit/scheduler/test_objects.py b/tests/unit/scheduler/test_objects.py new file mode 100644 index 00000000..df794ff8 --- /dev/null +++ b/tests/unit/scheduler/test_objects.py @@ -0,0 +1,1286 @@ +from __future__ import annotations + +import inspect +import typing +from collections.abc import AsyncIterator +from typing import Any, Optional, TypeVar, Union + +import pytest +from pydantic import ValidationError +from typing_extensions import TypeAliasType + +from guidellm.scheduler import ( + BackendInterface, + BackendT, + MeasuredRequestTimings, + MultiTurnRequestT, + RequestSchedulerTimings, + RequestT, + ResponseT, + ScheduledRequestInfo, + SchedulerState, + SchedulerUpdateAction, + SchedulerUpdateActionProgress, +) +from guidellm.utils import StandardBaseModel + + +def test_request_t(): + """Validate that RequestT is a TypeVar usable for generics and isn't 
bound.""" + assert isinstance(RequestT, TypeVar) + assert RequestT.__name__ == "RequestT" + assert RequestT.__bound__ is None + assert RequestT.__constraints__ == () + + +def test_response_t(): + """Validate that ResponseT is a TypeVar usable for generics and isn't bound.""" + assert isinstance(ResponseT, TypeVar) + assert ResponseT.__name__ == "ResponseT" + assert ResponseT.__bound__ is None + assert ResponseT.__constraints__ == () + + +def test_backend_t(): + """Validate that BackendT is a TypeVar bound to BackendInterface.""" + assert isinstance(BackendT, TypeVar) + assert BackendT.__name__ == "BackendT" + assert BackendT.__bound__.__name__ == "BackendInterface" + assert BackendT.__constraints__ == () + + +def test_multi_turn_request_t(): + """Validate MultiTurnRequestT is a TypeAliasType for multi-turn requests.""" + assert isinstance(MultiTurnRequestT, TypeAliasType) + assert MultiTurnRequestT.__name__ == "MultiTurnRequestT" + + value = MultiTurnRequestT.__value__ + assert hasattr(value, "__origin__") + assert value.__origin__ is Union + + type_params = getattr(MultiTurnRequestT, "__type_params__", ()) + assert len(type_params) == 1 + assert type_params[0].__name__ == "RequestT" + + +class TestBackendInterface: + """Test the BackendInterface abstract base class.""" + + @pytest.mark.smoke + def test_abstract_methods_defined(self): + """Test that all expected abstract methods are defined.""" + expected_methods = { + "process_startup", + "validate", + "process_shutdown", + "resolve", + } + expected_properties = { + "processes_limit", + "requests_limit", + "info", + } + + for method_name in expected_methods: + assert hasattr(BackendInterface, method_name) + method = getattr(BackendInterface, method_name) + assert inspect.isfunction(method) or inspect.ismethod(method) + + for prop_name in expected_properties: + assert hasattr(BackendInterface, prop_name) + prop = getattr(BackendInterface, prop_name) + assert hasattr(prop, "__get__") + + @pytest.mark.smoke + def test_generic_type_parameters(self): + """Test that BackendInterface has the correct generic type parameters.""" + orig_bases = BackendInterface.__orig_bases__ + protocol_base = None + generic_base = None + + for base in orig_bases: + if hasattr(base, "__origin__"): + if base.__origin__ is typing.Generic: + generic_base = base + elif base.__name__ == "Protocol": + protocol_base = base + + assert protocol_base is not None, "Should inherit from Protocol" + assert generic_base is not None, "Should inherit from Generic" + + if hasattr(generic_base, "__args__"): + type_params = generic_base.__args__ + assert len(type_params) == 3, "Should have 3 type parameters" + param_names = [param.__name__ for param in type_params] + expected_names = ["RequestT", "ResponseT"] + assert param_names == expected_names + + @pytest.mark.smoke + def test_implementation_construction(self): + """Test that a complete concrete implementation can be instantiated.""" + + class ConcreteBackend(BackendInterface[str, MeasuredRequestTimings, str]): + @property + def processes_limit(self) -> int | None: + return 4 + + @property + def requests_limit(self) -> int | None: + return 100 + + @property + def info(self) -> dict[str, Any]: + return {"model": "test", "version": "1.0"} + + async def process_startup(self) -> None: + pass + + async def validate(self) -> None: + pass + + async def process_shutdown(self) -> None: + pass + + async def resolve( + self, + request: str, + request_info: ScheduledRequestInfo, + history: list[tuple[str, str]] | None = None, + ) -> 
AsyncIterator[tuple[str, ScheduledRequestInfo]]: + yield f"Response to: {request}", request_info + + backend = ConcreteBackend() + assert isinstance(backend, BackendInterface) + assert isinstance(backend, ConcreteBackend) + assert backend.processes_limit == 4 + assert backend.requests_limit == 100 + info = backend.info + assert info == {"model": "test", "version": "1.0"} + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_implementation_async_methods(self): # noqa: C901 + """Test that async methods work correctly in concrete implementation.""" + + class AsyncBackend(BackendInterface[dict, MeasuredRequestTimings, dict]): + def __init__(self): + self.startup_called = False + self.validate_called = False + self.shutdown_called = False + + @property + def processes_limit(self) -> int | None: + return None # Unlimited + + @property + def requests_limit(self) -> int | None: + return None # Unlimited + + @property + def info(self) -> dict[str, Any]: + return {"backend": "async_test"} + + async def process_startup(self) -> None: + self.startup_called = True + + async def validate(self) -> None: + self.validate_called = True + + async def process_shutdown(self) -> None: + self.shutdown_called = True + + async def resolve( + self, + request: dict, + request_info: ScheduledRequestInfo, + history: list[tuple[dict, dict]] | None = None, + ) -> AsyncIterator[tuple[dict, ScheduledRequestInfo]]: + response = {"result": request.get("input", ""), "status": "success"} + yield response, request_info + + backend = AsyncBackend() + await backend.process_startup() + assert backend.startup_called + + await backend.validate() + assert backend.validate_called + + await backend.process_shutdown() + assert backend.shutdown_called + + request = {"input": "test_request"} + request_info = ScheduledRequestInfo( + request_id="test-123", + status="queued", + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=1000.0, + ) + results = [] + async for response, updated_info in backend.resolve(request, request_info): + results.append((response, updated_info)) + + assert len(results) == 1 + response, updated_info = results[0] + assert response == {"result": "test_request", "status": "success"} + assert updated_info == request_info + + @pytest.mark.smoke + def test_method_signatures(self): + """Test that abstract methods have the expected signatures.""" + info_prop = BackendInterface.info + assert isinstance(info_prop, property) + + processes_limit_prop = BackendInterface.processes_limit + assert isinstance(processes_limit_prop, property) + + requests_limit_prop = BackendInterface.requests_limit + assert isinstance(requests_limit_prop, property) + + startup_sig = inspect.signature(BackendInterface.process_startup) + assert len(startup_sig.parameters) == 1 # Only self + assert list(startup_sig.parameters.keys()) == ["self"] + + validate_sig = inspect.signature(BackendInterface.validate) + assert len(validate_sig.parameters) == 1 # Only self + assert list(validate_sig.parameters.keys()) == ["self"] + + shutdown_sig = inspect.signature(BackendInterface.process_shutdown) + assert len(shutdown_sig.parameters) == 1 # Only self + assert list(shutdown_sig.parameters.keys()) == ["self"] + + resolve_sig = inspect.signature(BackendInterface.resolve) + expected_params = ["self", "request", "request_info", "history"] + assert list(resolve_sig.parameters.keys()) == expected_params + + history_param = resolve_sig.parameters["history"] + assert history_param.default is None + + +class TestRequestSchedulerTimings: + 
"""Test the RequestSchedulerTimings model class.""" + + CHECK_KEYS = [ + "targeted_start", + "queued", + "dequeued", + "scheduled_at", + "resolve_start", + "resolve_end", + "finalized", + ] + + @pytest.fixture( + params=[ + {}, + { + "targeted_start": None, + "queued": None, + "dequeued": None, + "scheduled_at": None, + "resolve_start": None, + "resolve_end": None, + "finalized": None, + }, + { + "targeted_start": 1000.0, + "queued": 200.0, + "dequeued": 800.0, + "scheduled_at": 900.0, + "resolve_start": 1000.5, + "resolve_end": 1100.0, + "finalized": 1100.5, + }, + { + "queued": 200.0, + "scheduled_at": 250.0, + "resolve_start": 1000.5, + "resolve_end": 1100.0, + }, + { + "targeted_start": 0.0, + "queued": 0.0, + "dequeued": 0.0, + "scheduled_at": 0.0, + "resolve_start": 0.0, + "resolve_end": 0.0, + "finalized": 0.0, + }, + ], + ids=[ + "default_empty", + "all_none_explicit", + "complete_sequence", + "partial_data", + "zero_timestamps", + ], + ) + def valid_instances(self, request): + """Creates various valid configurations of RequestSchedulerTimings.""" + constructor_args = request.param + instance = RequestSchedulerTimings(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test RequestSchedulerTimings inheritance and type relationships.""" + assert issubclass(RequestSchedulerTimings, StandardBaseModel) + assert hasattr(RequestSchedulerTimings, "model_dump") + assert hasattr(RequestSchedulerTimings, "model_validate") + + # Check all expected fields are defined + fields = RequestSchedulerTimings.model_fields + for key in self.CHECK_KEYS: + assert key in fields + field_info = fields[key] + assert field_info.annotation in (Union[float, None], Optional[float]) + assert field_info.default is None + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, RequestSchedulerTimings) + for key in self.CHECK_KEYS: + assert hasattr(instance, key) + + # Validate that the instance attributes match the constructor args + for field, expected_value in constructor_args.items(): + assert getattr(instance, field) == expected_value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("targeted_start", "invalid_string"), + ("queued", "invalid_string"), + ("dequeued", [1, 2, 3]), + ("scheduled_at", {"key": "value"}), + ("resolve_start", {"key": "value"}), + ("resolve_end", [1, 2, 3]), + ("finalized", object()), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + RequestSchedulerTimings(**kwargs) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + # Test model_dump + data = instance.model_dump() + assert isinstance(data, dict) + assert all(key in data for key in self.CHECK_KEYS) + + # Test model_validate + reconstructed = RequestSchedulerTimings.model_validate(data) + assert isinstance(reconstructed, RequestSchedulerTimings) + + # Validate that all fields match between original and reconstructed instances + for field in self.CHECK_KEYS: + assert getattr(reconstructed, field) == getattr(instance, field) + + # Validate that the reconstructed instance matches original constructor args + for field, expected_value in 
constructor_args.items(): + assert getattr(reconstructed, field) == expected_value + + +class TestRequestTimings: + """Test the MeasuredRequestTimings model class.""" + + CHECK_KEYS = [ + "request_start", + "request_end", + ] + + @pytest.fixture( + params=[ + {}, + { + "request_start": None, + "request_end": None, + }, + { + "request_start": 1000.0, + "request_end": 1100.0, + }, + { + "request_start": 1000.0, + }, + { + "request_start": 0.0, + "request_end": 0.0, + }, + ], + ids=[ + "default_empty", + "all_none_explicit", + "complete_sequence", + "partial_data", + "zero_timestamps", + ], + ) + def valid_instances(self, request): + """Creates various valid configurations of MeasuredRequestTimings.""" + constructor_args = request.param + instance = MeasuredRequestTimings(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test MeasuredRequestTimings inheritance and type relationships.""" + assert issubclass(MeasuredRequestTimings, StandardBaseModel) + assert hasattr(MeasuredRequestTimings, "model_dump") + assert hasattr(MeasuredRequestTimings, "model_validate") + + # Check all expected fields are defined + fields = MeasuredRequestTimings.model_fields + for key in self.CHECK_KEYS: + assert key in fields + field_info = fields[key] + assert field_info.annotation in (Union[float, None], Optional[float]) + assert field_info.default is None + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, MeasuredRequestTimings) + for key in self.CHECK_KEYS: + assert hasattr(instance, key) + + # Validate that the instance attributes match the constructor args + for field, expected_value in constructor_args.items(): + assert getattr(instance, field) == expected_value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("request_start", "invalid_string"), + ("request_end", [1, 2, 3]), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + MeasuredRequestTimings(**kwargs) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + # Test model_dump + data = instance.model_dump() + assert isinstance(data, dict) + assert all(key in data for key in self.CHECK_KEYS) + + # Test model_validate + reconstructed = MeasuredRequestTimings.model_validate(data) + assert isinstance(reconstructed, MeasuredRequestTimings) + + # Validate that all fields match between original and reconstructed instances + for field in self.CHECK_KEYS: + assert getattr(reconstructed, field) == getattr(instance, field) + + # Validate that the reconstructed instance matches original constructor args + for field, expected_value in constructor_args.items(): + assert getattr(reconstructed, field) == expected_value + + +class TestScheduledRequestInfo: + CHECK_KEYS = [ + "request_id", + "status", + "error", + "scheduler_node_id", + "scheduler_process_id", + "scheduler_start_time", + "scheduler_timings", + "request_timings", + ] + + @pytest.fixture( + params=[ + # Minimal required configuration + { + "request_id": "test-req-123", + "status": "queued", + "scheduler_node_id": 1, + "scheduler_process_id": 0, + "scheduler_start_time": 1000.0, + }, + # Complete configuration with 
all fields + { + "request_id": "test-req-456", + "status": "completed", + "error": None, + "scheduler_node_id": 2, + "scheduler_process_id": 1, + "scheduler_start_time": 2000.0, + "scheduler_timings": { + "targeted_start": 1900.0, + "queued": 1950.0, + "dequeued": 2000.0, + "resolve_start": 2050.0, + "resolve_end": 2100.0, + "finalized": 2150.0, + }, + "request_timings": { + "request_start": 2060.0, + "request_end": 2110.0, + }, + }, + # Error state configuration + { + "request_id": "test-req-error", + "status": "errored", + "error": "Connection timeout", + "scheduler_node_id": 0, + "scheduler_process_id": 0, + "scheduler_start_time": 3000.0, + }, + # Different status values + { + "request_id": "test-req-pending", + "status": "pending", + "scheduler_node_id": 1, + "scheduler_process_id": 2, + "scheduler_start_time": 4000.0, + }, + { + "request_id": "test-req-in-progress", + "status": "in_progress", + "scheduler_node_id": 2, + "scheduler_process_id": 1, + "scheduler_start_time": 5000.0, + }, + ], + ids=[ + "minimal_required", + "complete_configuration", + "error_state", + "pending_status", + "in_progress_status", + ], + ) + def valid_instances(self, request): + """Creates various valid configurations of ScheduledRequestInfo. + + Returns: + tuple: (instance, constructor_args) where instance is the constructed + ScheduledRequestInfo and constructor_args are the kwargs used. + """ + constructor_args = request.param.copy() + + # Handle nested objects + if "scheduler_timings" in constructor_args: + constructor_args["scheduler_timings"] = RequestSchedulerTimings( + **constructor_args["scheduler_timings"] + ) + if "request_timings" in constructor_args: + constructor_args["request_timings"] = MeasuredRequestTimings( + **constructor_args["request_timings"] + ) + + instance = ScheduledRequestInfo(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test ScheduledRequestInfo inheritance and type relationships.""" + assert issubclass(ScheduledRequestInfo, StandardBaseModel) + assert issubclass(ScheduledRequestInfo, typing.Generic) + assert hasattr(ScheduledRequestInfo, "model_dump") + assert hasattr(ScheduledRequestInfo, "model_validate") + + # Check computed properties + assert hasattr(ScheduledRequestInfo, "started_at") + assert hasattr(ScheduledRequestInfo, "completed_at") + assert isinstance(ScheduledRequestInfo.started_at, property) + assert isinstance(ScheduledRequestInfo.completed_at, property) + + # Check that it's properly generic + orig_bases = getattr(ScheduledRequestInfo, "__orig_bases__", ()) + generic_base = next( + ( + base + for base in orig_bases + if hasattr(base, "__origin__") and base.__origin__ is typing.Generic + ), + None, + ) + assert generic_base is not None + + # Check required fields + fields = ScheduledRequestInfo.model_fields + for key in self.CHECK_KEYS: + assert key in fields + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, ScheduledRequestInfo) + for key in self.CHECK_KEYS: + assert hasattr(instance, key) + + # Validate that the instance attributes match the constructor args + for field, expected_value in constructor_args.items(): + if field in ["scheduler_timings", "request_timings"]: + actual_value = getattr(instance, field) + if expected_value is None: + assert actual_value is None or ( + field == "scheduler_timings" + and isinstance(actual_value, 
RequestSchedulerTimings) + ) + else: + assert isinstance(actual_value, type(expected_value)) + else: + assert getattr(instance, field) == expected_value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("request_id", None), # Required field + ("request_id", 123), # Wrong type + ("status", "invalid_status"), # Invalid literal + ("scheduler_node_id", "not_an_int"), + ("scheduler_process_id", -1.5), + ("scheduler_start_time", "not_a_float"), + ("error", 123), # Should be string or None + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + # Start with valid base config + base_kwargs = { + "request_id": "test-req", + "status": "queued", + "scheduler_node_id": 1, + "scheduler_process_id": 0, + "scheduler_start_time": 1000.0, + } + base_kwargs[field] = value + with pytest.raises(ValidationError): + ScheduledRequestInfo(**base_kwargs) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + # Test model_dump + data = instance.model_dump() + assert isinstance(data, dict) + assert all(key in data for key in self.CHECK_KEYS) + + # Test model_validate + reconstructed = ScheduledRequestInfo.model_validate(data) + assert isinstance(reconstructed, ScheduledRequestInfo) + + # Validate that all fields match between original and reconstructed instances + for field in self.CHECK_KEYS: + original_value = getattr(instance, field) + reconstructed_value = getattr(reconstructed, field) + + if field in ["scheduler_timings", "request_timings"]: + if original_value is not None and reconstructed_value is not None: + assert ( + original_value.model_dump() == reconstructed_value.model_dump() + ) + else: + assert original_value is None or isinstance( + original_value, + (RequestSchedulerTimings, MeasuredRequestTimings), + ) + assert reconstructed_value is None or isinstance( + reconstructed_value, + (RequestSchedulerTimings, MeasuredRequestTimings), + ) + else: + assert original_value == reconstructed_value + + @pytest.mark.smoke + def test_started_at_property(self): + """Test the started_at property logic.""" + # Test with request_timings.request_start (should take precedence) + instance = ScheduledRequestInfo( + request_id="test-req", + status="completed", + scheduler_node_id=1, + scheduler_process_id=0, + scheduler_start_time=1000.0, + scheduler_timings=RequestSchedulerTimings(resolve_start=2000.0), + request_timings=MeasuredRequestTimings(request_start=2100.0), + ) + assert instance.started_at == 2100.0 + + # Test with only scheduler_timings.resolve_start + instance = ScheduledRequestInfo( + request_id="test-req", + status="completed", + scheduler_node_id=1, + scheduler_process_id=0, + scheduler_start_time=1000.0, + scheduler_timings=RequestSchedulerTimings(resolve_start=2000.0), + ) + assert instance.started_at == 2000.0 + + # Test with no timing info + instance = ScheduledRequestInfo( + request_id="test-req", + status="queued", + scheduler_node_id=1, + scheduler_process_id=0, + scheduler_start_time=1000.0, + ) + assert instance.started_at is None + + @pytest.mark.smoke + def test_completed_at_property(self): + """Test the completed_at property logic.""" + # Test with request_timings.request_end (should take precedence) + instance = ScheduledRequestInfo( + request_id="test-req", + status="completed", + scheduler_node_id=1, + scheduler_process_id=0, + scheduler_start_time=1000.0, + 
scheduler_timings=RequestSchedulerTimings(resolve_end=2000.0), + request_timings=MeasuredRequestTimings(request_end=2100.0), + ) + assert instance.completed_at == 2100.0 + + # Test with only scheduler_timings.resolve_end + instance = ScheduledRequestInfo( + request_id="test-req", + status="completed", + scheduler_node_id=1, + scheduler_process_id=0, + scheduler_start_time=1000.0, + scheduler_timings=RequestSchedulerTimings(resolve_end=2000.0), + ) + assert instance.completed_at == 2000.0 + + # Test with no timing info + instance = ScheduledRequestInfo( + request_id="test-req", + status="queued", + scheduler_node_id=1, + scheduler_process_id=0, + scheduler_start_time=1000.0, + ) + assert instance.completed_at is None + + +class TestSchedulerState: + CHECK_KEYS = [ + "node_id", + "num_processes", + "start_time", + "end_time", + "end_queuing_time", + "end_queuing_constraints", + "end_processing_time", + "end_processing_constraints", + "scheduler_constraints", + "remaining_fraction", + "remaining_requests", + "remaining_duration", + "created_requests", + "queued_requests", + "pending_requests", + "processing_requests", + "processed_requests", + "successful_requests", + "errored_requests", + "cancelled_requests", + ] + + @pytest.fixture( + params=[ + # Minimal required configuration + { + "node_id": 0, + "num_processes": 1, + "start_time": 1000.0, + }, + # Complete configuration with all fields + { + "node_id": 1, + "num_processes": 4, + "start_time": 2000.0, + "end_time": 3000.0, + "end_queuing_time": 2500.0, + "end_queuing_constraints": { + "time_limit": SchedulerUpdateAction( + request_queuing="stop", metadata={"max_duration": 1500} + ) + }, + "end_processing_time": 2800.0, + "end_processing_constraints": { + "request_limit": SchedulerUpdateAction( + request_processing="stop_all", metadata={"max_requests": 1000} + ) + }, + "scheduler_constraints": { + "rate_limit": SchedulerUpdateAction(metadata={"max_rps": 100}) + }, + "remaining_fraction": 0.25, + "remaining_requests": 50, + "remaining_duration": 300.0, + "created_requests": 200, + "queued_requests": 180, + "pending_requests": 20, + "processing_requests": 10, + "processed_requests": 150, + "successful_requests": 140, + "errored_requests": 8, + "cancelled_requests": 2, + }, + # Partial configuration with some stats + { + "node_id": 2, + "num_processes": 2, + "start_time": 4000.0, + "created_requests": 50, + "processed_requests": 30, + "successful_requests": 28, + "errored_requests": 2, + }, + # Edge case: zero values + { + "node_id": 0, + "num_processes": 1, + "start_time": 0.0, + "created_requests": 0, + "processed_requests": 0, + "successful_requests": 0, + }, + ], + ids=[ + "minimal_required", + "complete_configuration", + "partial_stats", + "zero_values", + ], + ) + def valid_instances(self, request): + """Creates various valid configurations of SchedulerState. + + Returns: + tuple: (instance, constructor_args) where instance is the constructed + SchedulerState and constructor_args are the kwargs used. 
+ """ + constructor_args = request.param + instance = SchedulerState(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test SchedulerState inheritance and type relationships.""" + assert issubclass(SchedulerState, StandardBaseModel) + assert hasattr(SchedulerState, "model_dump") + assert hasattr(SchedulerState, "model_validate") + + # Check all expected fields are defined + fields = SchedulerState.model_fields + for key in self.CHECK_KEYS: + assert key in fields + + # Check field defaults for key counters + counter_fields = [ + "created_requests", + "queued_requests", + "pending_requests", + "processing_requests", + "processed_requests", + "successful_requests", + "errored_requests", + "cancelled_requests", + ] + for field in counter_fields: + field_info = fields[field] + assert field_info.default == 0 + + # Check that start_time has a default factory + start_time_field = fields["start_time"] + assert start_time_field.default_factory is not None + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, SchedulerState) + for key in self.CHECK_KEYS: + assert hasattr(instance, key) + + # Validate that the instance attributes match the constructor args + for field, expected_value in constructor_args.items(): + assert getattr(instance, field) == expected_value + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("field", "value"), + [ + ("node_id", "not_an_int"), + ("start_time", "not_a_float"), + ("end_time", [1, 2, 3]), + ("remaining_fraction", "not_a_float"), + ("created_requests", "not_an_int"), + ("end_queuing_constraints", "not_a_dict"), + ("scheduler_constraints", ["not", "a", "dict"]), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + # Start with valid base config + base_kwargs = { + "node_id": 0, + "num_processes": 1, + "start_time": 1000.0, + } + base_kwargs[field] = value + with pytest.raises(ValidationError): + SchedulerState(**base_kwargs) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + # Test model_dump + data = instance.model_dump() + assert isinstance(data, dict) + assert all(key in data for key in self.CHECK_KEYS) + + # Test model_validate + reconstructed = SchedulerState.model_validate(data) + assert isinstance(reconstructed, SchedulerState) + + # Validate that all fields match between original and reconstructed instances + for field in self.CHECK_KEYS: + assert getattr(reconstructed, field) == getattr(instance, field) + + # Validate that the reconstructed instance matches original constructor args + for field, expected_value in constructor_args.items(): + assert getattr(reconstructed, field) == expected_value + + +class TestSchedulerUpdateAction: + CHECK_KEYS = [ + "request_queuing", + "request_processing", + "metadata", + "progress", + ] + + @pytest.fixture( + params=[ + # Default configuration + {}, + # All explicit default values + { + "request_queuing": "continue", + "request_processing": "continue", + "metadata": {}, + "progress": {}, + }, + # Stop queuing configuration + { + "request_queuing": "stop", + "request_processing": "continue", + "metadata": {"reason": "rate_limit_exceeded"}, + }, + # Stop local processing configuration + { + "request_queuing": "continue", + 
"request_processing": "stop_local", + "metadata": {"node_id": 1, "reason": "resource_exhausted"}, + }, + # Stop all processing configuration + { + "request_queuing": "stop", + "request_processing": "stop_all", + "metadata": { + "emergency_stop": True, + "reason": "critical_error", + "error_details": {"code": 500, "message": "Internal server error"}, + }, + }, + # Complex metadata configuration + { + "request_queuing": "continue", + "request_processing": "continue", + "metadata": { + "stats": {"processed": 100, "pending": 50}, + "constraints": {"max_rps": 10, "max_concurrent": 20}, + "config": {"batch_size": 32, "timeout": 30.0}, + }, + }, + # Progress with remaining_fraction only + { + "request_queuing": "continue", + "request_processing": "continue", + "progress": {"remaining_fraction": 0.75}, + }, + # Progress with remaining_requests only + { + "request_queuing": "continue", + "request_processing": "continue", + "progress": {"remaining_requests": 250.0}, + }, + # Progress with remaining_duration only + { + "request_queuing": "continue", + "request_processing": "continue", + "progress": {"remaining_duration": 120.5}, + }, + # Complete progress configuration + { + "request_queuing": "stop", + "request_processing": "stop_all", + "metadata": {"shutdown_reason": "completion"}, + "progress": { + "remaining_fraction": 0.0, + "remaining_requests": 0.0, + "remaining_duration": 0.0, + }, + }, + # Partial progress configuration + { + "request_queuing": "continue", + "request_processing": "continue", + "metadata": {"checkpoint": "mid_benchmark"}, + "progress": { + "remaining_fraction": 0.45, + "remaining_duration": 180.0, + }, + }, + ], + ids=[ + "default_empty", + "explicit_defaults", + "stop_queuing", + "stop_local_processing", + "stop_all_processing", + "complex_metadata", + "progress_fraction_only", + "progress_requests_only", + "progress_duration_only", + "complete_progress", + "partial_progress", + ], + ) + def valid_instances(self, request): + """Creates various valid configurations of SchedulerUpdateAction. + + Returns: + tuple: (instance, constructor_args) where instance is the constructed + SchedulerUpdateAction and constructor_args are the kwargs used. 
+ """ + constructor_args = request.param + instance = SchedulerUpdateAction(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test SchedulerUpdateAction inheritance and type relationships.""" + assert issubclass(SchedulerUpdateAction, StandardBaseModel) + assert hasattr(SchedulerUpdateAction, "model_dump") + assert hasattr(SchedulerUpdateAction, "model_validate") + + # Check all expected fields are defined + fields = SchedulerUpdateAction.model_fields + for key in self.CHECK_KEYS: + assert key in fields + + # Check field defaults + assert fields["request_queuing"].default == "continue" + assert fields["request_processing"].default == "continue" + metadata_field = fields["metadata"] + assert metadata_field.default_factory is not None + progress_field = fields["progress"] + assert progress_field.default_factory is not None + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, SchedulerUpdateAction) + for key in self.CHECK_KEYS: + assert hasattr(instance, key) + + # Validate that the instance attributes match the constructor args or defaults + for field in self.CHECK_KEYS: + if field in constructor_args: + assert getattr(instance, field) == constructor_args[field] + elif field in ["request_queuing", "request_processing"]: + assert getattr(instance, field) == "continue" + elif field in ["metadata", "progress"]: + assert getattr(instance, field) == {} + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("field", "value"), + [ + ("request_queuing", "invalid_action"), + ("request_queuing", 123), + ("request_processing", "invalid_action"), + ("request_processing", ["stop"]), + ("metadata", "not_a_dict"), + ("metadata", [{"key": "value"}]), + ("progress", "not_a_dict"), + ("progress", [{"remaining_fraction": 0.5}]), + ("progress", {"remaining_fraction": "not_a_float"}), + ("progress", {"remaining_requests": "not_a_float"}), + ("progress", {"remaining_duration": "not_a_float"}), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + SchedulerUpdateAction(**kwargs) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + # Test model_dump + data = instance.model_dump() + assert isinstance(data, dict) + assert all(key in data for key in self.CHECK_KEYS) + + # Test model_validate + reconstructed = SchedulerUpdateAction.model_validate(data) + assert isinstance(reconstructed, SchedulerUpdateAction) + + # Validate that all fields match between original and reconstructed instances + for field in self.CHECK_KEYS: + assert getattr(reconstructed, field) == getattr(instance, field) + + # Validate that the reconstructed instance matches expected values + for field in self.CHECK_KEYS: + if field in constructor_args: + assert getattr(reconstructed, field) == constructor_args[field] + elif field in ["request_queuing", "request_processing"]: + assert getattr(reconstructed, field) == "continue" + elif field in ["metadata", "progress"]: + assert getattr(reconstructed, field) == {} + + @pytest.mark.smoke + def test_progress_field_behavior(self): + """Test the progress field specific behavior and validation.""" + # Test empty progress (default) + instance = 
SchedulerUpdateAction() + assert instance.progress == {} + assert isinstance(instance.progress, dict) + + # Test progress with all valid fields + progress_data = { + "remaining_fraction": 0.75, + "remaining_requests": 100.0, + "remaining_duration": 30.5, + } + instance = SchedulerUpdateAction(progress=progress_data) + assert instance.progress == progress_data + + # Test progress with partial fields (TypedDict allows partial) + partial_progress = {"remaining_fraction": 0.25} + instance = SchedulerUpdateAction(progress=partial_progress) + assert instance.progress == partial_progress + + # Test progress with zero values + zero_progress = { + "remaining_fraction": 0.0, + "remaining_requests": 0.0, + "remaining_duration": 0.0, + } + instance = SchedulerUpdateAction(progress=zero_progress) + assert instance.progress == zero_progress + + # Test that progress field persists through marshalling + data = instance.model_dump() + assert "progress" in data + assert data["progress"] == zero_progress + + reconstructed = SchedulerUpdateAction.model_validate(data) + assert reconstructed.progress == zero_progress + + @pytest.mark.smoke + @pytest.mark.parametrize( + "progress_value", + [ + {"remaining_fraction": 0.0}, + {"remaining_fraction": 1.0}, + {"remaining_requests": 0.0}, + {"remaining_requests": 1000.0}, + {"remaining_duration": 0.0}, + {"remaining_duration": 3600.0}, + {"remaining_fraction": 0.5, "remaining_requests": 50.0}, + {"remaining_requests": 25.0, "remaining_duration": 120.0}, + {"remaining_fraction": 0.33, "remaining_duration": 45.0}, + ], + ) + def test_progress_valid_combinations(self, progress_value): + """Test various valid combinations of progress field values.""" + instance = SchedulerUpdateAction(progress=progress_value) + assert instance.progress == progress_value + + # Verify marshalling works correctly + data = instance.model_dump() + reconstructed = SchedulerUpdateAction.model_validate(data) + assert reconstructed.progress == progress_value + + @pytest.mark.smoke + def test_scheduler_update_action_progress_typeddict(self): + """Test the SchedulerUpdateActionProgress TypedDict behavior.""" + # Test that SchedulerUpdateActionProgress is a proper TypedDict + # Verify it's a TypedDict (has the special attributes) + assert hasattr(SchedulerUpdateActionProgress, "__annotations__") + assert hasattr(SchedulerUpdateActionProgress, "__total__") + assert hasattr(SchedulerUpdateActionProgress, "__required_keys__") + assert hasattr(SchedulerUpdateActionProgress, "__optional_keys__") + + # Check that all keys are optional (total=False) + expected_keys = { + "remaining_fraction", + "remaining_requests", + "remaining_duration", + } + actual_keys = set(SchedulerUpdateActionProgress.__annotations__.keys()) + assert actual_keys == expected_keys + assert SchedulerUpdateActionProgress.__total__ is False + assert SchedulerUpdateActionProgress.__required_keys__ == frozenset() + assert SchedulerUpdateActionProgress.__optional_keys__ == expected_keys + + # Test that type annotations are correct + annotations = SchedulerUpdateActionProgress.__annotations__ + assert "remaining_fraction" in annotations + assert "remaining_requests" in annotations + assert "remaining_duration" in annotations + + # Test creation of valid TypedDict instances + valid_progress_1: SchedulerUpdateActionProgress = {} + valid_progress_2: SchedulerUpdateActionProgress = {"remaining_fraction": 0.5} + valid_progress_3: SchedulerUpdateActionProgress = { + "remaining_fraction": 0.25, + "remaining_requests": 100.0, + "remaining_duration": 
60.0, + } + + # All should be valid dict instances + assert isinstance(valid_progress_1, dict) + assert isinstance(valid_progress_2, dict) + assert isinstance(valid_progress_3, dict) diff --git a/tests/unit/scheduler/test_scheduler.py b/tests/unit/scheduler/test_scheduler.py new file mode 100644 index 00000000..33efc27f --- /dev/null +++ b/tests/unit/scheduler/test_scheduler.py @@ -0,0 +1,253 @@ +from __future__ import annotations + +import asyncio +import inspect +import random +import uuid +from functools import wraps +from typing import Any, Generic + +import pytest +from pydantic import BaseModel, Field + +from guidellm.scheduler import ( + BackendInterface, + MaxNumberConstraint, + NonDistributedEnvironment, + ScheduledRequestInfo, + Scheduler, + SchedulerState, + SynchronousStrategy, +) +from guidellm.utils.singleton import ThreadSafeSingletonMixin + + +def async_timeout(delay: float): + """Decorator to add timeout to async test functions.""" + + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +class MockRequest(BaseModel): + payload: str + id_: str = Field(default_factory=lambda: str(uuid.uuid4())) + + +class MockBackend(BackendInterface): + """Mock backend for integration testing with predictable responses.""" + + def __init__( + self, + processes_limit_value: int | None = None, + requests_limit_value: int | None = None, + error_rate: float = 0.2, + response_delay: float = 0.0, + ): + self._processes_limit = processes_limit_value + self._requests_limit = requests_limit_value + self._error_rate = error_rate + self._response_delay = response_delay + + @property + def processes_limit(self) -> int | None: + return self._processes_limit + + @property + def requests_limit(self) -> int | None: + return self._requests_limit + + def info(self) -> dict[str, Any]: + return {"type": "mock_integration", "delay": self._response_delay} + + async def process_startup(self): + pass + + async def validate(self): + pass + + async def process_shutdown(self): + pass + + async def resolve(self, request: MockRequest, request_info, request_history): + """Return predictable response based on input request.""" + await asyncio.sleep(self._response_delay) + + if ( + self._error_rate + and self._error_rate > 0 + and random.random() < self._error_rate + ): + raise RuntimeError(f"mock_error_for_{request.payload}") + + yield f"response_for_{request.payload}" + + +class TestScheduler: + """Test suite for Scheduler class.""" + + @pytest.fixture + def valid_instances(self): + """Fixture providing test data for Scheduler.""" + # Clear singleton state between tests + if hasattr(Scheduler, "singleton_instance"): + Scheduler.singleton_instance = None + + instance = Scheduler() + constructor_args = {} + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test Scheduler inheritance and type relationships.""" + # Clear singleton before testing + if hasattr(Scheduler, "singleton_instance"): + Scheduler.singleton_instance = None + + assert issubclass(Scheduler, ThreadSafeSingletonMixin) + assert issubclass(Scheduler, Generic) + assert hasattr(Scheduler, "run") + assert callable(Scheduler.run) + + # Check method signature + run_sig = inspect.signature(Scheduler.run) + expected_params = [ + "self", + "requests", + "backend", + "strategy", + "env", + "constraints", + ] + param_names = list(run_sig.parameters.keys()) + assert param_names == 
expected_params + + # Check that run is async generator (returns AsyncIterator) + assert hasattr(Scheduler.run, "__code__") + code = Scheduler.run.__code__ + # Check for async generator flags or return annotation + assert ( + inspect.iscoroutinefunction(Scheduler.run) + or "AsyncIterator" in str(run_sig.return_annotation) + or code.co_flags & 0x100 # CO_GENERATOR flag + ) + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test Scheduler initialization as singleton.""" + instance1, _ = valid_instances + instance2 = Scheduler() + + assert isinstance(instance1, Scheduler) + assert instance1 is instance2 + assert id(instance1) == id(instance2) + assert hasattr(instance1, "thread_lock") + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(10.0) + @pytest.mark.parametrize( + ("num_requests", "constraint_args"), + [ + (5, {"max_number": MaxNumberConstraint(max_num=10)}), + (20, {"max_number": MaxNumberConstraint(max_num=25)}), + (1, {"max_number": MaxNumberConstraint(max_num=5)}), + ], + ) + async def test_run_basic_functionality( + self, valid_instances, num_requests, constraint_args + ): + """Test Scheduler.run basic functionality with various parameters.""" + instance, _ = valid_instances + requests = [MockRequest(payload=f"req_{i}") for i in range(num_requests)] + backend = MockBackend(error_rate=0.0, response_delay=0.001) + strategy = SynchronousStrategy() + env = NonDistributedEnvironment() + + results = [] + async for response, _request, info, _state in instance.run( + requests=requests, + backend=backend, + strategy=strategy, + env=env, + **constraint_args, + ): + results.append((response, _request, info, _state)) + + assert len(results) > 0 + assert all(isinstance(r[1], MockRequest) for r in results) + assert all(isinstance(r[2], ScheduledRequestInfo) for r in results) + assert all(isinstance(r[3], SchedulerState) for r in results) + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_run_with_errors(self, valid_instances): + """Test Scheduler.run error handling.""" + instance, _ = valid_instances + requests = [MockRequest(payload=f"req_{i}") for i in range(5)] + backend = MockBackend(error_rate=1.0) # Force all requests to error + strategy = SynchronousStrategy() + env = NonDistributedEnvironment() + + error_count = 0 + async for response, _request, info, _state in instance.run( + requests=requests, + backend=backend, + strategy=strategy, + env=env, + max_number=MaxNumberConstraint(max_num=10), + ): + if info.status == "errored": + error_count += 1 + assert response is None + assert info.error is not None + + assert error_count > 0 + + @pytest.mark.sanity + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_run_invalid_parameters(self, valid_instances): + """Test Scheduler.run with invalid parameters.""" + instance, _ = valid_instances + + with pytest.raises((TypeError, ValueError, AttributeError)): + async for _ in instance.run( + requests=None, # Invalid requests + backend=None, # Invalid backend + strategy=SynchronousStrategy(), + env=NonDistributedEnvironment(), + ): + pass + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_run_constraint_variations(self, valid_instances): + """Test Scheduler.run with different constraint types.""" + instance, _ = valid_instances + requests = [MockRequest(payload=f"req_{i}") for i in range(3)] + backend = MockBackend(error_rate=0.0, response_delay=0.001) + strategy = SynchronousStrategy() + env = NonDistributedEnvironment() + + # Test 
with multiple constraints + results = [] + async for response, request, info, state in instance.run( + requests=requests, + backend=backend, + strategy=strategy, + env=env, + max_number=MaxNumberConstraint(max_num=5), + max_duration=5.0, # Should be converted to constraint + ): + results.append((response, request, info, state)) + + assert len(results) > 0 diff --git a/tests/unit/scheduler/test_strategy.py b/tests/unit/scheduler/test_strategy.py new file mode 100644 index 00000000..8cb91d82 --- /dev/null +++ b/tests/unit/scheduler/test_strategy.py @@ -0,0 +1,1154 @@ +from __future__ import annotations + +import inspect +import math +import statistics +import time +from abc import ABC +from typing import Literal, TypeVar + +import pytest +from pydantic import ValidationError + +from guidellm.scheduler import ( + AsyncConstantStrategy, + AsyncPoissonStrategy, + ConcurrentStrategy, + ConstantRateRequestTimings, + LastCompletionRequestTimings, + NoDelayRequestTimings, + PoissonRateRequestTimings, + ScheduledRequestInfo, + ScheduledRequestTimings, + SchedulingStrategy, + StrategyT, + SynchronousStrategy, + ThroughputStrategy, +) +from guidellm.scheduler.strategy import ( + _exponential_decay_fraction, + _exponential_decay_tau, +) + + +def test_strategy_type(): + """Test that StrategyType is defined correctly as a Literal type.""" + # StrategyType is a type alias/literal type, we can't test its runtime value + # but we can test that it exists and is importable + from guidellm.scheduler.strategy import StrategyType + + assert StrategyType is not None + + +def test_strategy_t(): + """Test that StrategyT is filled out correctly as a TypeVar.""" + assert isinstance(StrategyT, type(TypeVar("test"))) + assert StrategyT.__name__ == "StrategyT" + assert StrategyT.__bound__ == SchedulingStrategy + assert StrategyT.__constraints__ == () + + +class TestExponentialDecay: + """Test suite for _exponential_decay_tau function.""" + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("max_progress", "convergence", "expected_range"), + [ + (1.0, 0.99, (0.21, 0.22)), + (5.0, 0.99, (1.08, 1.09)), + (10.0, 0.95, (3.33, 3.35)), + ], + ) + def test_tau_invocation(self, max_progress, convergence, expected_range): + """Test exponential decay tau calculation with valid inputs.""" + tau = _exponential_decay_tau(max_progress, convergence) + assert expected_range[0] <= tau <= expected_range[1] + expected_tau = max_progress / (-math.log(1 - convergence)) + assert tau == pytest.approx(expected_tau, rel=1e-10) + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("progress", "tau", "expected_min", "expected_max"), + [ + (0.0, 1.0, 0.0, 0.0), # No progress = 0 + (1.0, 1.0, 0.6, 0.7), # 1 tau ≈ 63.2% + (2.0, 1.0, 0.85, 0.87), # 2 tau ≈ 86.5% + (3.0, 1.0, 0.95, 0.96), # 3 tau ≈ 95.0% + ], + ) + def test_exp_decay_invocation(self, progress, tau, expected_min, expected_max): + """Test exponential decay fraction calculation with valid inputs.""" + fraction = _exponential_decay_fraction(progress, tau) + assert expected_min <= fraction <= expected_max + expected_fraction = 1 - math.exp(-progress / tau) + assert fraction == pytest.approx(expected_fraction, rel=1e-10) + + @pytest.mark.smoke + def test_exp_boundary_conditions(self): + """Test boundary conditions for exponential decay fraction.""" + assert _exponential_decay_fraction(0.0, 1.0) == 0.0 + assert _exponential_decay_fraction(0.0, 10.0) == 0.0 + large_progress = 100.0 + fraction = _exponential_decay_fraction(large_progress, 1.0) + assert fraction > 0.99999 + + +class 
TestScheduledRequestTimings: + @pytest.mark.smoke + def test_signatures(self): + """Test that ScheduledRequestTimings is an abstract base class.""" + assert issubclass(ScheduledRequestTimings, ABC) + assert inspect.isabstract(ScheduledRequestTimings) + + abstract_methods = ScheduledRequestTimings.__abstractmethods__ + expected_methods = {"next_offset", "request_completed"} + assert abstract_methods == expected_methods + + # Validate method signatures + next_offset_method = ScheduledRequestTimings.next_offset + assert callable(next_offset_method) + request_completed_method = ScheduledRequestTimings.request_completed + assert callable(request_completed_method) + + # Check signature parameters using inspect + next_offset_sig = inspect.signature(next_offset_method) + assert len(next_offset_sig.parameters) == 1 + assert str(next_offset_sig.return_annotation) == "float" + request_completed_sig = inspect.signature(request_completed_method) + assert len(request_completed_sig.parameters) == 2 + params = list(request_completed_sig.parameters.values()) + param_annotation = params[1].annotation + assert param_annotation in {ScheduledRequestInfo, "ScheduledRequestInfo"} + + @pytest.mark.sanity + def test_invalid_implementation(self): + """Test that invalid implementations raise TypeError.""" + + class InvalidImplementation(ScheduledRequestTimings): + pass # Missing required abstract methods + + with pytest.raises(TypeError): + InvalidImplementation() + + @pytest.mark.smoke + def test_child_implementation(self): + """Test that concrete implementations can be constructed.""" + + class TestRequestTimings(ScheduledRequestTimings): + offset: float = 0.0 + + def next_offset(self) -> float: + self.offset += 1.0 + return self.offset + + def request_completed(self, request_info: ScheduledRequestInfo): + pass + + timing = TestRequestTimings() + assert isinstance(timing, ScheduledRequestTimings) + + assert timing.next_offset() == 1.0 + assert timing.next_offset() == 2.0 + + mock_request = ScheduledRequestInfo( + request_id="test", + status="completed", + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=time.time(), + ) + timing.request_completed(mock_request) + + +class TestLastCompletionRequestTimings: + @pytest.fixture( + params=[ + {}, + {"offset": 10.0}, + {"startup_requests": 5, "startup_requests_delay": 0.5}, + { + "offset": 0.0, + "startup_requests": 0, + "startup_requests_delay": 0.0, + }, + { + "offset": 2.5, + "startup_requests": 3, + "startup_requests_delay": 1.0, + }, + ] + ) + def valid_instances(self, request): + """Creates various valid configurations of LastCompletionRequestTimings.""" + constructor_args = request.param + instance = LastCompletionRequestTimings(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization( + self, valid_instances: tuple[LastCompletionRequestTimings, dict] + ): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, LastCompletionRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("startup_requests", -1), + ("startup_requests_delay", -0.5), + ("offset", "invalid"), + ("startup_requests", 1.5), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + 
LastCompletionRequestTimings(**kwargs) + + @pytest.mark.smoke + def test_lifecycle( + self, valid_instances: tuple[LastCompletionRequestTimings, dict] + ): + """Test the complete lifecycle of next_offset and request_completed calls.""" + instance, constructor_args = valid_instances + initial_offset = instance.offset + startup_requests = constructor_args.get("startup_requests", 0) + startup_delay = constructor_args.get("startup_requests_delay", 0.0) + request_times = [] + + for index in range(max(5, startup_requests + 2)): + offset = instance.next_offset() + assert isinstance(offset, (int, float)) + + if index < startup_requests: + expected_offset = initial_offset + (index + 1) * startup_delay + assert offset == pytest.approx(expected_offset, abs=1e-5) + + completion_time = time.time() + offset + request_times.append(completion_time) + + mock_request: ScheduledRequestInfo = ScheduledRequestInfo( + request_id=f"test-{index}", + status="completed", + scheduler_node_id=0, + scheduler_process_id=0, + scheduler_start_time=time.time(), + ) + mock_request.scheduler_timings.resolve_end = completion_time + instance.request_completed(mock_request) + + @pytest.mark.smoke + def test_marshalling( + self, valid_instances: tuple[LastCompletionRequestTimings, dict] + ): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = LastCompletionRequestTimings.model_validate(data) + assert isinstance(reconstructed, LastCompletionRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + +class TestNoDelayRequestTimings: + @pytest.fixture( + params=[ + {}, + {"offset": 0.2}, + {"startup_duration": 0.3, "startup_target_requests": 5}, + { + "offset": 0.15, + "startup_duration": 0.2, + "startup_target_requests": 20, + "startup_convergence": 0.9, + }, + ] + ) + def valid_instances(self, request): + """Creates various valid configurations of NoDelayRequestTimings.""" + constructor_args = request.param + instance = NoDelayRequestTimings(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization(self, valid_instances: tuple[NoDelayRequestTimings, dict]): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, NoDelayRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("offset", -1.0), + ("startup_duration", -1.0), + ("startup_target_requests", 0), + ("startup_target_requests", -1), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + NoDelayRequestTimings(**kwargs) + + @pytest.mark.smoke + def test_lifecycle(self, valid_instances: tuple[NoDelayRequestTimings, dict]): + """Test the complete lifecycle of timing methods.""" + instance, constructor_args = valid_instances + startup_duration = constructor_args.get("startup_duration", 0.0) + base_offset = constructor_args.get("offset", 0.0) + start_time = time.time() + min_time = base_offset + startup_duration + 0.2 + end_time = start_time + min_time + last_offset = -1 * math.inf + + while (current_time := time.time()) < end_time: + offset = 
instance.next_offset() + + if startup_duration > 0 and (current_time - start_time) <= startup_duration: + assert offset < base_offset + startup_duration + assert offset > last_offset + elif startup_duration > 0: + assert offset == base_offset + startup_duration + else: + assert offset == base_offset + + last_offset = offset + time.sleep(0.025) + + @pytest.mark.smoke + def test_marshalling(self, valid_instances: tuple[NoDelayRequestTimings, dict]): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = NoDelayRequestTimings.model_validate(data) + assert isinstance(reconstructed, NoDelayRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + +class TestConstantRateRequestTimings: + @pytest.fixture( + params=[ + {"rate": 1.0}, + {"rate": 5.0, "offset": 2.0}, + {"rate": 10.5, "offset": 1.0}, + ] + ) + def valid_instances(self, request): + """Creates various valid configurations of ConstantRateRequestTimings.""" + constructor_args = request.param + instance = ConstantRateRequestTimings(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization( + self, valid_instances: tuple[ConstantRateRequestTimings, dict] + ): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, ConstantRateRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("rate", 0), + ("rate", -1.0), + ("offset", -1.0), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + kwargs = {"rate": 1.0} + kwargs[field] = value + with pytest.raises(ValidationError): + ConstantRateRequestTimings(**kwargs) + + @pytest.mark.smoke + def test_constant_rate_behavior( + self, valid_instances: tuple[ConstantRateRequestTimings, dict] + ): + """Test that requests are scheduled at constant intervals.""" + instance, constructor_args = valid_instances + rate = constructor_args["rate"] + expected_interval = 1.0 / rate + base_offset = constructor_args.get("offset", 0.0) + num_requests = int(5 * rate) # simulate 5 seconds + + for ind in range(num_requests): + offset = instance.next_offset() + assert offset >= base_offset + assert offset == pytest.approx( + base_offset + ind * expected_interval, rel=1e-2 + ) + + @pytest.mark.smoke + def test_marshalling( + self, valid_instances: tuple[ConstantRateRequestTimings, dict] + ): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = ConstantRateRequestTimings.model_validate(data) + assert isinstance(reconstructed, ConstantRateRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + +class TestPoissonRateRequestTimings: + @pytest.fixture( + params=[ + {"rate": 1.0}, + { + "rate": 5.0, + "random_seed": 123, + "offset": 1.0, + }, + { + "rate": 0.5, + }, + ] + ) + def valid_instances(self, request): + """Creates various valid configurations of PoissonRateRequestTimings.""" + 
constructor_args = request.param + instance = PoissonRateRequestTimings(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization( + self, valid_instances: tuple[PoissonRateRequestTimings, dict] + ): + """Test initialization with valid configurations.""" + instance, constructor_args = valid_instances + assert isinstance(instance, PoissonRateRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("rate", 0), + ("rate", -1.0), + ("offset", "invalid"), + ("random_seed", "invalid"), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization scenarios.""" + kwargs = {"rate": 1.0} + kwargs[field] = value + with pytest.raises(ValidationError): + PoissonRateRequestTimings(**kwargs) + + @pytest.mark.smoke + def test_lifecycle(self, valid_instances: tuple[PoissonRateRequestTimings, dict]): + """Test that Poisson timing produces variable intervals.""" + instance, constructor_args = valid_instances + rate = constructor_args["rate"] + base_offset = constructor_args.get("offset", 0.0) + num_requests = 200 + last_offset = 0.0 + intervals = [] + + for index in range(num_requests): + offset = instance.next_offset() + + if index == 0: + assert offset == base_offset + else: + assert offset > last_offset + + intervals.append(offset - last_offset) + last_offset = offset + + expected_mean_interval = 1.0 / rate + actual_mean_interval = statistics.mean(intervals) + tolerance = 0.2 * expected_mean_interval + assert abs(actual_mean_interval - expected_mean_interval) < tolerance + + @pytest.mark.smoke + def test_marshalling(self, valid_instances: tuple[PoissonRateRequestTimings, dict]): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = PoissonRateRequestTimings.model_validate(data) + assert isinstance(reconstructed, PoissonRateRequestTimings) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + +class TestSchedulingStrategy: + @pytest.mark.smoke + def test_class_signatures(self): + """Test SchedulingStrategy inheritance and type relationships.""" + # Inheritance and abstract class properties + assert issubclass(SchedulingStrategy, object) + assert hasattr(SchedulingStrategy, "info") + + # Validate expected methods exist + expected_methods = { + "processes_limit", + "requests_limit", + "create_request_timings", + } + strategy_methods = set(dir(SchedulingStrategy)) + for method in expected_methods: + assert method in strategy_methods + + # validate expected properties + processes_limit_prop = SchedulingStrategy.processes_limit + assert isinstance(processes_limit_prop, property) + requests_limit_prop = SchedulingStrategy.requests_limit + assert isinstance(requests_limit_prop, property) + create_request_timings_method = SchedulingStrategy.create_request_timings + assert callable(create_request_timings_method) + + # Validate method signature + sig = inspect.signature(create_request_timings_method) + params = list(sig.parameters.keys()) + expected_params = [ + "self", + "local_rank", + "local_world_size", + "local_max_concurrency", + ] + assert params == expected_params + + @pytest.mark.sanity + def test_invalid_implementation(self): + """Test that invalid 
implementations raise NotImplementedError.""" + + class InvalidStrategy(SchedulingStrategy): + type_: Literal["strategy"] = "strategy" # type: ignore[assignment,annotation-unchecked] + + strategy = InvalidStrategy() + with pytest.raises(NotImplementedError): + strategy.create_request_timings(0, 1, 1) + + @pytest.mark.smoke + def test_concrete_implementation(self): + """Test that concrete implementations can be constructed.""" + + class TestStrategy(SchedulingStrategy): + type_: Literal["strategy"] = "strategy" # type: ignore[assignment,annotation-unchecked] + + def create_request_timings( + self, + local_rank: int, + local_world_size: int, + local_max_concurrency: int, + ): + return LastCompletionRequestTimings() + + strategy = TestStrategy() + assert isinstance(strategy, SchedulingStrategy) + timing = strategy.create_request_timings(0, 1, 1) + assert isinstance(timing, ScheduledRequestTimings) + + +class TestSynchronousStrategy: + @pytest.mark.smoke + def test_initialization(self): + """Test initialization of SynchronousStrategy.""" + strategy = SynchronousStrategy() + assert strategy.type_ == "synchronous" + + @pytest.mark.smoke + def test_limits(self): + """Test that SynchronousStrategy enforces proper limits.""" + strategy = SynchronousStrategy() + assert strategy.processes_limit == 1 + assert strategy.requests_limit == 1 + + @pytest.mark.smoke + def test_create_timings_valid(self): + """Test creating timings with valid parameters.""" + strategy = SynchronousStrategy() + timing = strategy.create_request_timings(0, 1, 1) + assert isinstance(timing, LastCompletionRequestTimings) + + @pytest.mark.sanity + def test_create_timings_invalid(self): + """Test that invalid parameters raise ValueError.""" + strategy = SynchronousStrategy() + + with pytest.raises(ValueError): + strategy.create_request_timings(1, 1, 1) # rank != 0 + + with pytest.raises(ValueError): + strategy.create_request_timings(0, 2, 1) # world_size > 1 + + @pytest.mark.smoke + def test_string_representation(self): + """Test __str__ method for SynchronousStrategy.""" + strategy = SynchronousStrategy() + result = str(strategy) + assert result == "synchronous" + + @pytest.mark.smoke + def test_marshalling(self): + """Test marshalling to/from pydantic dict formats.""" + strategy = SynchronousStrategy() + data = strategy.model_dump() + assert isinstance(data, dict) + assert data["type_"] == "synchronous" + + reconstructed = SynchronousStrategy.model_validate(data) + assert isinstance(reconstructed, SynchronousStrategy) + assert reconstructed.type_ == "synchronous" + + # Test polymorphic reconstruction via base registry class + base_reconstructed = SchedulingStrategy.model_validate(data) + assert isinstance(base_reconstructed, SynchronousStrategy) + assert base_reconstructed.type_ == "synchronous" + + # Test model_validate_json pathway + json_str = strategy.model_dump_json() + json_reconstructed = SynchronousStrategy.model_validate_json(json_str) + assert isinstance(json_reconstructed, SynchronousStrategy) + assert json_reconstructed.type_ == "synchronous" + + # Test polymorphic model_validate_json via base class + base_json_reconstructed = SchedulingStrategy.model_validate_json(json_str) + assert isinstance(base_json_reconstructed, SynchronousStrategy) + assert base_json_reconstructed.type_ == "synchronous" + + +class TestConcurrentStrategy: + @pytest.fixture( + params=[ + {"streams": 1}, + {"streams": 4}, + {"streams": 8, "startup_duration": 2.0}, + {"streams": 2, "startup_duration": 0.0}, + ] + ) + def valid_instances(self, 
request): + """Creates various valid configurations of ConcurrentStrategy.""" + constructor_args = request.param + instance = ConcurrentStrategy(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization(self, valid_instances: tuple[ConcurrentStrategy, dict]): + """Test initialization of ConcurrentStrategy.""" + instance, constructor_args = valid_instances + assert instance.type_ == "concurrent" + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("streams", 0), + ("streams", -1), + ("startup_duration", -1.0), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization.""" + kwargs = {"streams": 2} + kwargs[field] = value + with pytest.raises(ValidationError): + ConcurrentStrategy(**kwargs) + + @pytest.mark.smoke + def test_limits(self, valid_instances: tuple[ConcurrentStrategy, dict]): + """Test that ConcurrentStrategy returns correct limits.""" + instance, constructor_args = valid_instances + streams = constructor_args["streams"] + assert instance.processes_limit == streams + assert instance.requests_limit == streams + + @pytest.mark.smoke + def test_create_timings(self, valid_instances: tuple[ConcurrentStrategy, dict]): + """Test creating timings.""" + instance, constructor_args = valid_instances + streams = constructor_args["streams"] + startup_duration = constructor_args.get("startup_duration", 0.0) + + # Test with different rank and world_size combinations + for local_rank in range(min(streams, 2)): + for local_world_size in range(1, min(streams + 1, 3)): + if local_rank < local_world_size: + timing = instance.create_request_timings( + local_rank, local_world_size, streams + ) + assert isinstance(timing, LastCompletionRequestTimings) + + # Verify startup behavior + if startup_duration > 0: + # Check that timing has proper startup configuration + expected_delay_per_stream = startup_duration / streams + streams_per_worker = streams // local_world_size + expected_offset = ( + local_rank * streams_per_worker * expected_delay_per_stream + ) + assert timing.offset == pytest.approx(expected_offset, abs=1e-5) + + @pytest.mark.sanity + def test_create_timings_invalid( + self, valid_instances: tuple[ConcurrentStrategy, dict] + ): + """Test invalid inputs for create request timings.""" + instance, constructor_args = valid_instances + streams = constructor_args["streams"] + + # Test various invalid configurations + invalid_configs = [ + (streams, 1, 1), # rank >= streams + (0, streams + 1, 1), # world_size > streams + ] + + for local_rank, local_world_size, local_max_concurrency in invalid_configs: + if local_rank >= streams or local_world_size > streams: + with pytest.raises(ValueError): + instance.create_request_timings( + local_rank, local_world_size, local_max_concurrency + ) + + @pytest.mark.smoke + def test_string_representation( + self, valid_instances: tuple[ConcurrentStrategy, dict] + ): + """Test __str__ method for ConcurrentStrategy.""" + instance, constructor_args = valid_instances + streams = constructor_args["streams"] + result = str(instance) + assert result == f"concurrent@{streams}" + + @pytest.mark.smoke + def test_marshalling(self, valid_instances: tuple[ConcurrentStrategy, dict]): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + assert data["type_"] == 
"concurrent" + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = ConcurrentStrategy.model_validate(data) + assert isinstance(reconstructed, ConcurrentStrategy) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + # Test polymorphic reconstruction via base registry class + base_reconstructed = SchedulingStrategy.model_validate(data) + assert isinstance(base_reconstructed, ConcurrentStrategy) + assert base_reconstructed.type_ == "concurrent" + + for key, value in constructor_args.items(): + assert getattr(base_reconstructed, key) == value + + # Test model_validate_json pathway + json_str = instance.model_dump_json() + json_reconstructed = ConcurrentStrategy.model_validate_json(json_str) + assert isinstance(json_reconstructed, ConcurrentStrategy) + + for key, value in constructor_args.items(): + assert getattr(json_reconstructed, key) == value + + # Test polymorphic model_validate_json via base class + base_json_reconstructed = SchedulingStrategy.model_validate_json(json_str) + assert isinstance(base_json_reconstructed, ConcurrentStrategy) + assert base_json_reconstructed.type_ == "concurrent" + + for key, value in constructor_args.items(): + assert getattr(base_json_reconstructed, key) == value + + +class TestThroughputStrategy: + @pytest.fixture( + params=[ + {}, + {"max_concurrency": 10}, + {"startup_duration": 5.0}, + {"max_concurrency": 5, "startup_duration": 2.0}, + ] + ) + def valid_instances(self, request): + """Creates various valid configurations of ThroughputStrategy.""" + constructor_args = request.param + instance = ThroughputStrategy(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization(self, valid_instances: tuple[ThroughputStrategy, dict]): + """Test initialization of ThroughputStrategy.""" + instance, constructor_args = valid_instances + assert instance.type_ == "throughput" + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("max_concurrency", 0), + ("max_concurrency", -1), + ("startup_duration", -1.0), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + ThroughputStrategy(**kwargs) + + @pytest.mark.smoke + def test_limits(self, valid_instances: tuple[ThroughputStrategy, dict]): + """Test that ThroughputStrategy returns correct limits.""" + instance, constructor_args = valid_instances + max_concurrency = constructor_args.get("max_concurrency") + assert instance.processes_limit == max_concurrency + assert instance.requests_limit == max_concurrency + + @pytest.mark.smoke + def test_create_timings(self, valid_instances: tuple[ThroughputStrategy, dict]): + """Test creating timings.""" + instance, constructor_args = valid_instances + startup_duration = constructor_args.get("startup_duration", 0.0) + + # Test with different configurations + for local_rank in range(3): + for local_world_size in range(1, 4): + for local_max_concurrency in range(1, 6): + timing = instance.create_request_timings( + local_rank, local_world_size, local_max_concurrency + ) + assert isinstance(timing, NoDelayRequestTimings) + + # Verify startup configuration + if startup_duration > 0: + assert timing.startup_duration == startup_duration + assert timing.startup_target_requests == local_max_concurrency + expected_offset = ( + 0.05 * 
startup_duration * (local_rank / local_world_size) + ) + assert timing.offset == pytest.approx(expected_offset, abs=1e-5) + else: + assert timing.startup_duration == 0.0 + assert timing.offset == 0.0 + + @pytest.mark.smoke + def test_string_representation( + self, valid_instances: tuple[ThroughputStrategy, dict] + ): + """Test __str__ method for ThroughputStrategy.""" + instance, _ = valid_instances + result = str(instance) + assert result == "throughput" + + @pytest.mark.smoke + def test_marshalling(self, valid_instances: tuple[ThroughputStrategy, dict]): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + assert data["type_"] == "throughput" + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = ThroughputStrategy.model_validate(data) + assert isinstance(reconstructed, ThroughputStrategy) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + # Test polymorphic reconstruction via base registry class + base_reconstructed = SchedulingStrategy.model_validate(data) + assert isinstance(base_reconstructed, ThroughputStrategy) + assert base_reconstructed.type_ == "throughput" + + for key, value in constructor_args.items(): + assert getattr(base_reconstructed, key) == value + + # Test model_validate_json pathway + json_str = instance.model_dump_json() + json_reconstructed = ThroughputStrategy.model_validate_json(json_str) + assert isinstance(json_reconstructed, ThroughputStrategy) + + for key, value in constructor_args.items(): + assert getattr(json_reconstructed, key) == value + + # Test polymorphic model_validate_json via base class + base_json_reconstructed = SchedulingStrategy.model_validate_json(json_str) + assert isinstance(base_json_reconstructed, ThroughputStrategy) + assert base_json_reconstructed.type_ == "throughput" + + for key, value in constructor_args.items(): + assert getattr(base_json_reconstructed, key) == value + + +class TestAsyncConstantStrategy: + @pytest.fixture( + params=[ + {"rate": 1.0}, + {"rate": 5.0}, + {"rate": 10.3, "max_concurrency": 8}, + ] + ) + def valid_instances(self, request): + """Creates various valid configurations of AsyncConstantStrategy.""" + constructor_args = request.param + instance = AsyncConstantStrategy(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization(self, valid_instances: tuple[AsyncConstantStrategy, dict]): + """Test initialization of AsyncConstantStrategy.""" + instance, constructor_args = valid_instances + assert instance.type_ == "constant" + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("rate", 0), + ("rate", -1.0), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization.""" + kwargs = {"rate": 1.0} + kwargs[field] = value + with pytest.raises(ValidationError): + AsyncConstantStrategy(**kwargs) + + @pytest.mark.smoke + def test_create_timings(self, valid_instances: tuple[AsyncConstantStrategy, dict]): + """Test creating timings.""" + instance, constructor_args = valid_instances + rate = constructor_args["rate"] + + # Test with different worker configurations + for local_world_size in range(1, 5): + timing = instance.create_request_timings(0, local_world_size, 1) + assert isinstance(timing, ConstantRateRequestTimings) + + # 
Rate should be distributed across workers + expected_worker_rate = rate / local_world_size + assert timing.rate == pytest.approx(expected_worker_rate, abs=1e-5) + + @pytest.mark.smoke + def test_string_representation( + self, valid_instances: tuple[AsyncConstantStrategy, dict] + ): + """Test __str__ method for AsyncConstantStrategy.""" + instance, constructor_args = valid_instances + rate = constructor_args["rate"] + result = str(instance) + assert result == f"constant@{rate:.2f}" + + @pytest.mark.smoke + def test_marshalling(self, valid_instances: tuple[AsyncConstantStrategy, dict]): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + assert data["type_"] == "constant" + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = AsyncConstantStrategy.model_validate(data) + assert isinstance(reconstructed, AsyncConstantStrategy) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + # Test polymorphic reconstruction via base registry class + base_reconstructed = SchedulingStrategy.model_validate(data) + assert isinstance(base_reconstructed, AsyncConstantStrategy) + assert base_reconstructed.type_ == "constant" + + for key, value in constructor_args.items(): + assert getattr(base_reconstructed, key) == value + + # Test model_validate_json pathway + json_str = instance.model_dump_json() + json_reconstructed = AsyncConstantStrategy.model_validate_json(json_str) + assert isinstance(json_reconstructed, AsyncConstantStrategy) + + for key, value in constructor_args.items(): + assert getattr(json_reconstructed, key) == value + + # Test polymorphic model_validate_json via base class + base_json_reconstructed = SchedulingStrategy.model_validate_json(json_str) + assert isinstance(base_json_reconstructed, AsyncConstantStrategy) + assert base_json_reconstructed.type_ == "constant" + + for key, value in constructor_args.items(): + assert getattr(base_json_reconstructed, key) == value + + +class TestAsyncPoissonStrategy: + @pytest.fixture( + params=[ + {"rate": 1.0}, + {"rate": 5.0, "random_seed": 123}, + {"rate": 10.3, "random_seed": 456, "max_concurrency": 8}, + ] + ) + def valid_instances(self, request): + """Creates various valid configurations of AsyncPoissonStrategy.""" + constructor_args = request.param + instance = AsyncPoissonStrategy(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_initialization(self, valid_instances: tuple[AsyncPoissonStrategy, dict]): + """Test initialization of AsyncPoissonStrategy.""" + instance, constructor_args = valid_instances + assert instance.type_ == "poisson" + + for key, value in constructor_args.items(): + assert getattr(instance, key) == value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("rate", 0), + ("rate", -1.0), + ], + ) + def test_invalid_initialization(self, field, value): + """Test invalid initialization.""" + kwargs = {"rate": 1.0, "random_seed": 42} + kwargs[field] = value + with pytest.raises(ValidationError): + AsyncPoissonStrategy(**kwargs) + + @pytest.mark.smoke + def test_create_timings(self, valid_instances: tuple[AsyncPoissonStrategy, dict]): + """Test creating timings.""" + instance, constructor_args = valid_instances + rate = constructor_args["rate"] + base_seed = constructor_args.get("random_seed", 42) + + # Test with different worker configurations + for local_rank in 
range(3): + for local_world_size in range(1, 4): + timing = instance.create_request_timings( + local_rank, local_world_size, 1 + ) + assert isinstance(timing, PoissonRateRequestTimings) + + # Rate should be distributed across workers + expected_worker_rate = rate / local_world_size + assert timing.rate == pytest.approx(expected_worker_rate, abs=1e-5) + + # Each worker should have a unique seed + expected_seed = base_seed + local_rank + assert timing.random_seed == expected_seed + + @pytest.mark.smoke + def test_string_representation( + self, valid_instances: tuple[AsyncPoissonStrategy, dict] + ): + """Test __str__ method for AsyncPoissonStrategy.""" + instance, constructor_args = valid_instances + rate = constructor_args["rate"] + result = str(instance) + assert result == f"poisson@{rate:.2f}" + + @pytest.mark.smoke + def test_marshalling(self, valid_instances: tuple[AsyncPoissonStrategy, dict]): + """Test marshalling to/from pydantic dict formats.""" + instance, constructor_args = valid_instances + + data = instance.model_dump() + assert isinstance(data, dict) + assert data["type_"] == "poisson" + + for key, value in constructor_args.items(): + assert data[key] == value + + reconstructed = AsyncPoissonStrategy.model_validate(data) + assert isinstance(reconstructed, AsyncPoissonStrategy) + + for key, value in constructor_args.items(): + assert getattr(reconstructed, key) == value + + # Test polymorphic reconstruction via base registry class + base_reconstructed = SchedulingStrategy.model_validate(data) + assert isinstance(base_reconstructed, AsyncPoissonStrategy) + assert base_reconstructed.type_ == "poisson" + + for key, value in constructor_args.items(): + assert getattr(base_reconstructed, key) == value + + # Test model_validate_json pathway + json_str = instance.model_dump_json() + json_reconstructed = AsyncPoissonStrategy.model_validate_json(json_str) + assert isinstance(json_reconstructed, AsyncPoissonStrategy) + + for key, value in constructor_args.items(): + assert getattr(json_reconstructed, key) == value + + # Test polymorphic model_validate_json via base class + base_json_reconstructed = SchedulingStrategy.model_validate_json(json_str) + assert isinstance(base_json_reconstructed, AsyncPoissonStrategy) + assert base_json_reconstructed.type_ == "poisson" + + for key, value in constructor_args.items(): + assert getattr(base_json_reconstructed, key) == value diff --git a/tests/unit/scheduler/test_worker.py b/tests/unit/scheduler/test_worker.py new file mode 100644 index 00000000..b62d66d5 --- /dev/null +++ b/tests/unit/scheduler/test_worker.py @@ -0,0 +1,672 @@ +from __future__ import annotations + +import asyncio +import inspect +import random +import time +from dataclasses import dataclass +from functools import wraps +from multiprocessing import Barrier, Event, Process +from multiprocessing.synchronize import Barrier as ProcessingBarrier +from multiprocessing.synchronize import Event as ProcessingEvent +from typing import Any, Generic, Literal + +import pytest +import pytest_asyncio + +from guidellm.scheduler import ( + BackendInterface, + ConstantRateRequestTimings, + LastCompletionRequestTimings, + MeasuredRequestTimings, + NoDelayRequestTimings, + PoissonRateRequestTimings, + ScheduledRequestInfo, + ScheduledRequestTimings, + SchedulerMessagingPydanticRegistry, + WorkerProcess, +) +from guidellm.utils import InterProcessMessagingQueue + +STANDARD_NUM_REQUESTS: int = 200 + + +def async_timeout(delay): + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): 
+ return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +@dataclass +class TimingsBounds: + exact: float | None = None + lower: float | None = None + upper: float | None = None + prev_request: Literal["greater", "greater_equal", "less", "less_equal"] | None = ( + None + ) + tolerance: float = 10e-4 + actual_tolerance: float = 10e-4 + + +class MockRequestTimings(MeasuredRequestTimings): + """Mock timing implementation for testing.""" + + +class MockBackend(BackendInterface): + """Mock backend for testing worker functionality.""" + + def __init__( + self, + lifecycle_delay: float = 0.1, + resolve_delay: float = 0.0, + should_fail: bool = False, + request_error_rate: float = 0.0, + ): + self.lifecycle_delay = lifecycle_delay + self.resolve_delay = resolve_delay + self.should_fail = should_fail + self.request_error_rate = request_error_rate + self.process_startup_called = False + self.validate_called = False + self.process_shutdown_called = False + self.resolve_called = False + + @property + def processes_limit(self) -> int | None: + return None + + @property + def requests_limit(self) -> int | None: + return None + + @property + def info(self) -> dict[str, Any]: + return { + "type": "mock", + "lifecycle_delay": self.lifecycle_delay, + "resolve_delay": self.resolve_delay, + } + + async def process_startup(self): + await asyncio.sleep(self.lifecycle_delay) + self.process_startup_called = True + + async def validate(self): + await asyncio.sleep(self.lifecycle_delay) + self.validate_called = True + if self.should_fail: + raise RuntimeError("Mock validation failed") + + async def process_shutdown(self): + await asyncio.sleep(self.lifecycle_delay) + self.process_shutdown_called = True + + async def resolve(self, request, request_info, request_history): + self.resolve_called = True + await asyncio.sleep( + self.resolve_delay if not str(request).startswith("cancel") else 1000.0 + ) + if self.should_fail: + raise RuntimeError("Mock resolve failed") + if self.request_error_rate > 0.0 and random.random() < self.request_error_rate: + raise RuntimeError("Mock resolve failed") + yield f"response_for_{request}", request_info + + +class TestWorkerProcess: + """Test suite for WorkerProcess class.""" + + @pytest_asyncio.fixture( + params=[ + { + "messaging": { + "serialization": "dict", + "encoding": None, + "max_buffer_receive_size": 2, + }, + "worker": { + "async_limit": 1, + }, + }, + { + "messaging": { + "serialization": "dict", + "encoding": None, + "max_buffer_receive_size": 100, + }, + "worker": { + "async_limit": 1000, + }, + }, + ], + ) + async def valid_instances(self, request): + """Fixture providing test data for WorkerProcess.""" + constructor_args = request.param + main_messaging = InterProcessMessagingQueue( + **constructor_args["messaging"], poll_interval=0.01 + ) + + try: + instance = WorkerProcess( + messaging=main_messaging.create_worker_copy(0), + backend=MockBackend(), + request_timings=LastCompletionRequestTimings(), + **constructor_args["worker"], + startup_barrier=Barrier(2), + requests_generated_event=Event(), + constraint_reached_event=Event(), + shutdown_event=Event(), + error_event=Event(), + ) + await main_messaging.start( + pydantic_models=list( + SchedulerMessagingPydanticRegistry.registry.values() + ) + ) + yield instance, main_messaging, constructor_args + finally: + await main_messaging.stop() + + @pytest.mark.smoke + def test_class_signatures( + self, + valid_instances: tuple[WorkerProcess, InterProcessMessagingQueue, 
dict], + ): + """Test inheritance and type relationships.""" + worker_process, main_messaging, constructor_args = valid_instances + + # Class + assert isinstance(worker_process, Generic) + assert issubclass(WorkerProcess, Generic) + + # Generics + orig_bases = getattr(WorkerProcess, "__orig_bases__", ()) + assert len(orig_bases) > 0 + generic_base = next( + ( + base + for base in orig_bases + if hasattr(base, "__origin__") and base.__origin__ is Generic + ), + None, + ) + assert generic_base is not None + type_args = getattr(generic_base, "__args__", ()) + assert len(type_args) == 2 # RequestT, ResponseT + + # Function signatures + run_sig = inspect.signature(WorkerProcess.run) + assert len(run_sig.parameters) == 1 + assert "self" in run_sig.parameters + + run_async_sig = inspect.signature(WorkerProcess.run_async) + assert len(run_async_sig.parameters) == 1 + assert "self" in run_async_sig.parameters + + stop_processing_sig = inspect.signature(WorkerProcess._stop_monitor) + assert len(stop_processing_sig.parameters) == 1 + assert "self" in stop_processing_sig.parameters + + requests_processing_sig = inspect.signature(WorkerProcess._process_requests) + assert len(requests_processing_sig.parameters) == 1 + assert "self" in requests_processing_sig.parameters + + @pytest.mark.smoke + def test_initialization( + self, + valid_instances: tuple[WorkerProcess, InterProcessMessagingQueue, dict], + ): + """Test basic initialization of WorkerProcess.""" + instance, main_messaging, constructor_args = valid_instances + + # messaging + assert instance.messaging is not None + assert isinstance(instance.messaging, InterProcessMessagingQueue) + assert instance.messaging is not main_messaging + assert instance.messaging.worker_index is not None + assert instance.messaging.worker_index == 0 + assert ( + instance.messaging.serialization + == constructor_args["messaging"]["serialization"] + ) + assert instance.messaging.encoding == constructor_args["messaging"]["encoding"] + assert ( + instance.messaging.max_buffer_receive_size + == constructor_args["messaging"]["max_buffer_receive_size"] + ) + + # worker + assert instance.async_limit == constructor_args["worker"]["async_limit"] + assert instance.startup_barrier is not None + assert isinstance(instance.startup_barrier, ProcessingBarrier) + assert instance.shutdown_event is not None + assert isinstance(instance.shutdown_event, ProcessingEvent) + assert instance.error_event is not None + assert isinstance(instance.error_event, ProcessingEvent) + assert instance.requests_generated_event is not None + assert isinstance(instance.requests_generated_event, ProcessingEvent) + assert instance.constraint_reached_event is not None + assert isinstance(instance.constraint_reached_event, ProcessingEvent) + assert instance.backend is not None + assert isinstance(instance.backend, MockBackend) + assert instance.request_timings is not None + assert isinstance(instance.request_timings, LastCompletionRequestTimings) + assert not instance.startup_completed + + @pytest.mark.sanity + def test_invalid_initialization(self): + """Test that invalid initialization raises appropriate errors.""" + + # Test with missing required parameters + with pytest.raises(TypeError): + WorkerProcess() + + # Create a complete set of valid parameters + backend = MockBackend() + request_timings = LastCompletionRequestTimings() + barrier = Barrier(2) + shutdown_event = Event() + error_event = Event() + requests_generated_event = Event() + constraint_reached_event = Event() + messaging = 
InterProcessMessagingQueue() + + # Test missing each required parameter one by one + required_params = [ + "messaging", + "backend", + "request_timings", + "async_limit", + "startup_barrier", + "requests_generated_event", + "constraint_reached_event", + "shutdown_event", + "error_event", + ] + + for param_to_remove in required_params: + kwargs = { + "messaging": messaging, + "backend": backend, + "request_timings": request_timings, + "async_limit": 5, + "startup_barrier": barrier, + "requests_generated_event": requests_generated_event, + "constraint_reached_event": constraint_reached_event, + "shutdown_event": shutdown_event, + "error_event": error_event, + } + + del kwargs[param_to_remove] + + with pytest.raises(TypeError): + WorkerProcess(**kwargs) + + @pytest.mark.smoke + @pytest.mark.asyncio + # @async_timeout(15) + @pytest.mark.parametrize( + ("num_requests", "num_canceled", "error_rate"), + [ + (20, 0, 0), + (STANDARD_NUM_REQUESTS, 20, 0.5), + ], + ) + async def test_run_async_lifecycle( # noqa: C901, PLR0912 + self, + valid_instances: tuple[WorkerProcess, InterProcessMessagingQueue, dict], + num_requests: int, + num_canceled: int, + error_rate: float, + ): + """Test the asynchronous request processing of WorkerProcess.""" + instance, main_messaging, constructor_args = valid_instances + instance.backend.request_error_rate = error_rate + instance_task = asyncio.create_task(instance.run_async()) + + try: + await asyncio.to_thread(instance.startup_barrier.wait) + start_time = time.time() + + # Send regular requests + requests_tracker = {} + for index in range(num_requests): + request = f"request_{index}" + request_info = ScheduledRequestInfo( + request_id=request, + scheduler_start_time=start_time, + scheduler_process_id=0, + ) + request_info.scheduler_timings.queued = time.time() + requests_tracker[request] = { + "sent": True, + "received_pending": 0, + "received_in_progress": 0, + "received_resolved": 0, + } + await main_messaging.put( + (request, request_info), + timeout=2.0, + ) + + # Process regular requests + error_count = 0 + for _ in range(num_requests * 3): + # Each request must have a pending, in_progress, and resolution + response, request, request_info = await main_messaging.get(timeout=2.0) + assert request is not None + assert request_info is not None + assert request_info.request_id is not None + assert request_info.status is not None + assert request_info.scheduler_node_id > -1 + assert request_info.scheduler_process_id > -1 + assert request_info.scheduler_start_time == start_time + assert request_info.scheduler_timings is not None + assert request_info.scheduler_timings.targeted_start is not None + assert request_info.scheduler_timings.targeted_start >= start_time + + if request_info.status == "pending": + requests_tracker[request]["received_pending"] += 1 + assert request_info.scheduler_timings.dequeued is not None + assert ( + request_info.scheduler_timings.dequeued + >= request_info.scheduler_timings.targeted_start + ) + elif request_info.status == "in_progress": + requests_tracker[request]["received_in_progress"] += 1 + assert request_info.scheduler_timings.scheduled_at is not None + assert ( + request_info.scheduler_timings.scheduled_at + >= request_info.scheduler_timings.dequeued + ) + assert request_info.scheduler_timings.resolve_start is not None + assert ( + request_info.scheduler_timings.resolve_start + >= request_info.scheduler_timings.scheduled_at + ) + elif request_info.status == "completed": + assert response == f"response_for_{request}" + 
requests_tracker[request]["received_resolved"] += 1 + assert request_info.scheduler_timings.resolve_end is not None + assert ( + request_info.scheduler_timings.resolve_end + > request_info.scheduler_timings.resolve_start + ) + elif request_info.status == "errored": + assert response is None + requests_tracker[request]["received_resolved"] += 1 + error_count += 1 + assert request_info.scheduler_timings.resolve_end is not None + assert ( + request_info.scheduler_timings.resolve_end + > request_info.scheduler_timings.resolve_start + ) + else: + raise ValueError(f"Unexpected status: {request_info.status}") + + # Ensure correct error rate + assert float(error_count) / num_requests == pytest.approx( + error_rate, rel=0.2 + ) + + # Ensure no extra statuses + with pytest.raises(asyncio.TimeoutError): + await main_messaging.get(timeout=0.5) + + # Send cancel requests + for index in range(num_canceled): + cancel_request = f"cancel_request_{index}" + cancel_info = ScheduledRequestInfo( + request_id=request, + scheduler_start_time=start_time, + scheduler_process_id=0, + ) + cancel_info.scheduler_timings.queued = time.time() + requests_tracker[cancel_request] = { + "sent": True, + "received_pending": 0, + "received_in_progress": 0, + "received_resolved": 0, + } + await main_messaging.put( + (cancel_request, cancel_info), + timeout=2.0, + ) + + # Receive expected updates for cancel up to async number + for _ in range(2 * min(num_canceled, instance.async_limit)): + # Each request (up to async limit) will have pending, in_progress + response, request, request_info = await main_messaging.get(timeout=2.0) + if request_info.status == "pending": + requests_tracker[request]["received_pending"] += 1 + elif request_info.status == "in_progress": + requests_tracker[request]["received_in_progress"] += 1 + error_count += 1 + else: + raise ValueError(f"Unexpected status: {request_info.status}") + + # Signal constraints reached to start canceling + instance.constraint_reached_event.set() + await asyncio.sleep(0) + + # Receive the remaining canceled updates + for _ in range(num_canceled): + # All cancel requests should resolve with canceled (no other statuses) + response, request, request_info = await main_messaging.get(timeout=2.0) + assert request is not None + assert request_info is not None + assert request_info.request_id is not None + assert request_info.status is not None + assert request_info.scheduler_node_id > -1 + assert request_info.scheduler_process_id > -1 + assert request_info.scheduler_start_time == start_time + assert request_info.scheduler_timings is not None + + if request_info.status == "cancelled": + requests_tracker[request]["received_resolved"] += 1 + assert request_info.scheduler_timings.resolve_end is not None + assert request_info.scheduler_timings.resolve_end > start_time + else: + raise ValueError(f"Unexpected status: {request_info.status}") + + # Ensure no extra statuses + with pytest.raises(asyncio.TimeoutError): + await main_messaging.get(timeout=0.5) + + # Signal requests stop now that all requests have been processed + instance.requests_generated_event.set() + await asyncio.sleep(0) + + # Validate all the requests are correct + for request_key in [f"request_{index}" for index in range(num_requests)]: + assert request_key in requests_tracker + assert requests_tracker[request_key]["sent"] + assert requests_tracker[request_key]["received_pending"] == 1 + assert requests_tracker[request_key]["received_resolved"] == 1 + if request_key.startswith("request"): + assert 
requests_tracker[request_key]["received_in_progress"] == 1 + finally: + # Shut down + instance.shutdown_event.set() + await asyncio.wait_for(instance_task, timeout=2.0) + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(15) + @pytest.mark.parametrize( + ("request_timings", "timing_bounds"), + [ + ( + LastCompletionRequestTimings(offset=0.1), + [ + TimingsBounds(lower=0.1, prev_request="greater_equal") + for _ in range(STANDARD_NUM_REQUESTS) + ], + ), + ( + NoDelayRequestTimings(offset=0.05), + [ + TimingsBounds(lower=0.05, upper=0.05, actual_tolerance=1.0) + for _ in range(STANDARD_NUM_REQUESTS) + ], + ), + ( + ConstantRateRequestTimings(rate=100, offset=0.2), + [ + TimingsBounds( + exact=0.2 + ind * 0.01, + lower=0.2, + prev_request="greater", + actual_tolerance=10e-2, + ) + for ind in range(STANDARD_NUM_REQUESTS) + ], + ), + ( + PoissonRateRequestTimings(rate=200, offset=0.01), + [ + TimingsBounds(lower=0.01, prev_request="greater") + for ind in range(STANDARD_NUM_REQUESTS) + ], + ), + ], + ids=[ + "LastCompletion", + "NoDelay", + "ConstantRate", + "PoissonRate", + ], + ) + async def test_run_with_timings( # noqa: C901, PLR0912 + self, + valid_instances: tuple[WorkerProcess, InterProcessMessagingQueue, dict], + request_timings: ScheduledRequestTimings, + timing_bounds: list[TimingsBounds], + ): + instance, main_messaging, constructor_args = valid_instances + instance.request_timings = request_timings + num_requests = STANDARD_NUM_REQUESTS + assert len(timing_bounds) == num_requests + + # Start process + process = Process(target=instance.run) + process.start() + + try: + await asyncio.to_thread(instance.startup_barrier.wait) + start_time = time.time() + 0.1 + + # Send regular requests + requests_tracker = {} + for ind in range(num_requests): + request = f"request_{ind}" + requests_tracker[request] = { + "sent": True, + "target_start_time": -1, + "actual_start_time": -1, + "received_pending": 0, + "received_in_progress": 0, + "received_resolved": 0, + } + await main_messaging.put( + ( + request, + ScheduledRequestInfo(scheduler_start_time=start_time), + ), + timeout=2.0, + ) + + # Process regular requests + for _ in range(num_requests * 3): + # Each request must have pending, in_progress, and resolved statuses + response, request, request_info = await main_messaging.get(timeout=2.0) + if request_info.status == "pending": + requests_tracker[request]["received_pending"] += 1 + elif request_info.status == "in_progress": + requests_tracker[request]["received_in_progress"] += 1 + requests_tracker[request]["target_start_time"] = ( + request_info.scheduler_timings.targeted_start + ) + requests_tracker[request]["actual_start_time"] = ( + request_info.scheduler_timings.resolve_start + ) + elif request_info.status == "completed": + assert response == f"response_for_{request}" + requests_tracker[request]["received_resolved"] += 1 + else: + raise ValueError(f"Unexpected status: {request_info.status}") + + # Ensure no extra statuses + with pytest.raises(asyncio.TimeoutError): + await main_messaging.get(timeout=0.1) + + # Trigger stopping for constraints and requests + instance.requests_generated_event.set() + instance.constraint_reached_event.set() + await asyncio.sleep(0) + + # Validate request values are correct + for ind in range(num_requests): + request = f"request_{ind}" + assert requests_tracker[request]["received_pending"] == 1 + assert requests_tracker[request]["received_in_progress"] == 1 + assert requests_tracker[request]["received_resolved"] == 1 + + bounds = timing_bounds[ind] + 
target_offset = ( + requests_tracker[request]["target_start_time"] - start_time + ) + actual_offset = ( + requests_tracker[request]["actual_start_time"] - start_time + ) + prev_offset = ( + requests_tracker[f"request_{ind - 1}"]["target_start_time"] + - start_time + if ind > 0 + else None + ) + + if bounds.exact is not None: + assert target_offset == pytest.approx( + bounds.exact, rel=bounds.tolerance + ) + assert target_offset == pytest.approx( + actual_offset, rel=bounds.actual_tolerance or bounds.tolerance + ) + if bounds.lower is not None: + assert target_offset >= bounds.lower - bounds.tolerance + assert actual_offset >= bounds.lower - ( + bounds.actual_tolerance or bounds.tolerance + ) + if bounds.upper is not None: + assert target_offset <= bounds.upper + bounds.tolerance + assert actual_offset <= bounds.upper + ( + bounds.actual_tolerance or bounds.tolerance + ) + if bounds.prev_request is not None and prev_offset is not None: + if bounds.prev_request == "greater": + assert target_offset > prev_offset - bounds.tolerance + elif bounds.prev_request == "greater_equal": + assert target_offset >= prev_offset - bounds.tolerance + elif bounds.prev_request == "less": + assert target_offset < prev_offset + bounds.tolerance + elif bounds.prev_request == "less_equal": + assert target_offset <= prev_offset + bounds.tolerance + finally: + # Trigger shutdown + instance.shutdown_event.set() + await asyncio.to_thread(process.join, timeout=2.0) + + if process.is_alive(): + process.terminate() + await asyncio.to_thread(process.join, timeout=2.0) + assert process.exitcode <= 0, ( + f"Process exited with error code: {process.exitcode}" + ) diff --git a/tests/unit/scheduler/test_worker_group.py b/tests/unit/scheduler/test_worker_group.py new file mode 100644 index 00000000..b72fb95b --- /dev/null +++ b/tests/unit/scheduler/test_worker_group.py @@ -0,0 +1,473 @@ +from __future__ import annotations + +import asyncio +import inspect +import time +from functools import wraps +from multiprocessing.context import BaseContext +from multiprocessing.managers import BaseManager +from multiprocessing.process import BaseProcess +from multiprocessing.synchronize import Barrier, Event +from typing import Any, Generic, Literal + +import pytest +from pydantic import Field + +from guidellm.scheduler import ( + AsyncConstantStrategy, + BackendInterface, + ConcurrentStrategy, + MaxDurationConstraint, + MaxNumberConstraint, + MeasuredRequestTimings, + ScheduledRequestInfo, + SchedulerMessagingPydanticRegistry, + SchedulerState, + SynchronousStrategy, + ThroughputStrategy, + WorkerProcessGroup, +) +from guidellm.scheduler.worker_group import WorkerGroupState +from guidellm.utils import InterProcessMessaging + + +def async_timeout(delay): + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +class MockRequestTimings(MeasuredRequestTimings): + """Mock timing implementation for testing.""" + + timings_type: Literal["mock"] = Field(default="mock") + + +class MockBackend(BackendInterface): + """Mock backend for testing worker group functionality.""" + + def __init__( + self, + processes_limit_value: int | None = None, + requests_limit_value: int | None = None, + ): + self._processes_limit = processes_limit_value + self._requests_limit = requests_limit_value + + @property + def processes_limit(self) -> int | None: + return self._processes_limit + + @property + def requests_limit(self) -> int 
| None: + return self._requests_limit + + def info(self) -> dict[str, Any]: + return {"type": "mock"} + + async def process_startup(self): + pass + + async def validate(self): + pass + + async def process_shutdown(self): + pass + + async def resolve(self, request, request_info, request_history): + request_info.request_timings = MockRequestTimings( + request_start=time.time(), request_end=time.time() + ) + yield f"response_for_{request}", request_info + + +class TestWorkerProcessGroup: + """Test suite for WorkerProcessGroup class.""" + + def setup_method(self): + self._original_messaging_registry = ( + SchedulerMessagingPydanticRegistry.registry.copy() + if SchedulerMessagingPydanticRegistry.registry + else {} + ) + self._original_timings_registry = ( + MeasuredRequestTimings.registry.copy() + if MeasuredRequestTimings.registry + else {} + ) + MeasuredRequestTimings.register_decorator(MockRequestTimings, "mock") + SchedulerMessagingPydanticRegistry.register_decorator( + MockRequestTimings, "mock" + ) + + def teardown_method(self): + SchedulerMessagingPydanticRegistry.registry = self._original_messaging_registry + MeasuredRequestTimings.registry = self._original_timings_registry + MeasuredRequestTimings.model_rebuild(force=True) + ScheduledRequestInfo.model_rebuild(force=True) + + @pytest.fixture( + params=[ + { + "requests": None, + "cycle_requests": ["request1", "request2", "request3"], + "strategy": SynchronousStrategy(), + "constraints": {"max_num": MaxNumberConstraint(max_num=10)}, + }, + { + "requests": None, + "cycle_requests": ["req_a", "req_b"], + "strategy": ConcurrentStrategy(streams=2), + "constraints": {"max_num": MaxNumberConstraint(max_num=5)}, + }, + { + "requests": ["req_x", "req_y", "req_z"], + "cycle_requests": None, + "strategy": ThroughputStrategy(max_concurrency=5), + "constraints": {}, + }, + { + "requests": None, + "cycle_requests": ["req_8", "req_9", "req_10"], + "strategy": AsyncConstantStrategy(rate=20), + "constraints": {"max_duration": MaxDurationConstraint(max_duration=1)}, + }, + ], + ids=["sync_max", "concurrent_max", "throughput_no_cycle", "constant_duration"], + ) + def valid_instances(self, request): + """Fixture providing test data for WorkerProcessGroup.""" + constructor_args = request.param.copy() + instance = WorkerProcessGroup(**request.param, backend=MockBackend()) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self, valid_instances): + """Test inheritance and type relationships.""" + instance, _ = valid_instances + + # Class + assert isinstance(instance, Generic) + assert issubclass(WorkerProcessGroup, Generic) + + # Generics + orig_bases = getattr(WorkerProcessGroup, "__orig_bases__", ()) + assert len(orig_bases) > 0 + generic_base = next( + ( + base + for base in orig_bases + if hasattr(base, "__origin__") and base.__origin__ is Generic + ), + None, + ) + assert generic_base is not None + type_args = getattr(generic_base, "__args__", ()) + assert len(type_args) == 2 + + # Function signatures + create_processes_sig = inspect.signature(WorkerProcessGroup.create_processes) + assert len(create_processes_sig.parameters) == 1 + assert "self" in create_processes_sig.parameters + + start_sig = inspect.signature(WorkerProcessGroup.start) + assert len(start_sig.parameters) == 2 + assert "self" in start_sig.parameters + assert "start_time" in start_sig.parameters + + request_updates_sig = inspect.signature(WorkerProcessGroup.request_updates) + assert len(request_updates_sig.parameters) == 1 + assert "self" in 
request_updates_sig.parameters + + shutdown_sig = inspect.signature(WorkerProcessGroup.shutdown) + assert len(shutdown_sig.parameters) == 1 + assert "self" in shutdown_sig.parameters + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test basic initialization of WorkerProcessGroup.""" + instance, constructor_args = valid_instances + + # Core attributes + assert isinstance(instance.backend, MockBackend) + assert instance.requests is constructor_args["requests"] + assert instance.cycle_requests is constructor_args["cycle_requests"] + assert isinstance(instance.strategy, type(constructor_args["strategy"])) + assert isinstance(instance.constraints, dict) + assert instance.constraints == constructor_args["constraints"] + + # Multiprocessing attributes (should be None initially) + assert instance.mp_context is None + assert instance.mp_manager is None + assert instance.processes is None + + # Synchronization primitives (should be None initially) + assert instance.startup_barrier is None + assert instance.shutdown_event is None + assert instance.error_event is None + assert instance.requests_generated_event is None + assert instance.constraint_reached_event is None + + # Scheduler state and messaging (should be None initially) + assert instance.state is None + assert instance.messaging is None + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("requests", "cycle_requests", "expected_error"), + [ + (None, None, ValueError), + ([], iter([]), ValueError), # cycle_requests as Iterator + (None, iter(["req1"]), ValueError), # cycle_requests as Iterator + ], + ids=["no_requests", "cycle_as_iterator_empty", "cycle_as_iterator_data"], + ) + def test_invalid_initialization_values( + self, requests, cycle_requests, expected_error + ): + """Test WorkerProcessGroup with invalid initialization values.""" + with pytest.raises(expected_error): + WorkerProcessGroup( + requests=requests, + cycle_requests=cycle_requests, + backend=MockBackend(), + strategy=SynchronousStrategy(), + constraints={}, + ) + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test WorkerProcessGroup initialization without required fields.""" + with pytest.raises(TypeError): + WorkerProcessGroup() + + @pytest.mark.smoke + @async_timeout(10) + @pytest.mark.asyncio + async def test_lifecycle(self, valid_instances: tuple[WorkerProcessGroup, dict]): # noqa: C901, PLR0912 + """Test the lifecycle methods of WorkerProcessGroup.""" + instance, constructor_args = valid_instances + assert instance.requests or instance.cycle_requests + assert instance.backend + assert instance.strategy + assert instance.constraints is not None + + # Validate create_processes works and sets correct state + await instance.create_processes() + assert instance.mp_context is not None + assert isinstance(instance.mp_context, BaseContext) + assert instance.mp_manager is not None + assert isinstance(instance.mp_manager, BaseManager) + assert instance.processes is not None + assert isinstance(instance.processes, list) + assert len(instance.processes) > 0 + assert all(isinstance(proc, BaseProcess) for proc in instance.processes) + assert all(proc.is_alive() for proc in instance.processes) + assert instance.startup_barrier is not None + assert isinstance(instance.startup_barrier, Barrier) + assert instance.requests_generated_event is not None + assert isinstance(instance.requests_generated_event, Event) + assert instance.constraint_reached_event is not None + assert isinstance(instance.constraint_reached_event, Event) + 
assert instance.shutdown_event is not None + assert isinstance(instance.shutdown_event, Event) + assert instance.error_event is not None + assert isinstance(instance.error_event, Event) + assert instance.messaging is not None + assert isinstance(instance.messaging, InterProcessMessaging) + assert instance.messaging.worker_index is None + + # Validate start works and sets correct state + start_time = time.time() + 0.1 + await instance.start(start_time=start_time) + assert instance.state is not None + assert isinstance(instance.state, WorkerGroupState) + assert not instance.requests_generated_event.is_set() + assert not instance.constraint_reached_event.is_set() + assert not instance.shutdown_event.is_set() + assert not instance.error_event.is_set() + + # Test iter updates + requests_tracker = {} + + async for ( + response, + request, + request_info, + scheduler_state, + ) in instance.request_updates(): + # Validate returned request + assert request is not None + + # Validate returned request info and response + assert request_info is not None + assert isinstance(request_info, ScheduledRequestInfo) + assert request_info.request_id is not None + assert request_info.status is not None + if request_info.request_id not in requests_tracker: + requests_tracker[request_info.request_id] = { + "received_pending": 0, + "received_in_progress": 0, + "received_resolved": 0, + "received_cancelled": 0, + } + assert request_info.scheduler_node_id > -1 + assert request_info.scheduler_process_id > -1 + assert request_info.scheduler_start_time == start_time + assert request_info.scheduler_timings is not None + if request_info.status == "pending": + requests_tracker[request_info.request_id]["received_pending"] += 1 + assert request_info.scheduler_timings.dequeued is not None + assert request_info.scheduler_timings.targeted_start is not None + assert request_info.scheduler_timings.targeted_start >= start_time + elif request_info.status == "in_progress": + requests_tracker[request_info.request_id]["received_in_progress"] += 1 + assert request_info.scheduler_timings.scheduled_at is not None + assert ( + request_info.scheduler_timings.scheduled_at + >= request_info.scheduler_timings.dequeued + ) + assert request_info.scheduler_timings.resolve_start is not None + assert ( + request_info.scheduler_timings.resolve_start + >= request_info.scheduler_timings.scheduled_at + ) + elif request_info.status == "completed": + requests_tracker[request_info.request_id]["received_resolved"] += 1 + assert response is not None + assert request_info.scheduler_timings.resolve_end is not None + assert ( + request_info.scheduler_timings.resolve_end + > request_info.scheduler_timings.resolve_start + ) + assert request_info.request_timings is not None + assert isinstance(request_info.request_timings, MockRequestTimings) + assert request_info.request_timings.request_start is not None + assert ( + request_info.request_timings.request_start + >= request_info.scheduler_timings.targeted_start + ) + assert request_info.request_timings.request_end is not None + assert ( + request_info.request_timings.request_end + >= request_info.request_timings.request_start + ) + elif request_info.status in ("errored", "cancelled"): + assert response is None + requests_tracker[request_info.request_id]["received_resolved"] += 1 + assert request_info.scheduler_timings.resolve_end is not None + assert ( + request_info.scheduler_timings.resolve_end + > request_info.scheduler_start_time + ) + if request_info.status == "cancelled": + 
requests_tracker[request_info.request_id]["received_cancelled"] += 1 + + # Validate state structure + assert scheduler_state is not None + assert isinstance(scheduler_state, SchedulerState) + assert scheduler_state.node_id > -1 + assert scheduler_state.start_time == start_time + assert scheduler_state.end_time is not None + if constructor_args.get("constraints"): + assert scheduler_state.remaining_fraction is not None + assert scheduler_state.remaining_fraction >= 0.0 + assert scheduler_state.remaining_fraction <= 1.0 + if constructor_args.get("constraints", {}).get("max_num") is not None: + assert scheduler_state.remaining_requests is not None + assert scheduler_state.remaining_requests >= 0 + assert ( + scheduler_state.remaining_requests + <= constructor_args["constraints"]["max_num"].max_num + ) + if constructor_args.get("constraints", {}).get("max_duration") is not None: + assert scheduler_state.remaining_duration is not None + assert scheduler_state.remaining_duration >= 0.0 + assert ( + scheduler_state.remaining_duration + <= constructor_args["constraints"]["max_duration"].max_duration + ) + assert scheduler_state.created_requests >= 0 + assert scheduler_state.queued_requests >= 0 + assert scheduler_state.pending_requests >= 0 + assert scheduler_state.processing_requests >= 0 + assert scheduler_state.processed_requests >= 0 + assert scheduler_state.successful_requests >= 0 + assert scheduler_state.errored_requests >= 0 + assert scheduler_state.cancelled_requests >= 0 + + # Validate correctness of all updates + for _, counts in requests_tracker.items(): + assert counts["received_cancelled"] in (0, 1) + if counts["received_cancelled"] == 0: + assert counts["received_pending"] == 1 + assert counts["received_in_progress"] >= 1 + assert counts["received_resolved"] == 1 + assert scheduler_state is not None # last yielded state + assert scheduler_state.end_time > scheduler_state.start_time + assert scheduler_state.end_queuing_time is not None + assert scheduler_state.end_queuing_constraints is not None + assert scheduler_state.end_processing_time is not None + assert scheduler_state.end_processing_time >= scheduler_state.start_time + assert scheduler_state.end_processing_constraints is not None + assert scheduler_state.scheduler_constraints is not None + assert scheduler_state.created_requests == len(requests_tracker) + assert scheduler_state.queued_requests == 0 + assert scheduler_state.pending_requests == 0 + assert scheduler_state.processing_requests == 0 + assert scheduler_state.processed_requests == len(requests_tracker) + assert scheduler_state.successful_requests >= 0 + assert scheduler_state.errored_requests >= 0 + assert scheduler_state.cancelled_requests >= 0 + assert ( + scheduler_state.successful_requests + + scheduler_state.errored_requests + + scheduler_state.cancelled_requests + == len(requests_tracker) + ) + if constructor_args.get("constraints"): + assert list(scheduler_state.scheduler_constraints.keys()) == list( + constructor_args["constraints"].keys() + ) + assert scheduler_state.remaining_fraction == 0.0 + if "max_num" in constructor_args["constraints"]: + assert "max_num" in scheduler_state.end_queuing_constraints + assert "max_num" in scheduler_state.end_processing_constraints + max_num = constructor_args["constraints"]["max_num"].max_num + assert scheduler_state.created_requests == max_num + assert scheduler_state.successful_requests == max_num + assert scheduler_state.errored_requests == 0 + assert scheduler_state.cancelled_requests == 0 + if "max_duration" in 
constructor_args["constraints"]: + assert "max_duration" in scheduler_state.end_queuing_constraints + assert "max_duration" in scheduler_state.end_processing_constraints + assert scheduler_state.remaining_duration == 0.0 + else: + assert "requests_exhausted" in scheduler_state.scheduler_constraints + assert "requests_exhausted" in scheduler_state.end_queuing_constraints + assert "requests_exhausted" in scheduler_state.end_processing_constraints + assert scheduler_state.remaining_fraction is None + assert scheduler_state.remaining_requests is None + assert scheduler_state.remaining_duration is None + + # Test shutdown + exceptions = await instance.shutdown() + + # Check valid shutdown behavior + assert isinstance(exceptions, list) + assert len(exceptions) == 0 + assert instance.messaging is None + assert instance.state is None + assert instance.processes is None + assert instance.startup_barrier is None + assert instance.requests_generated_event is None + assert instance.constraint_reached_event is None + assert instance.shutdown_event is None + assert instance.error_event is None + assert instance.mp_manager is None + assert instance.mp_context is None From a7ae737e6b9dcba8010f5558ea6ab3c59a0f7d80 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 12:01:24 +0000 Subject: [PATCH 09/90] Standardize on plural for modules/packages and update from copilot review Signed-off-by: Mark Kurtz --- src/guidellm/benchmark/scenario.py | 2 +- src/guidellm/scheduler/__init__.py | 4 ++-- .../scheduler/{environment.py => environments.py} | 2 +- src/guidellm/scheduler/scheduler.py | 4 ++-- src/guidellm/scheduler/{strategy.py => strategies.py} | 0 src/guidellm/scheduler/worker.py | 2 +- src/guidellm/scheduler/worker_group.py | 2 +- tests/unit/scheduler/test_constraints.py | 10 ++++++++-- .../scheduler/{test_strategy.py => test_strategies.py} | 4 ++-- 9 files changed, 18 insertions(+), 12 deletions(-) rename src/guidellm/scheduler/{environment.py => environments.py} (99%) rename src/guidellm/scheduler/{strategy.py => strategies.py} (100%) rename tests/unit/scheduler/{test_strategy.py => test_strategies.py} (99%) diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index af43e426..042b25b1 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -12,7 +12,7 @@ from guidellm.backend.backend import BackendType from guidellm.benchmark.profile import ProfileType from guidellm.objects.pydantic import StandardBaseModel -from guidellm.scheduler.strategy import StrategyType +from guidellm.scheduler.strategies import StrategyType __ALL__ = ["Scenario", "GenerativeTextScenario", "get_builtin_scenarios"] diff --git a/src/guidellm/scheduler/__init__.py b/src/guidellm/scheduler/__init__.py index 24d73df2..64647424 100644 --- a/src/guidellm/scheduler/__init__.py +++ b/src/guidellm/scheduler/__init__.py @@ -11,7 +11,7 @@ SerializableConstraintInitializer, UnserializableConstraintInitializer, ) -from .environment import Environment, NonDistributedEnvironment +from .environments import Environment, NonDistributedEnvironment from .objects import ( BackendInterface, BackendT, @@ -27,7 +27,7 @@ SchedulerUpdateActionProgress, ) from .scheduler import Scheduler -from .strategy import ( +from .strategies import ( AsyncConstantStrategy, AsyncPoissonStrategy, ConcurrentStrategy, diff --git a/src/guidellm/scheduler/environment.py b/src/guidellm/scheduler/environments.py similarity index 99% rename from src/guidellm/scheduler/environment.py rename to 
src/guidellm/scheduler/environments.py index 3bc29681..6234f8f6 100644 --- a/src/guidellm/scheduler/environment.py +++ b/src/guidellm/scheduler/environments.py @@ -32,7 +32,7 @@ ScheduledRequestInfo, SchedulerState, ) -from guidellm.scheduler.strategy import SchedulingStrategy +from guidellm.scheduler.strategies import SchedulingStrategy from guidellm.settings import settings from guidellm.utils import InfoMixin diff --git a/src/guidellm/scheduler/scheduler.py b/src/guidellm/scheduler/scheduler.py index 8089c64c..de0660e2 100644 --- a/src/guidellm/scheduler/scheduler.py +++ b/src/guidellm/scheduler/scheduler.py @@ -17,7 +17,7 @@ Constraint, ConstraintsInitializerFactory, ) -from guidellm.scheduler.environment import Environment, NonDistributedEnvironment +from guidellm.scheduler.environments import Environment, NonDistributedEnvironment from guidellm.scheduler.objects import ( BackendInterface, MultiTurnRequestT, @@ -26,7 +26,7 @@ ScheduledRequestInfo, SchedulerState, ) -from guidellm.scheduler.strategy import SchedulingStrategy +from guidellm.scheduler.strategies import SchedulingStrategy from guidellm.scheduler.worker_group import WorkerProcessGroup from guidellm.utils.singleton import ThreadSafeSingletonMixin diff --git a/src/guidellm/scheduler/strategy.py b/src/guidellm/scheduler/strategies.py similarity index 100% rename from src/guidellm/scheduler/strategy.py rename to src/guidellm/scheduler/strategies.py diff --git a/src/guidellm/scheduler/worker.py b/src/guidellm/scheduler/worker.py index 834c0921..5f2fb74b 100644 --- a/src/guidellm/scheduler/worker.py +++ b/src/guidellm/scheduler/worker.py @@ -37,7 +37,7 @@ ScheduledRequestInfo, SchedulerMessagingPydanticRegistry, ) -from guidellm.scheduler.strategy import ScheduledRequestTimings +from guidellm.scheduler.strategies import ScheduledRequestTimings from guidellm.utils import ( InterProcessMessaging, wait_for_sync_barrier, diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index 7369e5af..c1d516f1 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -33,7 +33,7 @@ SchedulerState, SchedulerUpdateAction, ) -from guidellm.scheduler.strategy import SchedulingStrategy +from guidellm.scheduler.strategies import SchedulingStrategy from guidellm.scheduler.worker import WorkerProcess from guidellm.settings import settings from guidellm.utils import ( diff --git a/tests/unit/scheduler/test_constraints.py b/tests/unit/scheduler/test_constraints.py index 0cdec5e2..931af413 100644 --- a/tests/unit/scheduler/test_constraints.py +++ b/tests/unit/scheduler/test_constraints.py @@ -286,8 +286,14 @@ def test_create_constraint_raises(self, valid_instances): def test_call_raises(self, valid_instances): """Test that calling constraint raises RuntimeError.""" instance, _ = valid_instances - state = SchedulerState() - request = ScheduledRequestInfo() + state = SchedulerState(node_id="test_node", num_processes=1, start_time=0.0) + request = ScheduledRequestInfo( + request_id="test_request", + status="pending", + scheduler_node_id="test_node", + scheduler_process_id=1, + scheduler_start_time=0.0, + ) with pytest.raises( RuntimeError, match="Cannot invoke unserializable constraint" diff --git a/tests/unit/scheduler/test_strategy.py b/tests/unit/scheduler/test_strategies.py similarity index 99% rename from tests/unit/scheduler/test_strategy.py rename to tests/unit/scheduler/test_strategies.py index 8cb91d82..67a2d77d 100644 --- a/tests/unit/scheduler/test_strategy.py +++ 
b/tests/unit/scheduler/test_strategies.py @@ -25,7 +25,7 @@ SynchronousStrategy, ThroughputStrategy, ) -from guidellm.scheduler.strategy import ( +from guidellm.scheduler.strategies import ( _exponential_decay_fraction, _exponential_decay_tau, ) @@ -35,7 +35,7 @@ def test_strategy_type(): """Test that StrategyType is defined correctly as a Literal type.""" # StrategyType is a type alias/literal type, we can't test its runtime value # but we can test that it exists and is importable - from guidellm.scheduler.strategy import StrategyType + from guidellm.scheduler.strategies import StrategyType assert StrategyType is not None From 02554b034615dd3f6debc81ad564e0314f9c6db5 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 04:16:08 +0000 Subject: [PATCH 10/90] backend refactor implementations Signed-off-by: Mark Kurtz --- src/guidellm/backend/__init__.py | 27 +- src/guidellm/backend/backend.py | 290 +--- src/guidellm/backend/objects.py | 156 ++ src/guidellm/backend/openai.py | 1108 +++++++------ src/guidellm/backend/response.py | 136 -- tests/unit/backend/test_backend.py | 456 ++++-- tests/unit/backend/test_objects.py | 467 ++++++ tests/unit/backend/test_openai_backend.py | 1373 ++++++++++++++--- .../test_openai_backend_custom_configs.py | 88 -- tests/unit/backend/test_response.py | 192 --- 10 files changed, 2733 insertions(+), 1560 deletions(-) create mode 100644 src/guidellm/backend/objects.py delete mode 100644 src/guidellm/backend/response.py create mode 100644 tests/unit/backend/test_objects.py delete mode 100644 tests/unit/backend/test_openai_backend_custom_configs.py delete mode 100644 tests/unit/backend/test_response.py diff --git a/src/guidellm/backend/__init__.py b/src/guidellm/backend/__init__.py index 315a28f0..064722ac 100644 --- a/src/guidellm/backend/__init__.py +++ b/src/guidellm/backend/__init__.py @@ -1,23 +1,26 @@ +""" +Backend infrastructure for GuideLLM language model interactions. + +Provides abstract base classes, implemented backends, request/response objects, +and timing utilities for standardized communication with LLM providers. +""" + from .backend import ( Backend, BackendType, ) -from .openai import CHAT_COMPLETIONS_PATH, TEXT_COMPLETIONS_PATH, OpenAIHTTPBackend -from .response import ( - RequestArgs, - ResponseSummary, - StreamingResponseType, - StreamingTextResponse, +from .objects import ( + GenerationRequest, + GenerationRequestTimings, + GenerationResponse, ) +from .openai import OpenAIHTTPBackend __all__ = [ - "CHAT_COMPLETIONS_PATH", - "TEXT_COMPLETIONS_PATH", "Backend", "BackendType", + "GenerationRequest", + "GenerationRequestTimings", + "GenerationResponse", "OpenAIHTTPBackend", - "RequestArgs", - "ResponseSummary", - "StreamingResponseType", - "StreamingTextResponse", ] diff --git a/src/guidellm/backend/backend.py b/src/guidellm/backend/backend.py index ceffdc77..c9a73535 100644 --- a/src/guidellm/backend/backend.py +++ b/src/guidellm/backend/backend.py @@ -1,13 +1,27 @@ -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator -from pathlib import Path -from typing import Any, Literal, Optional, Union +""" +Backend interface and registry for generative AI model interactions. -from loguru import logger -from PIL import Image +Provides the abstract base class for implementing backends that communicate with +generative AI models. Backends handle the lifecycle of generation requests. 
-from guidellm.backend.response import ResponseSummary, StreamingTextResponse -from guidellm.settings import settings +Classes: + Backend: Abstract base class for generative AI backends with registry support. + +Type Aliases: + BackendType: Literal type defining supported backend implementations. +""" + +from __future__ import annotations + +from abc import abstractmethod +from typing import Literal + +from guidellm.backend.objects import ( + GenerationRequest, + GenerationResponse, +) +from guidellm.scheduler import BackendInterface +from guidellm.utils import RegistryMixin __all__ = [ "Backend", @@ -18,242 +32,88 @@ BackendType = Literal["openai_http"] -class Backend(ABC): +class Backend( + RegistryMixin["type[Backend]"], + BackendInterface[GenerationRequest, GenerationResponse], +): """ - Abstract base class for generative AI backends. - - This class provides a common interface for creating and interacting with different - generative AI backends. Subclasses should implement the abstract methods to - define specific backend behavior. - - :cvar _registry: A registration dictionary that maps BackendType to backend classes. - :param type_: The type of the backend. + Base class for generative AI backends with registry and lifecycle. + + Provides a standard interface for backends that communicate with generative AI + models. Combines the registry pattern for automatic discovery with a defined + lifecycle for process-based distributed execution. + + Backend lifecycle phases: + 1. Creation and configuration + 2. Process startup - Initialize resources in worker process + 3. Validation - Verify backend readiness + 4. Request resolution - Process generation requests + 5. Process shutdown - Clean up resources + + Backend state (excluding process_startup resources) must be pickleable for + distributed execution across process boundaries. + + Example: + :: + @Backend.register("my_backend") + class MyBackend(Backend): + def __init__(self, api_key: str): + super().__init__("my_backend") + self.api_key = api_key + + async def process_startup(self): + self.client = MyAPIClient(self.api_key) + + backend = Backend.create("my_backend", api_key="secret") """ - _registry: dict[BackendType, "type[Backend]"] = {} - - @classmethod - def register(cls, backend_type: BackendType): - """ - A decorator to register a backend class in the backend registry. - - :param backend_type: The type of backend to register. - :type backend_type: BackendType - :return: The decorated backend class. - :rtype: Type[Backend] - """ - if backend_type in cls._registry: - raise ValueError(f"Backend type already registered: {backend_type}") - - if not issubclass(cls, Backend): - raise TypeError("Only subclasses of Backend can be registered") - - def inner_wrapper(wrapped_class: type["Backend"]): - cls._registry[backend_type] = wrapped_class - logger.info("Registered backend type: {}", backend_type) - return wrapped_class - - return inner_wrapper - @classmethod - def create(cls, type_: BackendType, **kwargs) -> "Backend": + def create(cls, type_: BackendType, **kwargs) -> Backend: """ - Factory method to create a backend instance based on the backend type. + Create a backend instance based on the backend type. :param type_: The type of backend to create. - :type type_: BackendType :param kwargs: Additional arguments for backend initialization. :return: An instance of a subclass of Backend. - :rtype: Backend :raises ValueError: If the backend type is not registered. 
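+
+        Example (a minimal sketch; assumes the registered "openai_http" backend
+        and a reachable OpenAI-compatible server at the target URL):
+            ::
+                backend = Backend.create("openai_http", target="http://0.0.0.0:8000")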
""" - logger.info("Creating backend of type {}", type_) + backend = cls.get_registered_object(type_) - if type_ not in cls._registry: - err = ValueError(f"Unsupported backend type: {type_}") - logger.error("{}", err) - raise err + if backend is None: + raise ValueError( + f"Backend type '{type_}' is not registered. " + f"Available types: {list(cls.registry.keys()) if cls.registry else []}" + ) - return Backend._registry[type_](**kwargs) + return backend(**kwargs) def __init__(self, type_: BackendType): - self._type = type_ - - @property - def type_(self) -> BackendType: """ - :return: The type of the backend. - """ - return self._type + Initialize a backend instance. - @property - @abstractmethod - def target(self) -> str: - """ - :return: The target location for the backend. + :param type_: The backend type identifier. """ - ... + self.type_ = type_ @property - @abstractmethod - def model(self) -> Optional[str]: + def processes_limit(self) -> int | None: """ - :return: The model used for the backend requests. + :return: Maximum number of worker processes supported. None if unlimited. """ - ... + return None @property - @abstractmethod - def info(self) -> dict[str, Any]: - """ - :return: The information about the backend. - """ - ... - - @abstractmethod - async def reset(self) -> None: + def requests_limit(self) -> int | None: """ - Reset the connection object. This is useful for backends that - reuse connections or have state that needs to be cleared. + :return: Maximum number of concurrent requests supported globally. + None if unlimited. """ - ... - - async def validate(self): - """ - Handle final setup and validate the backend is ready for use. - If not successful, raises the appropriate exception. - """ - logger.info("{} validating backend {}", self.__class__.__name__, self.type_) - await self.check_setup() - models = await self.available_models() - if not models: - raise ValueError("No models available for the backend") - - # Use the preferred route defined in the global settings when performing the - # validation request. This avoids calling an unavailable endpoint (ie - # /v1/completions) when the deployment only supports the chat completions - # endpoint. - if settings.preferred_route == "chat_completions": - async for _ in self.chat_completions( # type: ignore[attr-defined] - content="Test connection", output_token_count=1 - ): - pass - else: - async for _ in self.text_completions( # type: ignore[attr-defined] - prompt="Test connection", output_token_count=1 - ): - pass - - await self.reset() - - @abstractmethod - async def check_setup(self): - """ - Check the setup for the backend. - If unsuccessful, raises the appropriate exception. - - :raises ValueError: If the setup check fails. - """ - ... - - @abstractmethod - async def prepare_multiprocessing(self): - """ - Prepare the backend for use in a multiprocessing environment. - This is useful for backends that have instance state that can not - be shared across processes and should be cleared out and re-initialized - for each new process. - """ - ... - - @abstractmethod - async def available_models(self) -> list[str]: - """ - Get the list of available models for the backend. - - :return: The list of available models. - :rtype: List[str] - """ - ... 
+ return None @abstractmethod - async def text_completions( - self, - prompt: Union[str, list[str]], - request_id: Optional[str] = None, - prompt_token_count: Optional[int] = None, - output_token_count: Optional[int] = None, - **kwargs, - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: + async def default_model(self) -> str | None: """ - Generate text only completions for the given prompt. - Does not support multiple modalities, complicated chat interfaces, - or chat templates. Specifically, it requests with only the prompt. - - :param prompt: The prompt (or list of prompts) to generate a completion for. - If a list is supplied, these are concatenated and run through the model - for a single prompt. - :param request_id: The unique identifier for the request, if any. - Added to logging statements and the response for tracking purposes. - :param prompt_token_count: The number of tokens measured in the prompt, if any. - Returned in the response stats for later analysis, if applicable. - :param output_token_count: If supplied, the number of tokens to enforce - generation of for the output for this request. - :param kwargs: Additional keyword arguments to pass with the request. - :return: An async generator that yields a StreamingTextResponse for start, - a StreamingTextResponse for each received iteration, - and a ResponseSummary for the final response. - """ - ... - - @abstractmethod - async def chat_completions( - self, - content: Union[ - str, - list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], - Any, - ], - request_id: Optional[str] = None, - prompt_token_count: Optional[int] = None, - output_token_count: Optional[int] = None, - raw_content: bool = False, - **kwargs, - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: - """ - Generate chat completions for the given content. - Supports multiple modalities, complicated chat interfaces, and chat templates. - Specifically, it requests with the content, which can be any combination of - text, images, and audio provided the target model supports it, - and returns the output text. Additionally, any chat templates - for the model are applied within the backend. - - :param content: The content (or list of content) to generate a completion for. - This supports any combination of text, images, and audio (model dependent). - Supported text only request examples: - content="Sample prompt", content=["Sample prompt", "Second prompt"], - content=[{"type": "text", "value": "Sample prompt"}. - Supported text and image request examples: - content=["Describe the image", PIL.Image.open("image.jpg")], - content=["Describe the image", Path("image.jpg")], - content=["Describe the image", {"type": "image_url", - "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}]. - Supported text and audio request examples: - content=["Transcribe the audio", Path("audio.wav")], - content=["Transcribe the audio", {"type": "input_audio", - "input_audio": {"data": f"{base64_bytes}", "format": "wav}]. - Additionally, if raw_content=True then the content is passed directly to the - backend without any processing. - :param request_id: The unique identifier for the request, if any. - Added to logging statements and the response for tracking purposes. - :param prompt_token_count: The number of tokens measured in the prompt, if any. - Returned in the response stats for later analysis, if applicable. 
- :param output_token_count: If supplied, the number of tokens to enforce - generation of for the output for this request. - :param kwargs: Additional keyword arguments to pass with the request. - :return: An async generator that yields a StreamingTextResponse for start, - a StreamingTextResponse for each received iteration, - and a ResponseSummary for the final response. + :return: The default model name or identifier for generation requests. """ ... diff --git a/src/guidellm/backend/objects.py b/src/guidellm/backend/objects.py new file mode 100644 index 00000000..05280940 --- /dev/null +++ b/src/guidellm/backend/objects.py @@ -0,0 +1,156 @@ +""" +Backend object models for request and response handling. + +Provides standardized models for generation requests, responses, and timing +information to ensure consistent data handling across different backend +implementations. +""" + +import uuid +from typing import Any, Literal, Optional + +from pydantic import Field + +from guidellm.scheduler import ( + MeasuredRequestTimings, + SchedulerMessagingPydanticRegistry, +) +from guidellm.utils import StandardBaseModel + +__all__ = [ + "GenerationRequest", + "GenerationRequestTimings", + "GenerationResponse", +] + + +@SchedulerMessagingPydanticRegistry.register() +class GenerationRequest(StandardBaseModel): + """Request model for backend generation operations.""" + + request_id: str = Field( + default_factory=lambda: str(uuid.uuid4()), + description="Unique identifier for the request.", + ) + request_type: Literal["text_completions", "chat_completions"] = Field( + default="text_completions", + description=( + "Type of request. 'text_completions' uses backend.text_completions(), " + "'chat_completions' uses backend.chat_completions()." + ), + ) + content: Any = Field( + description=( + "Request content. For text_completions: string or list of strings. " + "For chat_completions: string, list of messages, or raw content " + "(set raw_content=True in params)." + ) + ) + params: dict[str, Any] = Field( + default_factory=dict, + description=( + "Additional parameters passed to backend methods. " + "Common: max_tokens, temperature, stream." + ), + ) + stats: dict[Literal["prompt_tokens"], int] = Field( + default_factory=dict, + description="Request statistics including prompt token count.", + ) + constraints: dict[Literal["output_tokens"], int] = Field( + default_factory=dict, + description="Request constraints such as maximum output tokens.", + ) + + +@SchedulerMessagingPydanticRegistry.register() +class GenerationResponse(StandardBaseModel): + """Response model for backend generation operations.""" + + request_id: str = Field( + description="Unique identifier matching the original GenerationRequest." + ) + request_args: dict[str, Any] = Field( + description="Arguments passed to the backend for this request." + ) + value: Optional[str] = Field( + default=None, + description="Complete generated text content. None for streaming responses.", + ) + delta: Optional[str] = Field( + default=None, description="Incremental text content for streaming responses." + ) + iterations: int = Field( + default=0, description="Number of generation iterations completed." + ) + request_prompt_tokens: Optional[int] = Field( + default=None, description="Token count from the original request prompt." 
+ ) + request_output_tokens: Optional[int] = Field( + default=None, + description="Expected output token count from the original request.", + ) + response_prompt_tokens: Optional[int] = Field( + default=None, description="Actual prompt token count reported by the backend." + ) + response_output_tokens: Optional[int] = Field( + default=None, description="Actual output token count reported by the backend." + ) + + @property + def prompt_tokens(self) -> Optional[int]: + """ + :return: The number of prompt tokens used in the request + (response_prompt_tokens if available, otherwise request_prompt_tokens). + """ + return self.response_prompt_tokens or self.request_prompt_tokens + + @property + def output_tokens(self) -> Optional[int]: + """ + :return: The number of output tokens generated in the response + (response_output_tokens if available, otherwise request_output_tokens). + """ + return self.response_output_tokens or self.request_output_tokens + + @property + def total_tokens(self) -> Optional[int]: + """ + :return: The total number of tokens used in the request and response. + Sum of prompt_tokens and output_tokens. + """ + if self.prompt_tokens is None or self.output_tokens is None: + return None + return self.prompt_tokens + self.output_tokens + + def preferred_prompt_tokens( + self, preferred_source: Literal["request", "response"] + ) -> Optional[int]: + if preferred_source == "request": + return self.request_prompt_tokens or self.response_prompt_tokens + else: + return self.response_prompt_tokens or self.request_prompt_tokens + + def preferred_output_tokens( + self, preferred_source: Literal["request", "response"] + ) -> Optional[int]: + if preferred_source == "request": + return self.request_output_tokens or self.response_output_tokens + else: + return self.response_output_tokens or self.request_output_tokens + + +@SchedulerMessagingPydanticRegistry.register() +@MeasuredRequestTimings.register("generation_request_timings") +class GenerationRequestTimings(MeasuredRequestTimings): + """Timing model for tracking generation request lifecycle events.""" + + timings_type: Literal["generation_request_timings"] = "generation_request_timings" + first_iteration: Optional[float] = Field( + default=None, + description="Unix timestamp when the first generation iteration began.", + ) + last_iteration: Optional[float] = Field( + default=None, + description="Unix timestamp when the last generation iteration completed.", + ) diff --git a/src/guidellm/backend/openai.py b/src/guidellm/backend/openai.py index e1fcdf89..d616be6a 100644 --- a/src/guidellm/backend/openai.py +++ b/src/guidellm/backend/openai.py @@ -1,705 +1,641 @@ +""" +OpenAI HTTP backend implementation for GuideLLM. + +Provides HTTP-based backend for OpenAI-compatible servers including OpenAI API, +vLLM servers, and other compatible inference engines. Supports text and chat +completions with streaming, authentication, and multimodal capabilities. + +Classes: + UsageStats: Token usage statistics for generation requests. + OpenAIHTTPBackend: HTTP backend for OpenAI-compatible API servers. 
+""" + import base64 +import contextlib import copy import json import time -from collections.abc import AsyncGenerator +from collections.abc import AsyncIterator from pathlib import Path -from typing import Any, Literal, Optional, Union +from typing import Any, ClassVar, Optional, Union import httpx -from loguru import logger from PIL import Image +from pydantic import dataclasses from guidellm.backend.backend import Backend -from guidellm.backend.response import ( - RequestArgs, - ResponseSummary, - StreamingTextResponse, +from guidellm.backend.objects import ( + GenerationRequest, + GenerationRequestTimings, + GenerationResponse, ) -from guidellm.settings import settings +from guidellm.scheduler import ScheduledRequestInfo -__all__ = [ - "CHAT_COMPLETIONS", - "CHAT_COMPLETIONS_PATH", - "MODELS", - "TEXT_COMPLETIONS", - "TEXT_COMPLETIONS_PATH", - "OpenAIHTTPBackend", -] +__all__ = ["OpenAIHTTPBackend", "UsageStats"] -TEXT_COMPLETIONS_PATH = "/v1/completions" -CHAT_COMPLETIONS_PATH = "/v1/chat/completions" +@dataclasses.dataclass +class UsageStats: + """Token usage statistics for generation requests.""" -EndpointType = Literal["chat_completions", "models", "text_completions"] -CHAT_COMPLETIONS: EndpointType = "chat_completions" -MODELS: EndpointType = "models" -TEXT_COMPLETIONS: EndpointType = "text_completions" + prompt_tokens: Optional[int] = None + output_tokens: Optional[int] = None @Backend.register("openai_http") class OpenAIHTTPBackend(Backend): """ - A HTTP-based backend implementation for requests to an OpenAI compatible server. - For example, a vLLM server instance or requests to OpenAI's API. - - :param target: The target URL string for the OpenAI server. ex: http://0.0.0.0:8000 - :param model: The model to use for all requests on the target server. - If none is provided, the first available model will be used. - :param api_key: The API key to use for requests to the OpenAI server. - If provided, adds an Authorization header with the value - "Authorization: Bearer {api_key}". - If not provided, no Authorization header is added. - :param organization: The organization to use for requests to the OpenAI server. - For example, if set to "org_123", adds an OpenAI-Organization header with the - value "OpenAI-Organization: org_123". - If not provided, no OpenAI-Organization header is added. - :param project: The project to use for requests to the OpenAI server. - For example, if set to "project_123", adds an OpenAI-Project header with the - value "OpenAI-Project: project_123". - If not provided, no OpenAI-Project header is added. - :param timeout: The timeout to use for requests to the OpenAI server. - If not provided, the default timeout provided from settings is used. - :param http2: If True, uses HTTP/2 for requests to the OpenAI server. - Defaults to True. - :param follow_redirects: If True, the HTTP client will follow redirect responses. - If not provided, the default value from settings is used. - :param max_output_tokens: The maximum number of tokens to request for completions. - If not provided, the default maximum tokens provided from settings is used. - :param extra_query: Query parameters to include in requests to the OpenAI server. - If "chat_completions", "models", or "text_completions" are included as keys, - the values of these keys will be used as the parameters for the respective - endpoint. - If not provided, no extra query parameters are added. - :param extra_body: Body parameters to include in requests to the OpenAI server. 
- If "chat_completions", "models", or "text_completions" are included as keys, - the values of these keys will be included in the body for the respective - endpoint. - If not provided, no extra body parameters are added. - :param remove_from_body: Parameters that should be removed from the body of each - request. - If not provided, no parameters are removed from the body. + HTTP backend for OpenAI-compatible servers. + + Supports OpenAI API, vLLM servers, and other compatible endpoints with + text/chat completions, streaming, authentication, and multimodal inputs. + Handles request formatting, response parsing, error handling, and token + usage tracking with flexible parameter customization. + + Example: + :: + backend = OpenAIHTTPBackend( + target="http://localhost:8000", + model="gpt-3.5-turbo", + api_key="your-api-key" + ) + + await backend.process_startup() + async for response, request_info in backend.resolve(request, info): + process_response(response) + await backend.process_shutdown() """ + HEALTH_PATH: ClassVar[str] = "/health" + MODELS_PATH: ClassVar[str] = "/v1/models" + TEXT_COMPLETIONS_PATH: ClassVar[str] = "/v1/completions" + CHAT_COMPLETIONS_PATH: ClassVar[str] = "/v1/chat/completions" + + MODELS_KEY: ClassVar[str] = "models" + TEXT_COMPLETIONS_KEY: ClassVar[str] = "text_completions" + CHAT_COMPLETIONS_KEY: ClassVar[str] = "chat_completions" + def __init__( self, - target: Optional[str] = None, + target: str, model: Optional[str] = None, api_key: Optional[str] = None, organization: Optional[str] = None, project: Optional[str] = None, - timeout: Optional[float] = None, - http2: Optional[bool] = True, - follow_redirects: Optional[bool] = None, + timeout: float = 60.0, + http2: bool = True, + follow_redirects: bool = True, max_output_tokens: Optional[int] = None, + stream_response: bool = True, extra_query: Optional[dict] = None, extra_body: Optional[dict] = None, remove_from_body: Optional[list[str]] = None, headers: Optional[dict] = None, - verify: Optional[bool] = None, + verify: bool = False, ): - super().__init__(type_="openai_http") - self._target = target or settings.openai.base_url - - if not self._target: - raise ValueError("Target URL must be provided for OpenAI HTTP backend.") - - if self._target.endswith("/v1") or self._target.endswith("/v1/"): - # backwards compatability, strip v1 off - self._target = self._target[:-3] - - if self._target.endswith("/"): - self._target = self._target[:-1] - - self._model = model - - # Start with default headers based on other params - default_headers: dict[str, str] = {} - api_key = api_key or settings.openai.api_key - bearer_token = settings.openai.bearer_token - if api_key: - default_headers["Authorization"] = f"Bearer {api_key}" - elif bearer_token: - default_headers["Authorization"] = bearer_token - - self.organization = organization or settings.openai.organization - if self.organization: - default_headers["OpenAI-Organization"] = self.organization - - self.project = project or settings.openai.project - if self.project: - default_headers["OpenAI-Project"] = self.project - - # User-provided headers from kwargs or settings override defaults - merged_headers = default_headers.copy() - merged_headers.update(settings.openai.headers or {}) - if headers: - merged_headers.update(headers) - - # Remove headers with None values for backward compatibility and convenience - self.headers = {k: v for k, v in merged_headers.items() if v is not None} - - self.timeout = timeout if timeout is not None else settings.request_timeout - self.http2 = 
http2 if http2 is not None else settings.request_http2 - self.follow_redirects = ( - follow_redirects - if follow_redirects is not None - else settings.request_follow_redirects - ) - self.verify = verify if verify is not None else settings.openai.verify - self.max_output_tokens = ( - max_output_tokens - if max_output_tokens is not None - else settings.openai.max_output_tokens - ) - self.extra_query = extra_query - self.extra_body = extra_body - self.remove_from_body = remove_from_body - self._async_client: Optional[httpx.AsyncClient] = None - - @property - def target(self) -> str: """ - :return: The target URL string for the OpenAI server. + Initialize OpenAI HTTP backend. + + :param target: Target URL for the OpenAI server (e.g., "http://localhost:8000"). + :param model: Model to use for requests. If None, uses first available model. + :param api_key: API key for authentication. Adds Authorization header + if provided. + :param organization: Organization ID. Adds OpenAI-Organization header + if provided. + :param project: Project ID. Adds OpenAI-Project header if provided. + :param timeout: Request timeout in seconds. Defaults to 60 seconds. + :param http2: Whether to use HTTP/2. Defaults to True. + :param follow_redirects: Whether to follow redirects. Default True. + :param max_output_tokens: Maximum tokens for completions. If None, none is set. + :param stream_response: Whether to stream responses by default. Can be + overridden per request. Defaults to True. + :param extra_query: Additional query parameters. Both general and + endpoint-specific with type keys supported. + :param extra_body: Additional body parameters. Both general and + endpoint-specific with type keys supported. + :param remove_from_body: Parameter names to remove from request bodies. + :param headers: Additional HTTP headers. + :param verify: Whether to verify SSL certificates. Default False. """ - return self._target + super().__init__(type_="openai_http") - @property - def model(self) -> Optional[str]: - """ - :return: The model to use for all requests on the target server. - If validate hasn't been called yet and no model was passed in, - this will be None until validate is called to set the default. - """ - return self._model + # Request Values + self.target = target.rstrip("/").removesuffix("/v1") + self.model = model + self.headers = self._build_headers(api_key, organization, project, headers) + + # Store configuration + self.timeout = timeout + self.http2 = http2 + self.follow_redirects = follow_redirects + self.verify = verify + self.max_output_tokens = max_output_tokens + self.stream_response = stream_response + self.extra_query = extra_query or {} + self.extra_body = extra_body or {} + self.remove_from_body = remove_from_body or [] + + # Runtime state + self._in_process = False + self._async_client: Optional[httpx.AsyncClient] = None @property def info(self) -> dict[str, Any]: """ - :return: The information about the backend. + :return: Dictionary containing backend configuration details. 
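As a quick illustration of the constructor and info property documented here, a minimal sketch (the target URL, model name, and API key are placeholders, not values taken from this patch):

    from guidellm.backend.openai import OpenAIHTTPBackend

    backend = OpenAIHTTPBackend(
        target="http://localhost:8000/v1/",  # trailing "/" and "/v1" are stripped
        model="example-model",               # placeholder model name
        api_key="sk-example",                # becomes "Authorization: Bearer ..."
    )

    assert backend.target == "http://localhost:8000"
    assert backend.headers["Authorization"] == "Bearer sk-example"
    print(backend.info)  # configuration snapshot, including the endpoint paths above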
""" return { - "max_output_tokens": self.max_output_tokens, + "target": self.target, + "model": self.model, + "headers": self.headers, "timeout": self.timeout, "http2": self.http2, "follow_redirects": self.follow_redirects, - "headers": self.headers, - "text_completions_path": TEXT_COMPLETIONS_PATH, - "chat_completions_path": CHAT_COMPLETIONS_PATH, + "verify": self.verify, + "max_output_tokens": self.max_output_tokens, + "stream_response": self.stream_response, + "extra_query": self.extra_query, + "extra_body": self.extra_body, + "remove_from_body": self.remove_from_body, + "health_path": self.HEALTH_PATH, + "models_path": self.MODELS_PATH, + "text_completions_path": self.TEXT_COMPLETIONS_PATH, + "chat_completions_path": self.CHAT_COMPLETIONS_PATH, } - async def reset(self) -> None: + async def process_startup(self): """ - Reset the connection object. This is useful for backends that - reuse connections or have state that needs to be cleared. - For this backend, it closes the async client if it exists. + Initialize HTTP client and backend resources. + + :raises RuntimeError: If backend is already initialized. + :raises httpx.Exception: If HTTP client cannot be created. """ - if self._async_client is not None: - await self._async_client.aclose() + if self._in_process: + raise RuntimeError("Backend already started up for process.") + + self._async_client = httpx.AsyncClient( + http2=self.http2, + timeout=self.timeout, + follow_redirects=self.follow_redirects, + verify=self.verify, + ) + self._in_process = True - async def check_setup(self): + async def process_shutdown(self): """ - Check if the backend is setup correctly and can be used for requests. - Specifically, if a model is not provided, it grabs the first available model. - If no models are available, raises a ValueError. - If a model is provided and not available, raises a ValueError. + Clean up HTTP client and backend resources. - :raises ValueError: If no models or the provided model is not available. + :raises RuntimeError: If backend was not properly initialized. + :raises httpx.Exception: If HTTP client cannot be closed. """ - models = await self.available_models() - if not models: - raise ValueError(f"No models available for target: {self.target}") - - if not self.model: - self._model = models[0] - elif self.model not in models: - raise ValueError( - f"Model {self.model} not found in available models:" - f"{models} for target: {self.target}" - ) + if not self._in_process: + raise RuntimeError("Backend not started up for process.") + + await self._async_client.aclose() # type: ignore [union-attr] + self._async_client = None + self._in_process = False - async def prepare_multiprocessing(self): + async def validate(self): """ - Prepare the backend for use in a multiprocessing environment. - Clears out the sync and async clients to ensure they are re-initialized - for each process. + Validate backend configuration and connectivity. + + Validate backend configuration and connectivity through test requests, + and auto-selects first available model if none is configured. + + :raises RuntimeError: If backend cannot connect or validate configuration. 
""" - if self._async_client is not None: - await self._async_client.aclose() - self._async_client = None + self._check_in_process() + + if self.model: + with contextlib.suppress(httpx.TimeoutException, httpx.HTTPStatusError): + # Model is set, use /health endpoint as first check + target = f"{self.target}{self.HEALTH_PATH}" + headers = self._get_headers() + response = await self._async_client.get(target, headers=headers) # type: ignore [union-attr] + response.raise_for_status() + + return + + with contextlib.suppress(httpx.TimeoutException, httpx.HTTPStatusError): + # Check if models endpoint is available next + models = await self.available_models() + if models and not self.model: + self.model = models[0] + elif not self.model: + raise RuntimeError( + "No model available and could not set a default model " + "from the server's available models." + ) + + return + + with contextlib.suppress(httpx.TimeoutException, httpx.HTTPStatusError): + # Last check, fall back on dummy request to text completions + async for _, __ in self.text_completions( + prompt="Validate backend", + request_id="validate", + output_token_count=1, + stream_response=False, + ): + pass + + return + + raise RuntimeError( + "Backend validation failed. Could not connect to the server or " + "validate the backend configuration." + ) async def available_models(self) -> list[str]: """ - Get the available models for the target server using the OpenAI models endpoint: - /v1/models + Get available models from the target server. + + :return: List of model identifiers. + :raises HTTPError: If models endpoint returns an error. + :raises RuntimeError: If backend is not initialized. """ - target = f"{self.target}/v1/models" - headers = self._headers() - params = self._params(MODELS) - response = await self._get_async_client().get( - target, headers=headers, params=params - ) + self._check_in_process() + + target = f"{self.target}{self.MODELS_PATH}" + headers = self._get_headers() + params = self._get_params(self.MODELS_KEY) + response = await self._async_client.get(target, headers=headers, params=params) # type: ignore [union-attr] response.raise_for_status() - models = [] + return [item["id"] for item in response.json()["data"]] + + async def default_model(self) -> Optional[str]: + """ + Get the default model for this backend. + + :return: Model name or None if no model is available. + """ + if self.model or not self._in_process: + return self.model + + models = await self.available_models() + return models[0] if models else None + + async def resolve( + self, + request: GenerationRequest, + request_info: ScheduledRequestInfo, + history: Optional[list[tuple[GenerationRequest, GenerationResponse]]] = None, + ) -> AsyncIterator[tuple[GenerationResponse, ScheduledRequestInfo]]: + """ + Process a generation request and yield progressive responses. + + Handles request formatting, timing tracking, API communication, and + response parsing with streaming support. + + :param request: Generation request with content and parameters. + :param request_info: Request tracking info updated with timing metadata. + :param history: Conversation history. Currently not supported. + :raises NotImplementedError: If history is provided. + :yields: Tuples of (response, updated_request_info) as generation progresses. 
+ """ + self._check_in_process() + if history is not None: + raise NotImplementedError( + "Multi-turn requests with conversation history are not yet supported" + ) + + response = GenerationResponse( + request_id=request.request_id, + request_args={ + "request_type": request.request_type, + "output_token_count": request.constraints.get("output_tokens"), + **request.params, + }, + value="", + request_prompt_tokens=request.stats.get("prompt_tokens"), + request_output_tokens=request.constraints.get("output_tokens"), + ) + request_info.request_timings = GenerationRequestTimings() + request_info.request_timings.request_start = time.time() + + completion_method = ( + self.text_completions + if request.request_type == "text_completions" + else self.chat_completions + ) + completion_kwargs = ( + { + "prompt": request.content, + "request_id": request.request_id, + "output_token_count": request.constraints.get("output_tokens"), + "stream_response": request.params.get("stream", self.stream_response), + **request.params, + } + if request.request_type == "text_completions" + else { + "content": request.content, + "request_id": request.request_id, + "output_token_count": request.constraints.get("output_tokens"), + "stream_response": request.params.get("stream", self.stream_response), + **request.params, + } + ) + + async for delta, usage_stats in completion_method(**completion_kwargs): + if request_info.request_timings.request_start is None: + request_info.request_timings.request_start = time.time() + + if delta is not None: + if request_info.request_timings.first_iteration is None: + request_info.request_timings.first_iteration = time.time() + response.value += delta # type: ignore [operator] + response.delta = delta + request_info.request_timings.last_iteration = time.time() + response.iterations += 1 - for item in response.json()["data"]: - models.append(item["id"]) + if usage_stats is not None: + request_info.request_timings.request_end = time.time() + response.request_output_tokens = usage_stats.output_tokens + response.request_prompt_tokens = usage_stats.prompt_tokens - return models + yield response, request_info - async def text_completions( # type: ignore[override] + if request_info.request_timings.request_end is None: + request_info.request_timings.request_end = time.time() + response.delta = None + yield response, request_info + + async def text_completions( self, prompt: Union[str, list[str]], - request_id: Optional[str] = None, - prompt_token_count: Optional[int] = None, + request_id: Optional[str], # noqa: ARG002 output_token_count: Optional[int] = None, + stream_response: bool = True, **kwargs, - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: + ) -> AsyncIterator[tuple[Optional[str], Optional[UsageStats]]]: """ - Generate text completions for the given prompt using the OpenAI - completions endpoint: /v1/completions. - - :param prompt: The prompt (or list of prompts) to generate a completion for. - If a list is supplied, these are concatenated and run through the model - for a single prompt. - :param request_id: The unique identifier for the request, if any. - Added to logging statements and the response for tracking purposes. - :param prompt_token_count: The number of tokens measured in the prompt, if any. - Returned in the response stats for later analysis, if applicable. - :param output_token_count: If supplied, the number of tokens to enforce - generation of for the output for this request. 
- :param kwargs: Additional keyword arguments to pass with the request. - :return: An async generator that yields a StreamingTextResponse for start, - a StreamingTextResponse for each received iteration, - and a ResponseSummary for the final response. + Generate text completions using the /v1/completions endpoint. + + :param prompt: Text prompt(s) for completion. Single string or list. + :param request_id: Request identifier for tracking. + :param output_token_count: Maximum tokens to generate. Overrides default + if specified. + :param stream_response: Whether to stream response progressively. + :param kwargs: Additional request parameters (temperature, top_p, etc.). + :yields: Tuples of (generated_text, usage_stats). First yield is (None, None). + :raises RuntimeError: If backend is not initialized. + :raises HTTPError: If API request fails. """ - logger.debug("{} invocation with args: {}", self.__class__.__name__, locals()) - - if isinstance(prompt, list): - raise ValueError( - "List prompts (batching) is currently not supported for " - f"text_completions OpenAI pathways. Received: {prompt}" - ) - - headers = self._headers() - params = self._params(TEXT_COMPLETIONS) - payload = self._completions_payload( - endpoint_type=TEXT_COMPLETIONS, - orig_kwargs=kwargs, + self._check_in_process() + target = f"{self.target}{self.TEXT_COMPLETIONS_PATH}" + headers = self._get_headers() + params = self._get_params(self.TEXT_COMPLETIONS_KEY) + body = self._get_body( + endpoint_type=self.TEXT_COMPLETIONS_KEY, + request_kwargs=kwargs, max_output_tokens=output_token_count, prompt=prompt, ) + yield None, None # Initial yield for async iterator to signal start - try: - async for resp in self._iterative_completions_request( - type_="text_completions", - request_id=request_id, - request_prompt_tokens=prompt_token_count, - request_output_tokens=output_token_count, + if not stream_response: + response = await self._async_client.post( # type: ignore [union-attr] + target, headers=headers, params=params, - payload=payload, - ): - yield resp - except Exception as ex: - logger.error( - "{} request with headers: {} and params: {} and payload: {} failed: {}", - self.__class__.__name__, - headers, - params, - payload, - ex, + json=body, ) - raise ex + response.raise_for_status() + data = response.json() + yield ( + self._get_completions_text_content(data), + self._get_completions_usage_stats(data), + ) + return + + body.update({"stream": True, "stream_options": {"include_usage": True}}) + async with self._async_client.stream( # type: ignore [union-attr] + "POST", + target, + headers=headers, + params=params, + json=body, + ) as stream: + stream.raise_for_status() + async for line in stream.aiter_lines(): + if not line or not line.strip().startswith("data:"): + continue + if line.strip() == "data: [DONE]": + break + data = json.loads(line.strip()[len("data: ") :]) + yield ( + self._get_completions_text_content(data), + self._get_completions_usage_stats(data), + ) - async def chat_completions( # type: ignore[override] + async def chat_completions( self, content: Union[ str, list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], Any, ], - request_id: Optional[str] = None, - prompt_token_count: Optional[int] = None, + request_id: Optional[str] = None, # noqa: ARG002 output_token_count: Optional[int] = None, raw_content: bool = False, + stream_response: bool = True, **kwargs, - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: + ) -> AsyncIterator[tuple[Optional[str], 
Optional[UsageStats]]]: """ - Generate chat completions for the given content using the OpenAI - chat completions endpoint: /v1/chat/completions. - - :param content: The content (or list of content) to generate a completion for. - This supports any combination of text, images, and audio (model dependent). - Supported text only request examples: - content="Sample prompt", content=["Sample prompt", "Second prompt"], - content=[{"type": "text", "value": "Sample prompt"}. - Supported text and image request examples: - content=["Describe the image", PIL.Image.open("image.jpg")], - content=["Describe the image", Path("image.jpg")], - content=["Describe the image", {"type": "image_url", - "image_url": {"url": f"data:image/jpeg;base64,{base64_image}"}]. - Supported text and audio request examples: - content=["Transcribe the audio", Path("audio.wav")], - content=["Transcribe the audio", {"type": "input_audio", - "input_audio": {"data": f"{base64_bytes}", "format": "wav}]. - Additionally, if raw_content=True then the content is passed directly to the - backend without any processing. - :param request_id: The unique identifier for the request, if any. - Added to logging statements and the response for tracking purposes. - :param prompt_token_count: The number of tokens measured in the prompt, if any. - Returned in the response stats for later analysis, if applicable. - :param output_token_count: If supplied, the number of tokens to enforce - generation of for the output for this request. - :param kwargs: Additional keyword arguments to pass with the request. - :return: An async generator that yields a StreamingTextResponse for start, - a StreamingTextResponse for each received iteration, - and a ResponseSummary for the final response. + Generate chat completions using the /v1/chat/completions endpoint. + + Supports multimodal inputs including text and images with message formatting. + + :param content: Chat content - string, list of mixed content, or raw content + when raw_content=True. + :param request_id: Request identifier (currently unused). + :param output_token_count: Maximum tokens to generate. Overrides default + if specified. + :param raw_content: If True, passes content directly without formatting. + :param stream_response: Whether to stream response progressively. + :param kwargs: Additional request parameters (temperature, top_p, tools, etc.). + :yields: Tuples of (generated_text, usage_stats). First yield is (None, None). + :raises RuntimeError: If backend is not initialized. + :raises HTTPError: If API request fails. 
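The content shapes accepted here follow from _get_chat_messages() further down; a short sketch, with a placeholder image file and server URL:

    import asyncio

    from PIL import Image

    from guidellm.backend.openai import OpenAIHTTPBackend

    async def describe_image() -> None:
        backend = OpenAIHTTPBackend(target="http://localhost:8000", model="example")
        await backend.process_startup()
        try:
            # Strings become text parts, PIL images and .jpg/.jpeg/.wav paths become
            # image/audio parts, and pre-formatted dicts pass through untouched; the
            # whole list is folded into a single user message.
            content = [
                "Describe the picture in one sentence.",
                Image.open("picture.jpg"),  # placeholder image file
            ]
            async for delta, usage in backend.chat_completions(content=content):
                if delta:
                    print(delta, end="")
        finally:
            await backend.process_shutdown()

    asyncio.run(describe_image())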
""" - logger.debug("{} invocation with args: {}", self.__class__.__name__, locals()) - headers = self._headers() - params = self._params(CHAT_COMPLETIONS) - messages = ( - content if raw_content else self._create_chat_messages(content=content) - ) - payload = self._completions_payload( - endpoint_type=CHAT_COMPLETIONS, - orig_kwargs=kwargs, + self._check_in_process() + target = f"{self.target}{self.CHAT_COMPLETIONS_PATH}" + headers = self._get_headers() + params = self._get_params(self.CHAT_COMPLETIONS_KEY) + body = self._get_body( + endpoint_type=self.CHAT_COMPLETIONS_KEY, + request_kwargs=kwargs, max_output_tokens=output_token_count, - messages=messages, + messages=self._get_chat_messages(content) if not raw_content else content, + **kwargs, ) + yield None, None # Initial yield for async iterator to signal start - try: - async for resp in self._iterative_completions_request( - type_="chat_completions", - request_id=request_id, - request_prompt_tokens=prompt_token_count, - request_output_tokens=output_token_count, - headers=headers, - params=params, - payload=payload, - ): - yield resp - except Exception as ex: - logger.error( - "{} request with headers: {} and params: {} and payload: {} failed: {}", - self.__class__.__name__, - headers, - params, - payload, - ex, + if not stream_response: + response = await self._async_client.post( # type: ignore [union-attr] + target, headers=headers, params=params, json=body ) - raise ex - - def _get_async_client(self) -> httpx.AsyncClient: - """ - Get the async HTTP client for making requests. - If the client has not been created yet, it will create one. - - :return: The async HTTP client. - """ - if self._async_client is None or self._async_client.is_closed: - client = httpx.AsyncClient( - http2=self.http2, - timeout=self.timeout, - follow_redirects=self.follow_redirects, - verify=self.verify, + response.raise_for_status() + data = response.json() + yield ( + self._get_completions_text_content(data), + self._get_completions_usage_stats(data), ) - self._async_client = client - else: - client = self._async_client + return - return client - - def _headers(self) -> dict[str, str]: - headers = { - "Content-Type": "application/json", - } - headers.update(self.headers) - return headers - - def _params(self, endpoint_type: EndpointType) -> dict[str, str]: - if self.extra_query is None: - return {} - - if ( - CHAT_COMPLETIONS in self.extra_query - or MODELS in self.extra_query - or TEXT_COMPLETIONS in self.extra_query - ): - return self.extra_query.get(endpoint_type, {}) - - return self.extra_query - - def _extra_body(self, endpoint_type: EndpointType) -> dict[str, Any]: - if self.extra_body is None: - return {} - - if ( - CHAT_COMPLETIONS in self.extra_body - or MODELS in self.extra_body - or TEXT_COMPLETIONS in self.extra_body - ): - return copy.deepcopy(self.extra_body.get(endpoint_type, {})) - - return copy.deepcopy(self.extra_body) + body.update({"stream": True, "stream_options": {"include_usage": True}}) + async with self._async_client.stream( # type: ignore [union-attr] + "POST", target, headers=headers, params=params, json=body + ) as stream: + stream.raise_for_status() + async for line in stream.aiter_lines(): + if not line or not line.strip().startswith("data:"): + continue + if line.strip() == "data: [DONE]": + break + data = json.loads(line.strip()[len("data: ") :]) + yield ( + self._get_completions_text_content(data), + self._get_completions_usage_stats(data), + ) - def _completions_payload( + def _build_headers( self, - endpoint_type: 
EndpointType, - orig_kwargs: Optional[dict], - max_output_tokens: Optional[int], - **kwargs, - ) -> dict: - payload = self._extra_body(endpoint_type) - payload.update(orig_kwargs or {}) - payload.update(kwargs) - payload["model"] = self.model - payload["stream"] = True - payload["stream_options"] = { - "include_usage": True, - } + api_key: Optional[str], + organization: Optional[str], + project: Optional[str], + user_headers: Optional[dict], + ) -> dict[str, str]: + headers = {} - if max_output_tokens or self.max_output_tokens: - logger.debug( - "{} adding payload args for setting output_token_count: {}", - self.__class__.__name__, - max_output_tokens or self.max_output_tokens, + if api_key: + headers["Authorization"] = ( + f"Bearer {api_key}" if not api_key.startswith("Bearer") else api_key + ) + if organization: + headers["OpenAI-Organization"] = organization + if project: + headers["OpenAI-Project"] = project + if user_headers: + headers.update(user_headers) + + return {key: val for key, val in headers.items() if val is not None} + + def _check_in_process(self): + if not self._in_process or self._async_client is None: + raise RuntimeError( + "Backend not started up for process, cannot process requests." ) - payload["max_tokens"] = max_output_tokens or self.max_output_tokens - payload["max_completion_tokens"] = payload["max_tokens"] - - if max_output_tokens: - # only set stop and ignore_eos if max_output_tokens set at request level - # otherwise the instance value is just the max to enforce we stay below - payload["stop"] = None - payload["ignore_eos"] = True - if self.remove_from_body: - for key in self.remove_from_body: - payload.pop(key, None) + def _get_headers(self) -> dict[str, str]: + return { + "Content-Type": "application/json", + **self.headers, + } - return payload + def _get_params(self, endpoint_type: str) -> dict[str, str]: + if endpoint_type in self.extra_query: + return copy.deepcopy(self.extra_query[endpoint_type]) + return copy.deepcopy(self.extra_query) - @staticmethod - def _create_chat_messages( + def _get_chat_messages( + self, content: Union[ str, list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], Any, ], - ) -> list[dict]: + ) -> list[dict[str, Any]]: if isinstance(content, str): - return [ - { - "role": "user", - "content": content, - } - ] - - if isinstance(content, list): - resolved_content = [] - - for item in content: - if isinstance(item, dict): - resolved_content.append(item) - elif isinstance(item, str): - resolved_content.append({"type": "text", "text": item}) - elif isinstance(item, Image.Image) or ( - isinstance(item, Path) and item.suffix.lower() in [".jpg", ".jpeg"] - ): - image = item if isinstance(item, Image.Image) else Image.open(item) - encoded = base64.b64encode(image.tobytes()).decode("utf-8") - resolved_content.append( - { - "type": "image", - "image": { - "url": f"data:image/jpeg;base64,{encoded}", - }, - } - ) - elif isinstance(item, Path) and item.suffix.lower() in [".wav"]: - encoded = base64.b64encode(item.read_bytes()).decode("utf-8") - resolved_content.append( - { - "type": "input_audio", - "input_audio": { - "data": f"{encoded}", - "format": "wav", - }, - } - ) - else: - raise ValueError( - f"Unsupported content item type: {item} in list: {content}" - ) - - return [ - { - "role": "user", - "content": resolved_content, - } - ] - - raise ValueError(f"Unsupported content type: {content}") - - async def _iterative_completions_request( - self, - type_: Literal["text_completions", "chat_completions"], - request_id: 
Optional[str], - request_prompt_tokens: Optional[int], - request_output_tokens: Optional[int], - headers: dict[str, str], - params: dict[str, str], - payload: dict[str, Any], - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: - if type_ == "text_completions": - target = f"{self.target}{TEXT_COMPLETIONS_PATH}" - elif type_ == "chat_completions": - target = f"{self.target}{CHAT_COMPLETIONS_PATH}" + return [{"role": "user", "content": content}] + + if not isinstance(content, list): + raise ValueError(f"Unsupported content type: {type(content)}") + + resolved_content = [] + for item in content: + if isinstance(item, dict): + resolved_content.append(item) + elif isinstance(item, str): + resolved_content.append({"type": "text", "text": item}) + elif isinstance(item, (Image.Image, Path)): + resolved_content.append(self._get_chat_message_media_item(item)) + else: + raise ValueError(f"Unsupported content item type: {type(item)}") + + return [{"role": "user", "content": resolved_content}] + + def _get_chat_message_media_item( + self, item: Union[Path, Image.Image] + ) -> dict[str, Any]: + if isinstance(item, Image.Image): + encoded = base64.b64encode(item.tobytes()).decode("utf-8") + return { + "type": "image", + "image": {"url": f"data:image/jpeg;base64,{encoded}"}, + } + + # Handle file paths + suffix = item.suffix.lower() + if suffix in [".jpg", ".jpeg"]: + image = Image.open(item) + encoded = base64.b64encode(image.tobytes()).decode("utf-8") + return { + "type": "image", + "image": {"url": f"data:image/jpeg;base64,{encoded}"}, + } + elif suffix == ".wav": + encoded = base64.b64encode(item.read_bytes()).decode("utf-8") + return { + "type": "input_audio", + "input_audio": {"data": encoded, "format": "wav"}, + } else: - raise ValueError(f"Unsupported type: {type_}") - - logger.info( - "{} making request: {} to target: {} using http2: {} following " - "redirects: {} for timeout: {} with headers: {} and params: {} and ", - "payload: {}", - self.__class__.__name__, - request_id, - target, - self.http2, - self.follow_redirects, - self.timeout, - headers, - params, - payload, - ) - - response_value = "" - response_prompt_count: Optional[int] = None - response_output_count: Optional[int] = None - iter_count = 0 - start_time = time.time() - iter_time = start_time - first_iter_time: Optional[float] = None - last_iter_time: Optional[float] = None - - yield StreamingTextResponse( - type_="start", - value="", - start_time=start_time, - first_iter_time=None, - iter_count=iter_count, - delta="", - time=start_time, - request_id=request_id, - ) - - # reset start time after yielding start response to ensure accurate timing - start_time = time.time() - - async with self._get_async_client().stream( - "POST", target, headers=headers, params=params, json=payload - ) as stream: - stream.raise_for_status() - - async for line in stream.aiter_lines(): - iter_time = time.time() - logger.debug( - "{} request: {} recieved iter response line: {}", - self.__class__.__name__, - request_id, - line, - ) - - if not line or not line.strip().startswith("data:"): - continue + raise ValueError(f"Unsupported file type: {suffix}") - if line.strip() == "data: [DONE]": - break - - data = json.loads(line.strip()[len("data: ") :]) - if delta := self._extract_completions_delta_content(type_, data): - if first_iter_time is None: - first_iter_time = iter_time - last_iter_time = iter_time - - iter_count += 1 - response_value += delta - - yield StreamingTextResponse( - type_="iter", - value=response_value, - 
iter_count=iter_count, - start_time=start_time, - first_iter_time=first_iter_time, - delta=delta, - time=iter_time, - request_id=request_id, - ) - - if usage := self._extract_completions_usage(data): - response_prompt_count = usage["prompt"] - response_output_count = usage["output"] - - logger.info( - "{} request: {} with headers: {} and params: {} and payload: {} completed" - "with: {}", - self.__class__.__name__, - request_id, - headers, - params, - payload, - response_value, - ) + def _get_body( + self, + endpoint_type: str, + request_kwargs: Optional[dict[str, Any]], + max_output_tokens: Optional[int] = None, + **kwargs, + ) -> dict[str, Any]: + # Start with endpoint-specific extra body parameters + extra_body = self.extra_body.get(endpoint_type, self.extra_body) + + body = copy.deepcopy(extra_body) + body.update(request_kwargs or {}) + body.update(kwargs) + body["model"] = self.model + + # Handle token limits + max_tokens = max_output_tokens or self.max_output_tokens + if max_tokens is not None: + body.update( + { + "max_tokens": max_tokens, + "max_completion_tokens": max_tokens, + } + ) + # Set stop conditions only for request-level limits + if max_output_tokens: + body.update({"stop": None, "ignore_eos": True}) - yield ResponseSummary( - value=response_value, - request_args=RequestArgs( - target=target, - headers=headers, - params=params, - payload=payload, - timeout=self.timeout, - http2=self.http2, - follow_redirects=self.follow_redirects, - ), - start_time=start_time, - end_time=iter_time, - first_iter_time=first_iter_time, - last_iter_time=last_iter_time, - iterations=iter_count, - request_prompt_tokens=request_prompt_tokens, - request_output_tokens=request_output_tokens, - response_prompt_tokens=response_prompt_count, - response_output_tokens=response_output_count, - request_id=request_id, - ) + return {key: val for key, val in body.items() if val is not None} - @staticmethod - def _extract_completions_delta_content( - type_: Literal["text_completions", "chat_completions"], data: dict - ) -> Optional[str]: - if "choices" not in data or not data["choices"]: + def _get_completions_text_content(self, data: dict) -> Optional[str]: + if not data.get("choices"): return None - if type_ == "text_completions": - return data["choices"][0]["text"] + choice = data["choices"][0] + return choice.get("text") or choice.get("delta", {}).get("content") - if type_ == "chat_completions": - return data.get("choices", [{}])[0].get("delta", {}).get("content") - - raise ValueError(f"Unsupported type: {type_}") - - @staticmethod - def _extract_completions_usage( - data: dict, - ) -> Optional[dict[Literal["prompt", "output"], int]]: - if "usage" not in data or not data["usage"]: + def _get_completions_usage_stats(self, data: dict) -> Optional[UsageStats]: + if not data.get("usage"): return None - return { - "prompt": data["usage"]["prompt_tokens"], - "output": data["usage"]["completion_tokens"], - } + return UsageStats( + prompt_tokens=data["usage"].get("prompt_tokens"), + output_tokens=data["usage"].get("completion_tokens"), + ) diff --git a/src/guidellm/backend/response.py b/src/guidellm/backend/response.py deleted file mode 100644 index f2272a73..00000000 --- a/src/guidellm/backend/response.py +++ /dev/null @@ -1,136 +0,0 @@ -from typing import Any, Literal, Optional - -from pydantic import computed_field - -from guidellm.objects.pydantic import StandardBaseModel -from guidellm.settings import settings - -__all__ = [ - "RequestArgs", - "ResponseSummary", - "StreamingResponseType", - 
"StreamingTextResponse", -] - - -StreamingResponseType = Literal["start", "iter"] - - -class StreamingTextResponse(StandardBaseModel): - """ - A model representing the response content for a streaming text request. - - :param type_: The type of the response; either 'start' or 'iter'. - :param value: The value of the response up to this iteration. - :param start_time: The time.time() the request started. - :param iter_count: The iteration count for the response. For 'start' this is 0 - and for the first 'iter' it is 1. - :param delta: The text delta added to the response for this stream iteration. - :param time: If 'start', the time.time() the request started. - If 'iter', the time.time() the iteration was received. - :param request_id: The unique identifier for the request, if any. - """ - - type_: StreamingResponseType - value: str - start_time: float - first_iter_time: Optional[float] - iter_count: int - delta: str - time: float - request_id: Optional[str] = None - - -class RequestArgs(StandardBaseModel): - """ - A model representing the arguments for a request to a backend. - Biases towards an HTTP request, but can be used for other types of backends. - - :param target: The target URL or function for the request. - :param headers: The headers, if any, included in the request such as authorization. - :param params: The query parameters, if any, included in the request. - :param payload: The payload / arguments for the request including the prompt / - content and other configurations. - :param timeout: The timeout for the request in seconds, if any. - :param http2: Whether HTTP/2 was used for the request, if applicable. - :param follow_redirects: Whether the request should follow redirect responses. - """ - - target: str - headers: dict[str, str] - params: dict[str, str] - payload: dict[str, Any] - timeout: Optional[float] = None - http2: Optional[bool] = None - follow_redirects: Optional[bool] = None - - -class ResponseSummary(StandardBaseModel): - """ - A model representing a summary of a backend request. - Always returned as the final iteration of a streaming request. - - :param value: The final value returned from the request. - :param request_args: The arguments used to make the request. - :param iterations: The number of iterations in the request. - :param start_time: The time the request started. - :param end_time: The time the request ended. - :param first_iter_time: The time the first iteration was received. - :param last_iter_time: The time the last iteration was received. - :param request_prompt_tokens: The number of tokens measured in the prompt - for the request, if any. - :param request_output_tokens: The number of tokens enforced for the output - for the request, if any. - :param response_prompt_tokens: The number of tokens measured in the prompt - for the response, if any. - :param response_output_tokens: The number of tokens measured in the output - for the response, if any. - :param request_id: The unique identifier for the request, if any. - :param error: The error message, if any, returned from making the request. 
- """ - - value: str - request_args: RequestArgs - iterations: int = 0 - start_time: float - end_time: float - first_iter_time: Optional[float] - last_iter_time: Optional[float] - request_prompt_tokens: Optional[int] = None - request_output_tokens: Optional[int] = None - response_prompt_tokens: Optional[int] = None - response_output_tokens: Optional[int] = None - request_id: Optional[str] = None - error: Optional[str] = None - - @computed_field # type: ignore[misc] - @property - def prompt_tokens(self) -> Optional[int]: - """ - The number of tokens measured in the prompt based on preferences - for trusting the input or response. - - :return: The number of tokens in the prompt, if any. - """ - if settings.preferred_prompt_tokens_source == "request": - return self.request_prompt_tokens or self.response_prompt_tokens - - return self.response_prompt_tokens or self.request_prompt_tokens - - @computed_field # type: ignore[misc] - @property - def output_tokens(self) -> Optional[int]: - """ - The number of tokens measured in the output based on preferences - for trusting the input or response. - - :return: The number of tokens in the output, if any. - """ - if self.error is not None: - # error occurred, can't trust request tokens were all generated - return self.response_prompt_tokens - - if settings.preferred_output_tokens_source == "request": - return self.request_output_tokens or self.response_output_tokens - - return self.response_output_tokens or self.request_output_tokens diff --git a/tests/unit/backend/test_backend.py b/tests/unit/backend/test_backend.py index 1115d509..1cdb672b 100644 --- a/tests/unit/backend/test_backend.py +++ b/tests/unit/backend/test_backend.py @@ -1,136 +1,332 @@ -import time +""" +Unit tests for the Backend base class and registry functionality. 
+""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterator +from functools import wraps +from typing import Any +from unittest.mock import Mock, patch import pytest -from guidellm.backend import ( - Backend, - ResponseSummary, - StreamingTextResponse, +from guidellm.backend.backend import Backend, BackendType +from guidellm.backend.objects import ( + GenerationRequest, + GenerationRequestTimings, ) +from guidellm.scheduler import BackendInterface, ScheduledRequestInfo +from guidellm.utils import RegistryMixin + + +def async_timeout(delay): + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +def test_backend_type(): + """Test that BackendType is defined correctly as a Literal type.""" + assert BackendType is not None + # BackendType should be a literal type containing "openai_http" + assert "openai_http" in str(BackendType) + + +class TestBackend: + """Test cases for Backend base class.""" + + @pytest.fixture( + params=[ + {"type_": "openai_http"}, + {"type_": "openai_http"}, # Test multiple instances with same type + ] + ) + def valid_instances(self, request): + """Fixture providing valid Backend instances.""" + constructor_args = request.param + + class TestBackend(Backend): + def info(self) -> dict[str, Any]: + return {"type": self.type_} + + async def process_startup(self): + pass + + async def process_shutdown(self): + pass + + async def validate(self): + pass + + async def resolve( + self, request, request_info, history=None + ) -> AsyncIterator[tuple[Any, Any]]: + yield request, request_info + + async def default_model(self) -> str | None: + return "test-model" + + instance = TestBackend(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test Backend inheritance and type relationships.""" + assert issubclass(Backend, RegistryMixin) + assert issubclass(Backend, BackendInterface) + assert hasattr(Backend, "create") + assert hasattr(Backend, "register") + assert hasattr(Backend, "get_registered_object") + + # Check properties exist + assert hasattr(Backend, "processes_limit") + assert hasattr(Backend, "requests_limit") + + # Check abstract method exists + assert hasattr(Backend, "default_model") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test Backend initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, Backend) + assert instance.type_ == constructor_args["type_"] + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("type_", None), + ("type_", 123), + ("type_", ""), + ], + ) + def test_invalid_initialization_values(self, field, value): + """Test Backend with invalid field values.""" + + class TestBackend(Backend): + def info(self) -> dict[str, Any]: + return {} + + async def process_startup(self): + pass + + async def process_shutdown(self): + pass + + async def validate(self): + pass + + async def resolve(self, request, request_info, history=None): + yield request, request_info + + async def default_model(self) -> str | None: + return "test-model" + + data = {field: value} + # Backend itself doesn't validate types, but we test that it accepts the value + backend = TestBackend(**data) + assert getattr(backend, field) == value + + @pytest.mark.smoke + def test_default_properties(self, valid_instances): + """Test Backend default 
property implementations.""" + instance, _ = valid_instances + assert instance.processes_limit is None + assert instance.requests_limit is None + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(5.0) + async def test_default_model_abstract(self): + """Test that default_model is abstract and must be implemented.""" + # Backend itself is abstract and cannot be instantiated + with pytest.raises(TypeError): + Backend("openai_http") # type: ignore + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(5.0) + async def test_interface_compatibility(self, valid_instances): + """Test that Backend is compatible with BackendInterface.""" + instance, _ = valid_instances + + # Test that Backend uses the correct generic types + request = GenerationRequest(content="test") + request_info = ScheduledRequestInfo( + request_id="test-id", + status="pending", + scheduler_node_id=1, + scheduler_process_id=1, + scheduler_start_time=123.0, + request_timings=GenerationRequestTimings(), + ) + + # Test resolve method + async for response, info in instance.resolve(request, request_info): + assert response == request + assert info == request_info + break # Only test first iteration + + @pytest.mark.smoke + def test_create_method_valid(self): + """Test Backend.create class method with valid backend.""" + # Mock a registered backend + mock_backend_class = Mock() + mock_backend_instance = Mock() + mock_backend_class.return_value = mock_backend_instance + + with patch.object( + Backend, "get_registered_object", return_value=mock_backend_class + ): + result = Backend.create("openai_http", test_arg="value") + + Backend.get_registered_object.assert_called_once_with("openai_http") + mock_backend_class.assert_called_once_with(test_arg="value") + assert result == mock_backend_instance + + @pytest.mark.sanity + def test_create_method_invalid(self): + """Test Backend.create class method with invalid backend type.""" + with pytest.raises( + ValueError, match="Backend type 'invalid_type' is not registered" + ): + Backend.create("invalid_type") + + @pytest.mark.regression + def test_docstring_example_pattern(self): + """Test that Backend docstring examples work as documented.""" + + # Test the pattern shown in docstring + class MyBackend(Backend): + def __init__(self, api_key: str): + super().__init__("mock_backend") # type: ignore [arg-type] + self.api_key = api_key + + def info(self) -> dict[str, Any]: + return {"api_key": "***"} + + async def process_startup(self): + self.client = Mock() # Simulate API client + + async def process_shutdown(self): + self.client = None # type: ignore[assignment] + + async def validate(self): + pass + + async def resolve(self, request, request_info, history=None): + yield request, request_info + + async def default_model(self) -> str | None: + return "my-model" + + # Register the backend + Backend.register("my_backend")(MyBackend) + + # Create instance + backend = Backend.create("my_backend", api_key="secret") + assert isinstance(backend, MyBackend) + assert backend.api_key == "secret" + assert backend.type_ == "mock_backend" + + +class TestBackendRegistry: + """Test cases for Backend registry functionality.""" + + @pytest.mark.smoke + def test_openai_backend_registered(self): + """Test that OpenAI HTTP backend is registered.""" + from guidellm.backend.openai import OpenAIHTTPBackend + + # OpenAI backend should be registered + backend = Backend.create("openai_http", target="http://test") + assert isinstance(backend, OpenAIHTTPBackend) + assert backend.type_ == "openai_http" 
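Outside of the test suite, the registry round trip exercised above looks roughly like this; the target URL is a placeholder and assumes a reachable OpenAI-compatible server:

    import asyncio

    from guidellm.backend.backend import Backend

    async def main() -> None:
        backend = Backend.create("openai_http", target="http://localhost:8000")
        await backend.process_startup()
        try:
            await backend.validate()  # auto-selects a model if none was configured
            print(await backend.default_model())
            print(await backend.available_models())
        finally:
            await backend.process_shutdown()

    asyncio.run(main())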
+ + @pytest.mark.sanity + def test_backend_create_invalid_type(self): + """Test Backend.create with invalid type raises appropriate error.""" + with pytest.raises( + ValueError, match="Backend type 'invalid_type' is not registered" + ): + Backend.create("invalid_type") + + @pytest.mark.smoke + def test_backend_registry_functionality(self): + """Test that backend registry functions work.""" + from guidellm.backend.openai import OpenAIHTTPBackend + + # Test that we can get registered backends + openai_class = Backend.get_registered_object("openai_http") + assert openai_class == OpenAIHTTPBackend + + # Test creating with kwargs + backend = Backend.create( + "openai_http", target="http://localhost:8000", model="gpt-4" + ) + assert backend.target == "http://localhost:8000" + assert backend.model == "gpt-4" + + @pytest.mark.smoke + def test_backend_is_registered(self): + """Test Backend.is_registered method.""" + # Test with a known registered backend + assert Backend.is_registered("openai_http") + + # Test with unknown backend + assert not Backend.is_registered("unknown_backend") + + @pytest.mark.regression + def test_backend_registration_decorator(self): + """Test that backend registration decorator works.""" + + # Create a test backend class + @Backend.register("test_backend") + class TestBackend(Backend): + def __init__(self, test_param="default"): + super().__init__("test_backend") # type: ignore + self._test_param = test_param + + def info(self): + return {"test_param": self._test_param} + + async def process_startup(self): + pass + + async def process_shutdown(self): + pass + + async def validate(self): + pass + + async def resolve(self, request, request_info, history=None): + yield request, request_info + + async def default_model(self): + return "test-model" + + # Test that it's registered and can be created + backend = Backend.create("test_backend", test_param="custom") + assert isinstance(backend, TestBackend) + assert backend.info() == {"test_param": "custom"} + + @pytest.mark.smoke + def test_backend_registered_objects(self): + """Test Backend.registered_objects method returns registered backends.""" + # Should include at least the openai_http backend + registered = Backend.registered_objects() + assert isinstance(registered, tuple) + assert len(registered) > 0 + # Check that openai backend is in the registered objects + from guidellm.backend.openai import OpenAIHTTPBackend -@pytest.mark.smoke -def test_backend_registry(): - assert Backend._registry["mock"] is not None # type: ignore - - backend_instance = Backend.create("mock") # type: ignore - assert backend_instance is not None - - with pytest.raises(ValueError): - Backend.register("mock")("backend") # type: ignore - - with pytest.raises(ValueError): - Backend.create("invalid_type") # type: ignore - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_backend_text_completions(mock_backend): - index = 0 - prompt = "Test Prompt" - request_id = "test-request-id" - prompt_token_count = 3 - output_token_count = 10 - final_resp = None - - async for response in mock_backend.text_completions( - prompt=prompt, - request_id=request_id, - prompt_token_count=prompt_token_count, - output_token_count=output_token_count, - ): - assert isinstance(response, (StreamingTextResponse, ResponseSummary)) - - if index == 0: - assert isinstance(response, StreamingTextResponse) - assert response.type_ == "start" - assert response.iter_count == 0 - assert response.delta == "" - assert response.time == pytest.approx(time.time(), abs=0.01) - assert 
response.request_id == request_id - elif not isinstance(response, ResponseSummary): - assert response.type_ == "iter" - assert response.iter_count == index - assert len(response.delta) > 0 - assert response.time == pytest.approx(time.time(), abs=0.01) - assert response.request_id == request_id - else: - assert not final_resp - final_resp = response - assert isinstance(response, ResponseSummary) - assert len(response.value) > 0 - assert response.iterations > 0 - assert response.start_time > 0 - assert response.end_time == pytest.approx(time.time(), abs=0.01) - assert response.request_prompt_tokens == prompt_token_count - assert response.request_output_tokens == output_token_count - assert response.response_prompt_tokens == 3 - assert response.response_output_tokens == 10 - assert response.request_id == request_id - - index += 1 - - assert final_resp - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_backend_chat_completions(mock_backend): - index = 0 - prompt = "Test Prompt" - request_id = "test-request-id" - prompt_token_count = 3 - output_token_count = 10 - final_resp = None - - async for response in mock_backend.chat_completions( - content=prompt, - request_id=request_id, - prompt_token_count=prompt_token_count, - output_token_count=output_token_count, - ): - assert isinstance(response, (StreamingTextResponse, ResponseSummary)) - - if index == 0: - assert isinstance(response, StreamingTextResponse) - assert response.type_ == "start" - assert response.iter_count == 0 - assert response.delta == "" - assert response.time == pytest.approx(time.time(), abs=0.01) - assert response.request_id == request_id - elif not isinstance(response, ResponseSummary): - assert response.type_ == "iter" - assert response.iter_count == index - assert len(response.delta) > 0 - assert response.time == pytest.approx(time.time(), abs=0.01) - assert response.request_id == request_id - else: - assert not final_resp - final_resp = response - assert isinstance(response, ResponseSummary) - assert len(response.value) > 0 - assert response.iterations > 0 - assert response.start_time > 0 - assert response.end_time == pytest.approx(time.time(), abs=0.01) - assert response.request_prompt_tokens == prompt_token_count - assert response.request_output_tokens == output_token_count - assert response.response_prompt_tokens == 3 - assert response.response_output_tokens == 10 - assert response.request_id == request_id - - index += 1 - - assert final_resp - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_backend_models(mock_backend): - models = await mock_backend.available_models() - assert models == ["mock-model"] - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_backend_validate(mock_backend): - await mock_backend.validate() + assert OpenAIHTTPBackend in registered diff --git a/tests/unit/backend/test_objects.py b/tests/unit/backend/test_objects.py new file mode 100644 index 00000000..2f91a76b --- /dev/null +++ b/tests/unit/backend/test_objects.py @@ -0,0 +1,467 @@ +""" +Unit tests for GenerationRequest, GenerationResponse, GenerationRequestTimings. 
+""" + +from __future__ import annotations + +import uuid + +import pytest +from pydantic import ValidationError + +from guidellm.backend.objects import ( + GenerationRequest, + GenerationRequestTimings, + GenerationResponse, +) +from guidellm.scheduler import MeasuredRequestTimings +from guidellm.utils import StandardBaseModel + + +class TestGenerationRequest: + """Test cases for GenerationRequest model.""" + + @pytest.fixture( + params=[ + {"content": "test content"}, + { + "content": ["message1", "message2"], + "request_type": "chat_completions", + "params": {"temperature": 0.7}, + }, + { + "request_id": "custom-id", + "content": {"role": "user", "content": "test"}, + "stats": {"prompt_tokens": 50}, + "constraints": {"output_tokens": 100}, + }, + ] + ) + def valid_instances(self, request): + """Fixture providing valid GenerationRequest instances.""" + constructor_args = request.param + instance = GenerationRequest(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test GenerationRequest inheritance and type relationships.""" + assert issubclass(GenerationRequest, StandardBaseModel) + assert hasattr(GenerationRequest, "model_dump") + assert hasattr(GenerationRequest, "model_validate") + + # Check all expected fields are defined + fields = GenerationRequest.model_fields + expected_fields = [ + "request_id", + "request_type", + "content", + "params", + "stats", + "constraints", + ] + for field in expected_fields: + assert field in fields + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test GenerationRequest initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, GenerationRequest) + assert instance.content == constructor_args["content"] + + # Check defaults + expected_request_type = constructor_args.get("request_type", "text_completions") + assert instance.request_type == expected_request_type + + if "request_id" in constructor_args: + assert instance.request_id == constructor_args["request_id"] + else: + assert isinstance(instance.request_id, str) + # Should be valid UUID + uuid.UUID(instance.request_id) + + @pytest.mark.sanity + def test_invalid_initialization_values(self): + """Test GenerationRequest with invalid field values.""" + # Invalid request_type + with pytest.raises(ValidationError): + GenerationRequest(content="test", request_type="invalid_type") + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test GenerationRequest initialization without required field.""" + with pytest.raises(ValidationError): + GenerationRequest() # Missing required 'content' field + + @pytest.mark.smoke + def test_auto_id_generation(self): + """Test that request_id is auto-generated if not provided.""" + request1 = GenerationRequest(content="test1") + request2 = GenerationRequest(content="test2") + + assert request1.request_id != request2.request_id + assert len(request1.request_id) > 0 + assert len(request2.request_id) > 0 + + # Should be valid UUIDs + uuid.UUID(request1.request_id) + uuid.UUID(request2.request_id) + + @pytest.mark.regression + def test_content_types(self): + """Test GenerationRequest with different content types.""" + # String content + request1 = GenerationRequest(content="string content") + assert request1.content == "string content" + + # List content + request2 = GenerationRequest(content=["item1", "item2"]) + assert request2.content == ["item1", "item2"] + + # Dict content + dict_content = {"role": "user", "content": 
"test"} + request3 = GenerationRequest(content=dict_content) + assert request3.content == dict_content + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test GenerationRequest serialization and deserialization.""" + instance, constructor_args = valid_instances + data_dict = instance.model_dump() + assert isinstance(data_dict, dict) + assert data_dict["content"] == constructor_args["content"] + + # Test reconstruction + reconstructed = GenerationRequest.model_validate(data_dict) + assert reconstructed.content == instance.content + assert reconstructed.request_type == instance.request_type + assert reconstructed.request_id == instance.request_id + + +class TestGenerationResponse: + """Test cases for GenerationResponse model.""" + + @pytest.fixture( + params=[ + { + "request_id": "test-123", + "request_args": {"model": "gpt-3.5-turbo"}, + }, + { + "request_id": "test-456", + "request_args": {"model": "gpt-4"}, + "value": "Generated text", + "delta": "new text", + "iterations": 5, + "request_prompt_tokens": 50, + "request_output_tokens": 100, + "response_prompt_tokens": 55, + "response_output_tokens": 95, + }, + ] + ) + def valid_instances(self, request): + """Fixture providing valid GenerationResponse instances.""" + constructor_args = request.param + instance = GenerationResponse(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test GenerationResponse inheritance and type relationships.""" + assert issubclass(GenerationResponse, StandardBaseModel) + assert hasattr(GenerationResponse, "model_dump") + assert hasattr(GenerationResponse, "model_validate") + + # Check all expected fields and properties are defined + fields = GenerationResponse.model_fields + expected_fields = [ + "request_id", + "request_args", + "value", + "delta", + "iterations", + "request_prompt_tokens", + "request_output_tokens", + "response_prompt_tokens", + "response_output_tokens", + ] + for field in expected_fields: + assert field in fields + + # Check properties exist + assert hasattr(GenerationResponse, "prompt_tokens") + assert hasattr(GenerationResponse, "output_tokens") + assert hasattr(GenerationResponse, "total_tokens") + assert hasattr(GenerationResponse, "preferred_prompt_tokens") + assert hasattr(GenerationResponse, "preferred_output_tokens") + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test GenerationResponse initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, GenerationResponse) + assert instance.request_id == constructor_args["request_id"] + assert instance.request_args == constructor_args["request_args"] + + # Check defaults for optional fields + if "value" not in constructor_args: + assert instance.value is None + if "delta" not in constructor_args: + assert instance.delta is None + if "iterations" not in constructor_args: + assert instance.iterations == 0 + + @pytest.mark.sanity + def test_invalid_initialization_values(self): + """Test GenerationResponse with invalid field values.""" + # Invalid iterations type + with pytest.raises(ValidationError): + GenerationResponse(request_id="test", request_args={}, iterations="not_int") + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test GenerationResponse initialization without required fields.""" + with pytest.raises(ValidationError): + GenerationResponse() # Missing required fields + + with pytest.raises(ValidationError): + GenerationResponse(request_id="test") 
# Missing request_args + + @pytest.mark.smoke + def test_prompt_tokens_property(self): + """Test prompt_tokens property logic.""" + # When both are available, prefers response_prompt_tokens + response1 = GenerationResponse( + request_id="test", + request_args={}, + request_prompt_tokens=50, + response_prompt_tokens=55, + ) + assert response1.prompt_tokens == 55 + + # When only request_prompt_tokens is available + response2 = GenerationResponse( + request_id="test", request_args={}, request_prompt_tokens=50 + ) + assert response2.prompt_tokens == 50 + + # When only response_prompt_tokens is available + response3 = GenerationResponse( + request_id="test", request_args={}, response_prompt_tokens=55 + ) + assert response3.prompt_tokens == 55 + + # When neither is available + response4 = GenerationResponse(request_id="test", request_args={}) + assert response4.prompt_tokens is None + + @pytest.mark.smoke + def test_output_tokens_property(self): + """Test output_tokens property logic.""" + # When both are available, prefers response_output_tokens + response1 = GenerationResponse( + request_id="test", + request_args={}, + request_output_tokens=100, + response_output_tokens=95, + ) + assert response1.output_tokens == 95 + + # When only request_output_tokens is available + response2 = GenerationResponse( + request_id="test", request_args={}, request_output_tokens=100 + ) + assert response2.output_tokens == 100 + + # When only response_output_tokens is available + response3 = GenerationResponse( + request_id="test", request_args={}, response_output_tokens=95 + ) + assert response3.output_tokens == 95 + + # When neither is available + response4 = GenerationResponse(request_id="test", request_args={}) + assert response4.output_tokens is None + + @pytest.mark.smoke + def test_total_tokens_property(self): + """Test total_tokens property calculation.""" + # When both prompt and output tokens are available + response1 = GenerationResponse( + request_id="test", + request_args={}, + response_prompt_tokens=50, + response_output_tokens=100, + ) + assert response1.total_tokens == 150 + + # When one is missing + response2 = GenerationResponse( + request_id="test", request_args={}, response_prompt_tokens=50 + ) + assert response2.total_tokens is None + + # When both are missing + response3 = GenerationResponse(request_id="test", request_args={}) + assert response3.total_tokens is None + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("preferred_source", "expected_prompt", "expected_output"), + [ + ("request", 50, 100), + ("response", 55, 95), + ], + ) + def test_preferred_token_methods( + self, preferred_source, expected_prompt, expected_output + ): + """Test preferred_*_tokens methods.""" + response = GenerationResponse( + request_id="test", + request_args={}, + request_prompt_tokens=50, + request_output_tokens=100, + response_prompt_tokens=55, + response_output_tokens=95, + ) + + assert response.preferred_prompt_tokens(preferred_source) == expected_prompt + assert response.preferred_output_tokens(preferred_source) == expected_output + + @pytest.mark.regression + def test_preferred_tokens_fallback(self): + """Test preferred_*_tokens methods with fallback logic.""" + # Only response tokens available + response1 = GenerationResponse( + request_id="test", + request_args={}, + response_prompt_tokens=55, + response_output_tokens=95, + ) + + assert response1.preferred_prompt_tokens("request") == 55 # Falls back + assert response1.preferred_output_tokens("request") == 95 # Falls back + + # Only request tokens 
available + response2 = GenerationResponse( + request_id="test", + request_args={}, + request_prompt_tokens=50, + request_output_tokens=100, + ) + + assert response2.preferred_prompt_tokens("response") == 50 # Falls back + assert response2.preferred_output_tokens("response") == 100 # Falls back + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test GenerationResponse serialization and deserialization.""" + instance, constructor_args = valid_instances + data_dict = instance.model_dump() + assert isinstance(data_dict, dict) + assert data_dict["request_id"] == constructor_args["request_id"] + assert data_dict["request_args"] == constructor_args["request_args"] + + # Test reconstruction + reconstructed = GenerationResponse.model_validate(data_dict) + assert reconstructed.request_id == instance.request_id + assert reconstructed.request_args == instance.request_args + assert reconstructed.value == instance.value + assert reconstructed.iterations == instance.iterations + + +class TestGenerationRequestTimings: + """Test cases for GenerationRequestTimings model.""" + + @pytest.fixture( + params=[ + {}, + {"first_iteration": 1234567890.0}, + {"last_iteration": 1234567895.0}, + { + "first_iteration": 1234567890.0, + "last_iteration": 1234567895.0, + }, + ] + ) + def valid_instances(self, request): + """Fixture providing valid GenerationRequestTimings instances.""" + constructor_args = request.param + instance = GenerationRequestTimings(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test GenerationRequestTimings inheritance and type relationships.""" + assert issubclass(GenerationRequestTimings, MeasuredRequestTimings) + assert issubclass(GenerationRequestTimings, StandardBaseModel) + assert hasattr(GenerationRequestTimings, "model_dump") + assert hasattr(GenerationRequestTimings, "model_validate") + + # Check inherited fields from MeasuredRequestTimings + fields = GenerationRequestTimings.model_fields + expected_inherited_fields = ["request_start", "request_end"] + for field in expected_inherited_fields: + assert field in fields + + # Check own fields + expected_own_fields = ["first_iteration", "last_iteration"] + for field in expected_own_fields: + assert field in fields + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test GenerationRequestTimings initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, GenerationRequestTimings) + assert isinstance(instance, MeasuredRequestTimings) + + # Check field values + expected_first = constructor_args.get("first_iteration") + expected_last = constructor_args.get("last_iteration") + assert instance.first_iteration == expected_first + assert instance.last_iteration == expected_last + + @pytest.mark.sanity + def test_invalid_initialization_values(self): + """Test GenerationRequestTimings with invalid field values.""" + # Invalid timestamp type + with pytest.raises(ValidationError): + GenerationRequestTimings(first_iteration="not_float") + + with pytest.raises(ValidationError): + GenerationRequestTimings(last_iteration="not_float") + + @pytest.mark.smoke + def test_optional_fields(self): + """Test that all timing fields are optional.""" + # Should be able to create with no fields + timings1 = GenerationRequestTimings() + assert timings1.first_iteration is None + assert timings1.last_iteration is None + + # Should be able to create with only one field + timings2 = 
GenerationRequestTimings(first_iteration=123.0) + assert timings2.first_iteration == 123.0 + assert timings2.last_iteration is None + + timings3 = GenerationRequestTimings(last_iteration=456.0) + assert timings3.first_iteration is None + assert timings3.last_iteration == 456.0 + + @pytest.mark.sanity + def test_marshalling(self, valid_instances): + """Test GenerationRequestTimings serialization and deserialization.""" + instance, constructor_args = valid_instances + data_dict = instance.model_dump() + assert isinstance(data_dict, dict) + + # Test reconstruction + reconstructed = GenerationRequestTimings.model_validate(data_dict) + assert reconstructed.first_iteration == instance.first_iteration + assert reconstructed.last_iteration == instance.last_iteration + assert reconstructed.request_start == instance.request_start + assert reconstructed.request_end == instance.request_end diff --git a/tests/unit/backend/test_openai_backend.py b/tests/unit/backend/test_openai_backend.py index 7123c590..8b15bfb1 100644 --- a/tests/unit/backend/test_openai_backend.py +++ b/tests/unit/backend/test_openai_backend.py @@ -1,207 +1,1178 @@ -import time +""" +Unit tests for OpenAIHTTPBackend implementation. +""" +from __future__ import annotations + +import asyncio +import base64 +from functools import wraps +from pathlib import Path +from unittest.mock import AsyncMock, Mock, patch + +import httpx import pytest +from PIL import Image -from guidellm.backend import OpenAIHTTPBackend, ResponseSummary, StreamingTextResponse -from guidellm.settings import settings - - -@pytest.mark.smoke -def test_openai_http_backend_default_initialization(): - backend = OpenAIHTTPBackend() - assert backend.target == settings.openai.base_url - assert backend.model is None - assert backend.headers.get("Authorization") == settings.openai.bearer_token - assert backend.organization == settings.openai.organization - assert backend.project == settings.openai.project - assert backend.timeout == settings.request_timeout - assert backend.http2 is True - assert backend.follow_redirects is True - assert backend.max_output_tokens == settings.openai.max_output_tokens - assert backend.extra_query is None - - -@pytest.mark.smoke -def test_openai_http_backend_intialization(): - backend = OpenAIHTTPBackend( - target="http://test-target", - model="test-model", - api_key="test-key", - organization="test-org", - project="test-proj", - timeout=10, - http2=False, - follow_redirects=False, - max_output_tokens=100, - extra_query={"foo": "bar"}, - ) - assert backend.target == "http://test-target" - assert backend.model == "test-model" - assert backend.headers.get("Authorization") == "Bearer test-key" - assert backend.organization == "test-org" - assert backend.project == "test-proj" - assert backend.timeout == 10 - assert backend.http2 is False - assert backend.follow_redirects is False - assert backend.max_output_tokens == 100 - assert backend.extra_query == {"foo": "bar"} - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_openai_http_backend_available_models(httpx_openai_mock): - backend = OpenAIHTTPBackend(target="http://target.mock") - models = await backend.available_models() - assert models == ["mock-model"] - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_openai_http_backend_validate(httpx_openai_mock): - backend = OpenAIHTTPBackend(target="http://target.mock", model="mock-model") - await backend.validate() - - backend = OpenAIHTTPBackend(target="http://target.mock") - await backend.validate() - assert backend.model == 
"mock-model" - - backend = OpenAIHTTPBackend(target="http://target.mock", model="invalid-model") - with pytest.raises(ValueError): - await backend.validate() - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_openai_http_backend_text_completions(httpx_openai_mock): - backend = OpenAIHTTPBackend(target="http://target.mock", model="mock-model") - - index = 0 - final_resp = None - async for response in backend.text_completions("Test Prompt", request_id="test-id"): - assert isinstance(response, (StreamingTextResponse, ResponseSummary)) - - if index == 0: - assert isinstance(response, StreamingTextResponse) - assert response.type_ == "start" - assert response.iter_count == 0 - assert response.delta == "" - assert response.time == pytest.approx(time.time(), abs=0.01) - assert response.request_id == "test-id" - elif not isinstance(response, ResponseSummary): - assert response.type_ == "iter" - assert response.iter_count == index - assert len(response.delta) > 0 - assert response.time == pytest.approx(time.time(), abs=0.01) - assert response.request_id == "test-id" - else: - assert not final_resp - final_resp = response - assert isinstance(response, ResponseSummary) - assert len(response.value) > 0 - assert response.request_args is not None - assert response.iterations > 0 - assert response.start_time > 0 - assert response.end_time == pytest.approx(time.time(), abs=0.01) - assert response.request_prompt_tokens is None - assert response.request_output_tokens is None - assert response.response_prompt_tokens == 3 - assert response.response_output_tokens > 0 # type: ignore - assert response.request_id == "test-id" - - index += 1 - assert final_resp - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_openai_http_backend_text_completions_counts(httpx_openai_mock): - backend = OpenAIHTTPBackend( - target="http://target.mock", - model="mock-model", - max_output_tokens=100, +from guidellm.backend.backend import Backend +from guidellm.backend.objects import ( + GenerationRequest, + GenerationRequestTimings, + GenerationResponse, +) +from guidellm.backend.openai import OpenAIHTTPBackend, UsageStats +from guidellm.scheduler import ScheduledRequestInfo + + +def async_timeout(delay): + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +def test_usage_stats(): + """Test that UsageStats is defined correctly as a dataclass.""" + stats = UsageStats() + assert stats.prompt_tokens is None + assert stats.output_tokens is None + + stats_with_values = UsageStats(prompt_tokens=10, output_tokens=5) + assert stats_with_values.prompt_tokens == 10 + assert stats_with_values.output_tokens == 5 + + +class TestOpenAIHTTPBackend: + """Test cases for OpenAIHTTPBackend.""" + + @pytest.fixture( + params=[ + {"target": "http://localhost:8000"}, + { + "target": "https://api.openai.com", + "model": "gpt-4", + "api_key": "test-key", + "timeout": 30.0, + "stream_response": False, + }, + { + "target": "http://test-server:8080", + "model": "test-model", + "api_key": "Bearer test-token", + "organization": "test-org", + "project": "test-proj", + "timeout": 120.0, + "http2": False, + "follow_redirects": False, + "max_output_tokens": 500, + "extra_query": {"param": "value"}, + "extra_body": {"setting": "test"}, + "remove_from_body": ["unwanted"], + "headers": {"Custom": "header"}, + "verify": True, + }, + ] ) - final_resp = None - - async for response in backend.text_completions( - 
"Test Prompt", request_id="test-id", prompt_token_count=3, output_token_count=10 - ): - final_resp = response - - assert final_resp - assert isinstance(final_resp, ResponseSummary) - assert len(final_resp.value) > 0 - assert final_resp.request_args is not None - assert final_resp.request_prompt_tokens == 3 - assert final_resp.request_output_tokens == 10 - assert final_resp.response_prompt_tokens == 3 - assert final_resp.response_output_tokens == 10 - assert final_resp.request_id == "test-id" - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_openai_http_backend_chat_completions(httpx_openai_mock): - backend = OpenAIHTTPBackend(target="http://target.mock", model="mock-model") - - index = 0 - final_resp = None - async for response in backend.chat_completions("Test Prompt", request_id="test-id"): - assert isinstance(response, (StreamingTextResponse, ResponseSummary)) - - if index == 0: - assert isinstance(response, StreamingTextResponse) - assert response.type_ == "start" - assert response.iter_count == 0 - assert response.delta == "" - assert response.time == pytest.approx(time.time(), abs=0.01) - assert response.request_id == "test-id" - elif not isinstance(response, ResponseSummary): - assert response.type_ == "iter" - assert response.iter_count == index - assert len(response.delta) > 0 - assert response.time == pytest.approx(time.time(), abs=0.01) - assert response.request_id == "test-id" + def valid_instances(self, request): + """Fixture providing valid OpenAIHTTPBackend instances.""" + constructor_args = request.param + instance = OpenAIHTTPBackend(**constructor_args) + return instance, constructor_args + + @pytest.mark.smoke + def test_class_signatures(self): + """Test OpenAIHTTPBackend inheritance and type relationships.""" + assert issubclass(OpenAIHTTPBackend, Backend) + assert hasattr(OpenAIHTTPBackend, "HEALTH_PATH") + assert OpenAIHTTPBackend.HEALTH_PATH == "/health" + assert hasattr(OpenAIHTTPBackend, "MODELS_PATH") + assert OpenAIHTTPBackend.MODELS_PATH == "/v1/models" + assert hasattr(OpenAIHTTPBackend, "TEXT_COMPLETIONS_PATH") + assert OpenAIHTTPBackend.TEXT_COMPLETIONS_PATH == "/v1/completions" + assert hasattr(OpenAIHTTPBackend, "CHAT_COMPLETIONS_PATH") + assert OpenAIHTTPBackend.CHAT_COMPLETIONS_PATH == "/v1/chat/completions" + assert hasattr(OpenAIHTTPBackend, "MODELS_KEY") + assert OpenAIHTTPBackend.MODELS_KEY == "models" + assert hasattr(OpenAIHTTPBackend, "TEXT_COMPLETIONS_KEY") + assert OpenAIHTTPBackend.TEXT_COMPLETIONS_KEY == "text_completions" + assert hasattr(OpenAIHTTPBackend, "CHAT_COMPLETIONS_KEY") + assert OpenAIHTTPBackend.CHAT_COMPLETIONS_KEY == "chat_completions" + + @pytest.mark.smoke + def test_initialization(self, valid_instances): + """Test OpenAIHTTPBackend initialization.""" + instance, constructor_args = valid_instances + assert isinstance(instance, OpenAIHTTPBackend) + expected_target = constructor_args["target"].rstrip("/").removesuffix("/v1") + assert instance.target == expected_target + if "model" in constructor_args: + assert instance.model == constructor_args["model"] + if "timeout" in constructor_args: + assert instance.timeout == constructor_args["timeout"] else: - assert not final_resp - final_resp = response - assert isinstance(response, ResponseSummary) - assert len(response.value) > 0 - assert response.request_args is not None - assert response.iterations > 0 - assert response.start_time > 0 - assert response.end_time == pytest.approx(time.time(), abs=0.01) - assert response.request_prompt_tokens is None - assert 
response.request_output_tokens is None - assert response.response_prompt_tokens == 3 - assert response.response_output_tokens > 0 # type: ignore - assert response.request_id == "test-id" - - index += 1 - - assert final_resp - - -@pytest.mark.smoke -@pytest.mark.asyncio -async def test_openai_http_backend_chat_completions_counts(httpx_openai_mock): - backend = OpenAIHTTPBackend( - target="http://target.mock", - model="mock-model", - max_output_tokens=100, + assert instance.timeout == 60.0 + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("target", ""), + ("timeout", -1.0), + ("http2", "invalid"), + ("verify", "invalid"), + ], ) - final_resp = None - - async for response in backend.chat_completions( - "Test Prompt", request_id="test-id", prompt_token_count=3, output_token_count=10 - ): - final_resp = response - - assert final_resp - assert isinstance(final_resp, ResponseSummary) - assert len(final_resp.value) > 0 - assert final_resp.request_args is not None - assert final_resp.request_prompt_tokens == 3 - assert final_resp.request_output_tokens == 10 - assert final_resp.response_prompt_tokens == 3 - assert final_resp.response_output_tokens == 10 - assert final_resp.request_id == "test-id" + def test_invalid_initialization_values(self, field, value): + """Test OpenAIHTTPBackend with invalid field values.""" + base_args = {"target": "http://localhost:8000"} + base_args[field] = value + # OpenAI backend doesn't validate types at init, accepts whatever is passed + backend = OpenAIHTTPBackend(**base_args) + assert getattr(backend, field) == value + + @pytest.mark.smoke + def test_factory_registration(self): + """Test that OpenAIHTTPBackend is registered with Backend factory.""" + assert Backend.is_registered("openai_http") + backend = Backend.create("openai_http", target="http://test") + assert isinstance(backend, OpenAIHTTPBackend) + assert backend.type_ == "openai_http" + + @pytest.mark.smoke + def test_initialization_minimal(self): + """Test minimal OpenAIHTTPBackend initialization.""" + backend = OpenAIHTTPBackend(target="http://localhost:8000") + + assert backend.target == "http://localhost:8000" + assert backend.model is None + assert backend.timeout == 60.0 + assert backend.http2 is True + assert backend.follow_redirects is True + assert backend.verify is False + assert backend.stream_response is True + assert backend._in_process is False + assert backend._async_client is None + + @pytest.mark.smoke + def test_initialization_full(self): + """Test full OpenAIHTTPBackend initialization.""" + extra_query = {"param": "value"} + extra_body = {"setting": "test"} + remove_from_body = ["unwanted"] + headers = {"Custom-Header": "value"} + + backend = OpenAIHTTPBackend( + target="https://localhost:8000/v1", + model="test-model", + api_key="test-key", + organization="test-org", + project="test-project", + timeout=120.0, + http2=False, + follow_redirects=False, + max_output_tokens=1000, + stream_response=False, + extra_query=extra_query, + extra_body=extra_body, + remove_from_body=remove_from_body, + headers=headers, + verify=True, + ) + + assert backend.target == "https://localhost:8000" + assert backend.model == "test-model" + assert backend.timeout == 120.0 + assert backend.http2 is False + assert backend.follow_redirects is False + assert backend.verify is True + assert backend.max_output_tokens == 1000 + assert backend.stream_response is False + assert backend.extra_query == extra_query + assert backend.extra_body == extra_body + assert backend.remove_from_body == 
remove_from_body
+
+    @pytest.mark.sanity
+    def test_target_normalization(self):
+        """Test target URL normalization."""
+        # Remove trailing slashes and /v1
+        backend1 = OpenAIHTTPBackend(target="http://localhost:8000/")
+        assert backend1.target == "http://localhost:8000"
+
+        backend2 = OpenAIHTTPBackend(target="http://localhost:8000/v1")
+        assert backend2.target == "http://localhost:8000"
+
+        backend3 = OpenAIHTTPBackend(target="http://localhost:8000/v1/")
+        assert backend3.target == "http://localhost:8000"
+
+    @pytest.mark.sanity
+    def test_header_building(self):
+        """Test header building logic."""
+        # Test with API key
+        backend1 = OpenAIHTTPBackend(target="http://test", api_key="test-key")
+        assert "Authorization" in backend1.headers
+        assert backend1.headers["Authorization"] == "Bearer test-key"
+
+        # Test with Bearer prefix already
+        backend2 = OpenAIHTTPBackend(target="http://test", api_key="Bearer test-key")
+        assert backend2.headers["Authorization"] == "Bearer test-key"
+
+        # Test with organization and project
+        backend3 = OpenAIHTTPBackend(
+            target="http://test", organization="test-org", project="test-project"
+        )
+        assert backend3.headers["OpenAI-Organization"] == "test-org"
+        assert backend3.headers["OpenAI-Project"] == "test-project"
+
+    @pytest.mark.smoke
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_info(self):
+        """Test info method."""
+        backend = OpenAIHTTPBackend(
+            target="http://test", model="test-model", timeout=30.0
+        )
+
+        info = backend.info()
+
+        assert info["target"] == "http://test"
+        assert info["model"] == "test-model"
+        assert info["timeout"] == 30.0
+        assert info["health_path"] == "/health"
+        assert info["models_path"] == "/v1/models"
+        assert info["text_completions_path"] == "/v1/completions"
+        assert info["chat_completions_path"] == "/v1/chat/completions"
+
+    @pytest.mark.smoke
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_process_startup(self):
+        """Test process startup."""
+        backend = OpenAIHTTPBackend(target="http://test")
+
+        assert not backend._in_process
+        assert backend._async_client is None
+
+        await backend.process_startup()
+
+        assert backend._in_process
+        assert backend._async_client is not None
+        assert isinstance(backend._async_client, httpx.AsyncClient)
+
+    @pytest.mark.smoke
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_process_startup_already_started(self):
+        """Test process startup when already started."""
+        backend = OpenAIHTTPBackend(target="http://test")
+        await backend.process_startup()
+
+        with pytest.raises(RuntimeError, match="Backend already started up"):
+            await backend.process_startup()
+
+    @pytest.mark.smoke
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_process_shutdown(self):
+        """Test process shutdown."""
+        backend = OpenAIHTTPBackend(target="http://test")
+        await backend.process_startup()
+
+        assert backend._in_process
+        assert backend._async_client is not None
+
+        await backend.process_shutdown()
+
+        assert not backend._in_process
+        assert backend._async_client is None
+
+    @pytest.mark.smoke
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_process_shutdown_not_started(self):
+        """Test process shutdown when not started."""
+        backend = OpenAIHTTPBackend(target="http://test")
+
+        with pytest.raises(RuntimeError, match="Backend not started up"):
+            await backend.process_shutdown()
+
+    @pytest.mark.sanity
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_check_in_process(self):
+        """Test _check_in_process method."""
+        backend = OpenAIHTTPBackend(target="http://test")
+
+        with pytest.raises(RuntimeError, match="Backend not started up"):
+            backend._check_in_process()
+
+        await backend.process_startup()
+        backend._check_in_process()  # Should not raise
+
+        await backend.process_shutdown()
+        with pytest.raises(RuntimeError, match="Backend not started up"):
+            backend._check_in_process()
+
+    @pytest.mark.sanity
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_available_models(self):
+        """Test available_models method."""
+        backend = OpenAIHTTPBackend(target="http://test")
+        await backend.process_startup()
+
+        mock_response = Mock()
+        mock_response.json.return_value = {
+            "data": [{"id": "test-model1"}, {"id": "test-model2"}]
+        }
+        mock_response.raise_for_status = Mock()
+
+        with patch.object(backend._async_client, "get", return_value=mock_response):
+            models = await backend.available_models()
+
+        assert models == ["test-model1", "test-model2"]
+        backend._async_client.get.assert_called_once()
+
+    @pytest.mark.sanity
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_default_model(self):
+        """Test default_model method."""
+        # Test when model is already set
+        backend1 = OpenAIHTTPBackend(target="http://test", model="test-model")
+        result1 = await backend1.default_model()
+        assert result1 == "test-model"
+
+        # Test when not in process
+        backend2 = OpenAIHTTPBackend(target="http://test")
+        result2 = await backend2.default_model()
+        assert result2 is None
+
+        # Test when in process but no model set
+        backend3 = OpenAIHTTPBackend(target="http://test")
+        await backend3.process_startup()
+
+        with patch.object(backend3, "available_models", return_value=["test-model2"]):
+            result3 = await backend3.default_model()
+            assert result3 == "test-model2"
+
+    @pytest.mark.regression
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_validate_with_model(self):
+        """Test validate method when model is set."""
+        backend = OpenAIHTTPBackend(target="http://test", model="test-model")
+        await backend.process_startup()
+
+        mock_response = Mock()
+        mock_response.raise_for_status = Mock()
+
+        with patch.object(backend._async_client, "get", return_value=mock_response):
+            await backend.validate()  # Should not raise
+
+        backend._async_client.get.assert_called_once_with(
+            "http://test/health", headers={"Content-Type": "application/json"}
+        )
+
+    @pytest.mark.regression
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_validate_without_model(self):
+        """Test validate method when no model is set."""
+        backend = OpenAIHTTPBackend(target="http://test")
+        await backend.process_startup()
+
+        with patch.object(backend, "available_models", return_value=["test-model"]):
+            await backend.validate()
+            assert backend.model == "test-model"
+
+    @pytest.mark.regression
+    @pytest.mark.asyncio
+    @async_timeout(10.0)
+    async def test_validate_fallback_to_text_completions(self):
+        """Test validate method fallback to text completions."""
+        backend = OpenAIHTTPBackend(target="http://test")
+        await backend.process_startup()
+
+        # Mock health and models endpoints to fail
+        def mock_get(*args, **kwargs):
+            raise httpx.HTTPStatusError("Error", request=Mock(), response=Mock())
+
+        # Mock text_completions to succeed
+        async def mock_text_completions(*args, **kwargs):
+            yield "test", UsageStats()
+
+        with (
+            
patch.object(backend._async_client, "get", side_effect=mock_get), + patch.object( + backend, "text_completions", side_effect=mock_text_completions + ), + ): + await backend.validate() # Should not raise + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_validate_failure(self): + """Test validate method when all validation methods fail.""" + backend = OpenAIHTTPBackend(target="http://test") + await backend.process_startup() + + def mock_fail(*args, **kwargs): + raise httpx.HTTPStatusError("Error", request=Mock(), response=Mock()) + + def mock_http_error(*args, **kwargs): + raise httpx.HTTPStatusError("Error", request=Mock(), response=Mock()) + + with ( + patch.object(backend._async_client, "get", side_effect=mock_http_error), + patch.object(backend, "text_completions", side_effect=mock_http_error), + pytest.raises(RuntimeError, match="Backend validation failed"), + ): + await backend.validate() + + @pytest.mark.sanity + def test_get_headers(self): + """Test _get_headers method.""" + backend = OpenAIHTTPBackend( + target="http://test", api_key="test-key", headers={"Custom": "value"} + ) + + headers = backend._get_headers() + + expected = { + "Content-Type": "application/json", + "Authorization": "Bearer test-key", + "Custom": "value", + } + assert headers == expected + + @pytest.mark.sanity + def test_get_params(self): + """Test _get_params method.""" + extra_query = { + "general": "value", + "text_completions": {"specific": "text"}, + "chat_completions": {"specific": "chat"}, + } + + backend = OpenAIHTTPBackend(target="http://test", extra_query=extra_query) + + # Test endpoint-specific params + text_params = backend._get_params("text_completions") + assert text_params == {"specific": "text"} + + # Test fallback to general params + other_params = backend._get_params("other") + assert other_params == extra_query + + @pytest.mark.regression + def test_get_chat_messages_string(self): + """Test _get_chat_messages with string content.""" + backend = OpenAIHTTPBackend(target="http://test") + + messages = backend._get_chat_messages("Hello world") + + expected = [{"role": "user", "content": "Hello world"}] + assert messages == expected + + @pytest.mark.regression + def test_get_chat_messages_list(self): + """Test _get_chat_messages with list content.""" + backend = OpenAIHTTPBackend(target="http://test") + + content = [ + "Hello", + {"type": "text", "text": "world"}, + {"role": "assistant", "content": "existing message"}, + ] + + messages = backend._get_chat_messages(content) + + expected = [ + { + "role": "user", + "content": [ + {"type": "text", "text": "Hello"}, + {"type": "text", "text": "world"}, + {"role": "assistant", "content": "existing message"}, + ], + } + ] + assert messages == expected + + @pytest.mark.regression + def test_get_chat_messages_invalid(self): + """Test _get_chat_messages with invalid content.""" + backend = OpenAIHTTPBackend(target="http://test") + + with pytest.raises(ValueError, match="Unsupported content type"): + backend._get_chat_messages(123) + + with pytest.raises(ValueError, match="Unsupported content item type"): + backend._get_chat_messages([123]) + + @pytest.mark.regression + def test_get_chat_message_media_item_image(self): + """Test _get_chat_message_media_item with PIL Image.""" + backend = OpenAIHTTPBackend(target="http://test") + + # Create a mock PIL Image + mock_image = Mock(spec=Image.Image) + mock_image.tobytes.return_value = b"fake_image_data" + + result = backend._get_chat_message_media_item(mock_image) + + 
expected_data = base64.b64encode(b"fake_image_data").decode("utf-8") + expected = { + "type": "image", + "image": {"url": f"data:image/jpeg;base64,{expected_data}"}, + } + assert result == expected + + @pytest.mark.regression + def test_get_chat_message_media_item_path(self): + """Test _get_chat_message_media_item with file paths.""" + backend = OpenAIHTTPBackend(target="http://test") + + # Test unsupported file type + unsupported_path = Path("test.txt") + with pytest.raises(ValueError, match="Unsupported file type: .txt"): + backend._get_chat_message_media_item(unsupported_path) + + @pytest.mark.regression + def test_get_body(self): + """Test _get_body method.""" + extra_body = {"general": "value", "text_completions": {"temperature": 0.5}} + + backend = OpenAIHTTPBackend( + target="http://test", + model="test-model", + max_output_tokens=1000, + extra_body=extra_body, + ) + + request_kwargs = {"temperature": 0.7} + + body = backend._get_body( + endpoint_type="text_completions", + request_kwargs=request_kwargs, + max_output_tokens=500, + prompt="test", + ) + + # Check that max_tokens settings are applied + assert body["temperature"] == 0.7 # request_kwargs override extra_body + assert body["model"] == "test-model" + assert body["max_tokens"] == 500 + assert body["max_completion_tokens"] == 500 + assert body["ignore_eos"] is True + assert body["prompt"] == "test" + # stop: None is filtered out by the None filter + assert "stop" not in body + + @pytest.mark.regression + def test_get_completions_text_content(self): + """Test _get_completions_text_content method.""" + backend = OpenAIHTTPBackend(target="http://test") + + # Test with text field + data1 = {"choices": [{"text": "generated text"}]} + result1 = backend._get_completions_text_content(data1) + assert result1 == "generated text" + + # Test with delta content field + data2 = {"choices": [{"delta": {"content": "delta text"}}]} + result2 = backend._get_completions_text_content(data2) + assert result2 == "delta text" + + # Test with no choices + data3: dict[str, list] = {"choices": []} + result3 = backend._get_completions_text_content(data3) + assert result3 is None + + # Test with no choices key + data4: dict[str, str] = {} + result4 = backend._get_completions_text_content(data4) + assert result4 is None + + @pytest.mark.regression + def test_get_completions_usage_stats(self): + """Test _get_completions_usage_stats method.""" + backend = OpenAIHTTPBackend(target="http://test") + + # Test with usage data + data1 = {"usage": {"prompt_tokens": 50, "completion_tokens": 100}} + result1 = backend._get_completions_usage_stats(data1) + assert isinstance(result1, UsageStats) + assert result1.prompt_tokens == 50 + assert result1.output_tokens == 100 + + # Test with no usage data + data2: dict[str, str] = {} + result2 = backend._get_completions_usage_stats(data2) + assert result2 is None + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_resolve_not_implemented_history(self): + """Test resolve method raises error for conversation history.""" + backend = OpenAIHTTPBackend(target="http://test") + await backend.process_startup() + + request = GenerationRequest(content="test") + request_info = ScheduledRequestInfo( + request_id="test-id", + status="pending", + scheduler_node_id=1, + scheduler_process_id=1, + scheduler_start_time=123.0, + request_timings=GenerationRequestTimings(), + ) + history = [(request, GenerationResponse(request_id="test", request_args={}))] + + with pytest.raises(NotImplementedError, 
match="Multi-turn requests"): + async for _ in backend.resolve(request, request_info, history): + pass + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_resolve_text_completions(self): + """Test resolve method for text completions.""" + backend = OpenAIHTTPBackend(target="http://test") + await backend.process_startup() + + request = GenerationRequest( + content="test prompt", + request_type="text_completions", + params={"temperature": 0.7}, + constraints={"output_tokens": 100}, + ) + request_info = ScheduledRequestInfo( + request_id="test-id", + status="pending", + scheduler_node_id=1, + scheduler_process_id=1, + scheduler_start_time=123.0, + request_timings=GenerationRequestTimings(), + ) + + # Mock text_completions method + async def mock_text_completions(*args, **kwargs): + yield None, None # Start signal + yield "Hello", None # First token + yield " world", UsageStats(prompt_tokens=10, output_tokens=2) # Final + + with patch.object( + backend, "text_completions", side_effect=mock_text_completions + ): + responses = [] + async for response, info in backend.resolve(request, request_info): + responses.append((response, info)) + + assert len(responses) >= 2 + final_response = responses[-1][0] + assert final_response.value == "Hello world" + assert final_response.request_id == request.request_id + assert final_response.iterations == 2 + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_resolve_chat_completions(self): + """Test resolve method for chat completions.""" + backend = OpenAIHTTPBackend(target="http://test") + await backend.process_startup() + + request = GenerationRequest( + content="test message", + request_type="chat_completions", + params={"temperature": 0.5}, + ) + request_info = ScheduledRequestInfo( + request_id="test-id", + status="pending", + scheduler_node_id=1, + scheduler_process_id=1, + scheduler_start_time=123.0, + request_timings=GenerationRequestTimings(), + ) + + # Mock chat_completions method + async def mock_chat_completions(*args, **kwargs): + yield None, None # Start signal + yield "Response", UsageStats(prompt_tokens=5, output_tokens=1) + + with patch.object( + backend, "chat_completions", side_effect=mock_chat_completions + ): + responses = [] + async for response, info in backend.resolve(request, request_info): + responses.append((response, info)) + + final_response = responses[-1][0] + assert final_response.value == "Response" + assert final_response.request_id == request.request_id + + +class TestOpenAICompletions: + """Test cases for completion methods.""" + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_text_completions_not_in_process(self): + """Test text_completions when backend not started.""" + backend = OpenAIHTTPBackend(target="http://test") + + with pytest.raises(RuntimeError, match="Backend not started up"): + async for _ in backend.text_completions("test", "req-id"): + pass + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_text_completions_basic(self): + """Test basic text_completions functionality.""" + backend = OpenAIHTTPBackend(target="http://test", model="gpt-4") + await backend.process_startup() + + try: + mock_response = Mock() + mock_response.raise_for_status = Mock() + mock_response.json.return_value = { + "choices": [{"text": "Generated text"}], + "usage": {"prompt_tokens": 10, "completion_tokens": 5}, + } + + with patch.object( + backend._async_client, "post", 
return_value=mock_response + ): + results = [] + async for result in backend.text_completions( + prompt="test prompt", request_id="req-123", stream_response=False + ): + results.append(result) + + assert len(results) == 2 + assert results[0] == (None, None) # Initial yield + assert results[1][0] == "Generated text" + assert isinstance(results[1][1], UsageStats) + assert results[1][1].prompt_tokens == 10 + assert results[1][1].output_tokens == 5 + finally: + await backend.process_shutdown() + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_chat_completions_not_in_process(self): + """Test chat_completions when backend not started.""" + backend = OpenAIHTTPBackend(target="http://test") + + with pytest.raises(RuntimeError, match="Backend not started up"): + async for _ in backend.chat_completions("test"): + pass + + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_chat_completions_basic(self): + """Test basic chat_completions functionality.""" + backend = OpenAIHTTPBackend(target="http://test", model="gpt-4") + await backend.process_startup() + + try: + mock_response = Mock() + mock_response.raise_for_status = Mock() + mock_response.json.return_value = { + "choices": [{"delta": {"content": "Chat response"}}], + "usage": {"prompt_tokens": 8, "completion_tokens": 3}, + } + + with patch.object( + backend._async_client, "post", return_value=mock_response + ): + results = [] + async for result in backend.chat_completions( + content="Hello", request_id="req-456", stream_response=False + ): + results.append(result) + + assert len(results) == 2 + assert results[0] == (None, None) + assert results[1][0] == "Chat response" + assert isinstance(results[1][1], UsageStats) + assert results[1][1].prompt_tokens == 8 + assert results[1][1].output_tokens == 3 + finally: + await backend.process_shutdown() + + @pytest.mark.sanity + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_text_completions_with_parameters(self): + """Test text_completions with additional parameters.""" + backend = OpenAIHTTPBackend(target="http://test", model="gpt-4") + await backend.process_startup() + + try: + mock_response = Mock() + mock_response.raise_for_status = Mock() + mock_response.json.return_value = { + "choices": [{"text": "response"}], + "usage": {"prompt_tokens": 5, "completion_tokens": 1}, + } + + with patch.object( + backend._async_client, "post", return_value=mock_response + ) as mock_post: + async for _ in backend.text_completions( + prompt="test", + request_id="req-123", + output_token_count=50, + temperature=0.7, + stream_response=False, + ): + pass + + # Check that the request body contains expected parameters + call_args = mock_post.call_args + body = call_args[1]["json"] + assert body["max_tokens"] == 50 + assert body["temperature"] == 0.7 + assert body["model"] == "gpt-4" + finally: + await backend.process_shutdown() + + @pytest.mark.sanity + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_chat_completions_content_formatting(self): + """Test chat_completions content formatting.""" + backend = OpenAIHTTPBackend(target="http://test", model="gpt-4") + await backend.process_startup() + + try: + mock_response = Mock() + mock_response.raise_for_status = Mock() + mock_response.json.return_value = { + "choices": [{"delta": {"content": "response"}}] + } + + with patch.object( + backend._async_client, "post", return_value=mock_response + ) as mock_post: + async for _ in backend.chat_completions( + content="Hello world", 
stream_response=False + ): + pass + + call_args = mock_post.call_args + body = call_args[1]["json"] + expected_messages = [{"role": "user", "content": "Hello world"}] + assert body["messages"] == expected_messages + finally: + await backend.process_shutdown() + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_validate_no_models_available(self): + """Test validate method when no models are available.""" + backend = OpenAIHTTPBackend(target="http://test") + await backend.process_startup() + + try: + # Mock endpoints to fail, then available_models to return empty list + def mock_get_fail(*args, **kwargs): + raise httpx.HTTPStatusError("Error", request=Mock(), response=Mock()) + + with ( + patch.object(backend._async_client, "get", side_effect=mock_get_fail), + patch.object(backend, "available_models", return_value=[]), + patch.object(backend, "text_completions", side_effect=mock_get_fail), + pytest.raises( + RuntimeError, + match="No model available and could not set a default model", + ), + ): + await backend.validate() + finally: + await backend.process_shutdown() + + @pytest.mark.sanity + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_text_completions_streaming(self): + """Test text_completions with streaming enabled.""" + backend = OpenAIHTTPBackend(target="http://test", model="gpt-4") + await backend.process_startup() + + try: + # Mock streaming response + mock_stream = Mock() + mock_stream.raise_for_status = Mock() + + async def mock_aiter_lines(): + lines = [ + 'data: {"choices":[{"text":"Hello"}], "usage":{"prompt_tokens":5,"completion_tokens":1}}', # noqa: E501 + 'data: {"choices":[{"text":" world"}], "usage":{"prompt_tokens":5,"completion_tokens":2}}', # noqa: E501 + 'data: {"choices":[{"text":"!"}], "usage":{"prompt_tokens":5,"completion_tokens":3}}', # noqa: E501 + "data: [DONE]", + ] + for line in lines: + yield line + + mock_stream.aiter_lines = mock_aiter_lines + + mock_client_stream = AsyncMock() + mock_client_stream.__aenter__ = AsyncMock(return_value=mock_stream) + mock_client_stream.__aexit__ = AsyncMock(return_value=None) + + with patch.object( + backend._async_client, "stream", return_value=mock_client_stream + ): + results = [] + async for result in backend.text_completions( + prompt="test prompt", request_id="req-123", stream_response=True + ): + results.append(result) + + # Should get initial None, then tokens, then final with usage + assert len(results) >= 3 + assert results[0] == (None, None) # Initial yield + assert all( + isinstance(result[0], str) for result in results[1:] + ) # Has text content + assert all( + isinstance(result[1], UsageStats) for result in results[1:] + ) # Has usage stats + assert all( + result[1].output_tokens == i for i, result in enumerate(results[1:], 1) + ) + finally: + await backend.process_shutdown() + + @pytest.mark.sanity + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_chat_completions_streaming(self): + """Test chat_completions with streaming enabled.""" + backend = OpenAIHTTPBackend(target="http://test", model="gpt-4") + await backend.process_startup() + + try: + # Mock streaming response + mock_stream = Mock() + mock_stream.raise_for_status = Mock() + + async def mock_aiter_lines(): + lines = [ + 'data: {"choices":[{"delta":{"content":"Hi"}}]}', + 'data: {"choices":[{"delta":{"content":" there"}}]}', + 'data: {"choices":[{"delta":{"content":"!"}}]}', + 'data: {"usage":{"prompt_tokens":3,"completion_tokens":3}}', + "data: [DONE]", + ] + for line in lines: + yield 
line + + mock_stream.aiter_lines = mock_aiter_lines + + mock_client_stream = AsyncMock() + mock_client_stream.__aenter__ = AsyncMock(return_value=mock_stream) + mock_client_stream.__aexit__ = AsyncMock(return_value=None) + + with patch.object( + backend._async_client, "stream", return_value=mock_client_stream + ): + results = [] + async for result in backend.chat_completions( + content="Hello", request_id="req-456", stream_response=True + ): + results.append(result) + + # Should get initial None, then deltas, then final with usage + assert len(results) >= 3 + assert results[0] == (None, None) # Initial yield + assert any(result[0] for result in results if result[0]) # Has content + assert any(result[1] for result in results if result[1]) # Has usage stats + finally: + await backend.process_shutdown() + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_streaming_response_edge_cases(self): + """Test streaming response edge cases for line processing.""" + backend = OpenAIHTTPBackend(target="http://test", model="gpt-4") + await backend.process_startup() + + try: + # Mock streaming response with edge cases + mock_stream = Mock() + mock_stream.raise_for_status = Mock() + + async def mock_aiter_lines(): + lines = [ + "", # Empty line + " ", # Whitespace only + "not data line", # Line without data prefix + 'data: {"choices":[{"text":"Hello"}]}', # Valid data + "data: [DONE]", # End marker + ] + for line in lines: + yield line + + mock_stream.aiter_lines = mock_aiter_lines + + mock_client_stream = AsyncMock() + mock_client_stream.__aenter__ = AsyncMock(return_value=mock_stream) + mock_client_stream.__aexit__ = AsyncMock(return_value=None) + + with patch.object( + backend._async_client, "stream", return_value=mock_client_stream + ): + results = [] + async for result in backend.text_completions( + prompt="test", request_id="req-123", stream_response=True + ): + results.append(result) + + # Should get initial None and the valid response + assert len(results) == 2 + assert results[0] == (None, None) + assert results[1][0] == "Hello" + finally: + await backend.process_shutdown() + + @pytest.mark.sanity + def test_get_chat_message_media_item_jpeg_file(self): + """Test _get_chat_message_media_item with JPEG file path.""" + backend = OpenAIHTTPBackend(target="http://test") + + # Create a mock Path object for JPEG file + mock_jpeg_path = Mock(spec=Path) + mock_jpeg_path.suffix.lower.return_value = ".jpg" + + # Mock Image.open to return a mock image + mock_image = Mock(spec=Image.Image) + mock_image.tobytes.return_value = b"fake_jpeg_data" + + with patch("guidellm.backend.openai.Image.open", return_value=mock_image): + result = backend._get_chat_message_media_item(mock_jpeg_path) + + expected_data = base64.b64encode(b"fake_jpeg_data").decode("utf-8") + expected = { + "type": "image", + "image": {"url": f"data:image/jpeg;base64,{expected_data}"}, + } + assert result == expected + + @pytest.mark.sanity + def test_get_chat_message_media_item_wav_file(self): + """Test _get_chat_message_media_item with WAV file path.""" + backend = OpenAIHTTPBackend(target="http://test") + + # Create a mock Path object for WAV file + mock_wav_path = Mock(spec=Path) + mock_wav_path.suffix.lower.return_value = ".wav" + mock_wav_path.read_bytes.return_value = b"fake_wav_data" + + result = backend._get_chat_message_media_item(mock_wav_path) + + expected_data = base64.b64encode(b"fake_wav_data").decode("utf-8") + expected = { + "type": "input_audio", + "input_audio": {"data": expected_data, 
"format": "wav"}, + } + assert result == expected + + @pytest.mark.sanity + def test_get_chat_messages_with_pil_image(self): + """Test _get_chat_messages with PIL Image in content list.""" + backend = OpenAIHTTPBackend(target="http://test") + + # Create a mock PIL Image + mock_image = Mock(spec=Image.Image) + mock_image.tobytes.return_value = b"fake_image_bytes" + + content = ["Hello", mock_image, "world"] + + result = backend._get_chat_messages(content) + + # Should have one user message with mixed content + assert len(result) == 1 + assert result[0]["role"] == "user" + assert len(result[0]["content"]) == 3 + + # Check text items + assert result[0]["content"][0] == {"type": "text", "text": "Hello"} + assert result[0]["content"][2] == {"type": "text", "text": "world"} + + # Check image item + image_item = result[0]["content"][1] + assert image_item["type"] == "image" + assert "data:image/jpeg;base64," in image_item["image"]["url"] + + @pytest.mark.regression + @pytest.mark.asyncio + @async_timeout(10.0) + async def test_resolve_timing_edge_cases(self): + """Test resolve method timing edge cases.""" + backend = OpenAIHTTPBackend(target="http://test") + await backend.process_startup() + + try: + request = GenerationRequest( + content="test prompt", + request_type="text_completions", + constraints={"output_tokens": 50}, + ) + request_info = ScheduledRequestInfo( + request_id="test-id", + status="pending", + scheduler_node_id=1, + scheduler_process_id=1, + scheduler_start_time=123.0, + request_timings=GenerationRequestTimings(), + ) + + # Mock text_completions to test timing edge cases + async def mock_text_completions(*args, **kwargs): + yield None, None # Initial yield - tests line 343 + yield "token1", None # First token + yield "token2", UsageStats(prompt_tokens=10, output_tokens=2) # Final + + with patch.object( + backend, "text_completions", side_effect=mock_text_completions + ): + responses = [] + async for response, info in backend.resolve(request, request_info): + responses.append((response, info)) + + # Check that timing was properly set + final_response, final_info = responses[-1] + assert final_info.request_timings.request_start is not None + assert final_info.request_timings.first_iteration is not None + assert final_info.request_timings.last_iteration is not None + assert final_info.request_timings.request_end is not None + assert final_response.delta is None # Tests line 362 + + finally: + await backend.process_shutdown() diff --git a/tests/unit/backend/test_openai_backend_custom_configs.py b/tests/unit/backend/test_openai_backend_custom_configs.py deleted file mode 100644 index 5855152d..00000000 --- a/tests/unit/backend/test_openai_backend_custom_configs.py +++ /dev/null @@ -1,88 +0,0 @@ -import pytest - -from guidellm.backend import OpenAIHTTPBackend -from guidellm.settings import settings - - -@pytest.mark.smoke -def test_openai_http_backend_default_initialization(): - backend = OpenAIHTTPBackend() - assert backend.verify is True - - -@pytest.mark.smoke -def test_openai_http_backend_custom_ssl_verification(): - backend = OpenAIHTTPBackend(verify=False) - assert backend.verify is False - - -@pytest.mark.smoke -def test_openai_http_backend_custom_headers_override(): - # Set a default api_key, which would normally create an Authorization header - settings.openai.api_key = "default-api-key" - - # Set custom headers that override the default Authorization and add a new header - openshift_token = "Bearer sha256~xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" - override_headers = { - 
"Authorization": openshift_token, - "Custom-Header": "Custom-Value", - } - - # Initialize the backend - backend = OpenAIHTTPBackend(headers=override_headers) - - # Check that the override headers are used - assert backend.headers["Authorization"] == openshift_token - assert backend.headers["Custom-Header"] == "Custom-Value" - assert len(backend.headers) == 2 - - # Reset the settings - settings.openai.api_key = None - settings.openai.headers = None - - -@pytest.mark.smoke -def test_openai_http_backend_kwarg_headers_override_settings(): - # Set headers via settings (simulating environment variables) - settings.openai.headers = {"Authorization": "Bearer settings-token"} - - # Set different headers via kwargs (simulating --backend-args) - override_headers = { - "Authorization": "Bearer kwargs-token", - "Custom-Header": "Custom-Value", - } - - # Initialize the backend with kwargs - backend = OpenAIHTTPBackend(headers=override_headers) - - # Check that the kwargs headers took precedence - assert backend.headers["Authorization"] == "Bearer kwargs-token" - assert backend.headers["Custom-Header"] == "Custom-Value" - assert len(backend.headers) == 2 - - # Reset the settings - settings.openai.headers = None - - -@pytest.mark.smoke -def test_openai_http_backend_remove_header_with_none(): - # Set a default api_key, which would normally create an Authorization header - settings.openai.api_key = "default-api-key" - - # Set a custom header and explicitly set Authorization to None to remove it - override_headers = { - "Authorization": None, - "Custom-Header": "Custom-Value", - } - - # Initialize the backend - backend = OpenAIHTTPBackend(headers=override_headers) - - # Check that the Authorization header is removed and the custom header is present - assert "Authorization" not in backend.headers - assert backend.headers["Custom-Header"] == "Custom-Value" - assert len(backend.headers) == 1 - - # Reset the settings - settings.openai.api_key = None - settings.openai.headers = None diff --git a/tests/unit/backend/test_response.py b/tests/unit/backend/test_response.py deleted file mode 100644 index b3dc99c9..00000000 --- a/tests/unit/backend/test_response.py +++ /dev/null @@ -1,192 +0,0 @@ -from typing import get_args - -import pytest - -from guidellm.backend import ( - RequestArgs, - ResponseSummary, - StreamingResponseType, - StreamingTextResponse, -) - - -@pytest.mark.smoke -def test_streaming_response_types(): - valid_types = get_args(StreamingResponseType) - assert valid_types == ("start", "iter") - - -@pytest.mark.smoke -def test_streaming_text_response_default_initilization(): - response = StreamingTextResponse( - type_="start", - value="", - start_time=0.0, - first_iter_time=None, - iter_count=0, - delta="", - time=0.0, - ) - assert response.request_id is None - - -@pytest.mark.smoke -def test_streaming_text_response_initialization(): - response = StreamingTextResponse( - type_="start", - value="Hello, world!", - start_time=0.0, - first_iter_time=0.0, - iter_count=1, - delta="Hello, world!", - time=1.0, - request_id="123", - ) - assert response.type_ == "start" - assert response.value == "Hello, world!" - assert response.start_time == 0.0 - assert response.first_iter_time == 0.0 - assert response.iter_count == 1 - assert response.delta == "Hello, world!" 
- assert response.time == 1.0 - assert response.request_id == "123" - - -@pytest.mark.smoke -def test_streaming_text_response_marshalling(): - response = StreamingTextResponse( - type_="start", - value="Hello, world!", - start_time=0.0, - first_iter_time=0.0, - iter_count=0, - delta="Hello, world!", - time=1.0, - request_id="123", - ) - serialized = response.model_dump() - deserialized = StreamingTextResponse.model_validate(serialized) - - for key, value in vars(response).items(): - assert getattr(deserialized, key) == value - - -@pytest.mark.smoke -def test_request_args_default_initialization(): - args = RequestArgs( - target="http://example.com", - headers={}, - params={}, - payload={}, - ) - assert args.timeout is None - assert args.http2 is None - assert args.follow_redirects is None - - -@pytest.mark.smoke -def test_request_args_initialization(): - args = RequestArgs( - target="http://example.com", - headers={ - "Authorization": "Bearer token", - }, - params={}, - payload={ - "query": "Hello, world!", - }, - timeout=10.0, - http2=True, - follow_redirects=True, - ) - assert args.target == "http://example.com" - assert args.headers == {"Authorization": "Bearer token"} - assert args.payload == {"query": "Hello, world!"} - assert args.timeout == 10.0 - assert args.http2 is True - assert args.follow_redirects is True - - -@pytest.mark.smoke -def test_response_args_marshalling(): - args = RequestArgs( - target="http://example.com", - headers={"Authorization": "Bearer token"}, - params={}, - payload={"query": "Hello, world!"}, - timeout=10.0, - http2=True, - ) - serialized = args.model_dump() - deserialized = RequestArgs.model_validate(serialized) - - for key, value in vars(args).items(): - assert getattr(deserialized, key) == value - - -@pytest.mark.smoke -def test_response_summary_default_initialization(): - summary = ResponseSummary( - value="Hello, world!", - request_args=RequestArgs( - target="http://example.com", - headers={}, - params={}, - payload={}, - ), - start_time=0.0, - end_time=0.0, - first_iter_time=None, - last_iter_time=None, - ) - assert summary.value == "Hello, world!" - assert summary.request_args.target == "http://example.com" - assert summary.request_args.headers == {} - assert summary.request_args.payload == {} - assert summary.start_time == 0.0 - assert summary.end_time == 0.0 - assert summary.first_iter_time is None - assert summary.last_iter_time is None - assert summary.iterations == 0 - assert summary.request_prompt_tokens is None - assert summary.request_output_tokens is None - assert summary.response_prompt_tokens is None - assert summary.response_output_tokens is None - assert summary.request_id is None - - -@pytest.mark.smoke -def test_response_summary_initialization(): - summary = ResponseSummary( - value="Hello, world!", - request_args=RequestArgs( - target="http://example.com", - headers={}, - params={}, - payload={}, - ), - start_time=1.0, - end_time=2.0, - iterations=3, - first_iter_time=1.0, - last_iter_time=2.0, - request_prompt_tokens=5, - request_output_tokens=10, - response_prompt_tokens=5, - response_output_tokens=10, - request_id="123", - ) - assert summary.value == "Hello, world!" 
- assert summary.request_args.target == "http://example.com" - assert summary.request_args.headers == {} - assert summary.request_args.payload == {} - assert summary.start_time == 1.0 - assert summary.end_time == 2.0 - assert summary.iterations == 3 - assert summary.first_iter_time == 1.0 - assert summary.last_iter_time == 2.0 - assert summary.request_prompt_tokens == 5 - assert summary.request_output_tokens == 10 - assert summary.response_prompt_tokens == 5 - assert summary.response_output_tokens == 10 - assert summary.request_id == "123" From a88605eaf7e86b8c252c144d44d5a85297d2a97f Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 12:14:16 +0000 Subject: [PATCH 11/90] fixes from copilot review and standardize backend package to backends for plural Signed-off-by: Mark Kurtz --- src/guidellm/__main__.py | 2 +- .../{backend => backends}/__init__.py | 0 src/guidellm/{backend => backends}/backend.py | 2 +- src/guidellm/{backend => backends}/objects.py | 0 src/guidellm/{backend => backends}/openai.py | 22 +++++++++++++------ src/guidellm/benchmark/aggregator.py | 2 +- src/guidellm/benchmark/benchmarker.py | 2 +- src/guidellm/benchmark/entrypoints.py | 2 +- src/guidellm/benchmark/scenario.py | 2 +- tests/unit/backend/test_backend.py | 10 ++++----- tests/unit/backend/test_objects.py | 2 +- tests/unit/backend/test_openai_backend.py | 6 ++--- tests/unit/conftest.py | 2 +- tests/unit/mock_backend.py | 2 +- tests/unit/utils/test_encoding.py | 2 +- tests/unit/utils/test_messaging.py | 2 +- 16 files changed, 34 insertions(+), 26 deletions(-) rename src/guidellm/{backend => backends}/__init__.py (100%) rename src/guidellm/{backend => backends}/backend.py (98%) rename src/guidellm/{backend => backends}/objects.py (100%) rename src/guidellm/{backend => backends}/openai.py (97%) diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index f82c19cf..f222a36f 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -6,7 +6,7 @@ import click from pydantic import ValidationError -from guidellm.backend import BackendType +from guidellm.backends import BackendType from guidellm.benchmark import ( ProfileType, reimport_benchmarks_report, diff --git a/src/guidellm/backend/__init__.py b/src/guidellm/backends/__init__.py similarity index 100% rename from src/guidellm/backend/__init__.py rename to src/guidellm/backends/__init__.py diff --git a/src/guidellm/backend/backend.py b/src/guidellm/backends/backend.py similarity index 98% rename from src/guidellm/backend/backend.py rename to src/guidellm/backends/backend.py index c9a73535..8f91d5e7 100644 --- a/src/guidellm/backend/backend.py +++ b/src/guidellm/backends/backend.py @@ -16,7 +16,7 @@ from abc import abstractmethod from typing import Literal -from guidellm.backend.objects import ( +from guidellm.backends.objects import ( GenerationRequest, GenerationResponse, ) diff --git a/src/guidellm/backend/objects.py b/src/guidellm/backends/objects.py similarity index 100% rename from src/guidellm/backend/objects.py rename to src/guidellm/backends/objects.py diff --git a/src/guidellm/backend/openai.py b/src/guidellm/backends/openai.py similarity index 97% rename from src/guidellm/backend/openai.py rename to src/guidellm/backends/openai.py index d616be6a..ce83076f 100644 --- a/src/guidellm/backend/openai.py +++ b/src/guidellm/backends/openai.py @@ -23,8 +23,8 @@ from PIL import Image from pydantic import dataclasses -from guidellm.backend.backend import Backend -from guidellm.backend.objects import ( +from guidellm.backends.backend 
import Backend +from guidellm.backends.objects import ( GenerationRequest, GenerationRequestTimings, GenerationResponse, @@ -351,8 +351,8 @@ async def resolve( if usage_stats is not None: request_info.request_timings.request_end = time.time() - response.request_output_tokens = usage_stats.output_tokens - response.request_prompt_tokens = usage_stats.prompt_tokens + response.response_output_tokens = usage_stats.output_tokens + response.response_prompt_tokens = usage_stats.prompt_tokens yield response, request_info @@ -602,7 +602,7 @@ def _get_body( **kwargs, ) -> dict[str, Any]: # Start with endpoint-specific extra body parameters - extra_body = self.extra_body.get(endpoint_type, self.extra_body) + extra_body: dict = self.extra_body.get(endpoint_type, self.extra_body) body = copy.deepcopy(extra_body) body.update(request_kwargs or {}) @@ -622,14 +622,22 @@ def _get_body( if max_output_tokens: body.update({"stop": None, "ignore_eos": True}) + if self.remove_from_body: + for key in self.remove_from_body: + body.pop(key, None) + return {key: val for key, val in body.items() if val is not None} def _get_completions_text_content(self, data: dict) -> Optional[str]: if not data.get("choices"): return None - choice = data["choices"][0] - return choice.get("text") or choice.get("delta", {}).get("content") + choice: dict = data["choices"][0] + return ( + choice.get("text") + or choice.get("delta", {}).get("content") + or choice.get("message", {}).get("content") + ) def _get_completions_usage_stats(self, data: dict) -> Optional[UsageStats]: if not data.get("usage"): diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index 9e6ffd68..42f54100 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -12,7 +12,7 @@ from pydantic import Field -from guidellm.backend import ResponseSummary +from guidellm.backends import ResponseSummary from guidellm.benchmark.benchmark import ( BenchmarkArgs, BenchmarkRunStats, diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index 0e34e322..e1ff7efc 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -14,7 +14,7 @@ from pydantic import Field from transformers import PreTrainedTokenizerBase # type: ignore # noqa: PGH003 -from guidellm.backend import Backend, ResponseSummary +from guidellm.backends import Backend, ResponseSummary from guidellm.benchmark.aggregator import ( AggregatorT, BenchmarkT, diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 2ef85c3e..5feb23e7 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -7,7 +7,7 @@ PreTrainedTokenizerBase, ) -from guidellm.backend import Backend, BackendType +from guidellm.backends import Backend, BackendType from guidellm.benchmark.benchmarker import GenerativeBenchmarker from guidellm.benchmark.output import ( GenerativeBenchmarksConsole, diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 042b25b1..6d6348d9 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -9,7 +9,7 @@ PreTrainedTokenizerBase, ) -from guidellm.backend.backend import BackendType +from guidellm.backends.backend import BackendType from guidellm.benchmark.profile import ProfileType from guidellm.objects.pydantic import StandardBaseModel from guidellm.scheduler.strategies import StrategyType diff --git 
a/tests/unit/backend/test_backend.py b/tests/unit/backend/test_backend.py index 1cdb672b..49b65077 100644 --- a/tests/unit/backend/test_backend.py +++ b/tests/unit/backend/test_backend.py @@ -12,8 +12,8 @@ import pytest -from guidellm.backend.backend import Backend, BackendType -from guidellm.backend.objects import ( +from guidellm.backends.backend import Backend, BackendType +from guidellm.backends.objects import ( GenerationRequest, GenerationRequestTimings, ) @@ -244,7 +244,7 @@ class TestBackendRegistry: @pytest.mark.smoke def test_openai_backend_registered(self): """Test that OpenAI HTTP backend is registered.""" - from guidellm.backend.openai import OpenAIHTTPBackend + from guidellm.backends.openai import OpenAIHTTPBackend # OpenAI backend should be registered backend = Backend.create("openai_http", target="http://test") @@ -262,7 +262,7 @@ def test_backend_create_invalid_type(self): @pytest.mark.smoke def test_backend_registry_functionality(self): """Test that backend registry functions work.""" - from guidellm.backend.openai import OpenAIHTTPBackend + from guidellm.backends.openai import OpenAIHTTPBackend # Test that we can get registered backends openai_class = Backend.get_registered_object("openai_http") @@ -327,6 +327,6 @@ def test_backend_registered_objects(self): assert len(registered) > 0 # Check that openai backend is in the registered objects - from guidellm.backend.openai import OpenAIHTTPBackend + from guidellm.backends.openai import OpenAIHTTPBackend assert OpenAIHTTPBackend in registered diff --git a/tests/unit/backend/test_objects.py b/tests/unit/backend/test_objects.py index 2f91a76b..34a6350c 100644 --- a/tests/unit/backend/test_objects.py +++ b/tests/unit/backend/test_objects.py @@ -9,7 +9,7 @@ import pytest from pydantic import ValidationError -from guidellm.backend.objects import ( +from guidellm.backends.objects import ( GenerationRequest, GenerationRequestTimings, GenerationResponse, diff --git a/tests/unit/backend/test_openai_backend.py b/tests/unit/backend/test_openai_backend.py index 8b15bfb1..7c7f528d 100644 --- a/tests/unit/backend/test_openai_backend.py +++ b/tests/unit/backend/test_openai_backend.py @@ -14,13 +14,13 @@ import pytest from PIL import Image -from guidellm.backend.backend import Backend -from guidellm.backend.objects import ( +from guidellm.backends.backend import Backend +from guidellm.backends.objects import ( GenerationRequest, GenerationRequestTimings, GenerationResponse, ) -from guidellm.backend.openai import OpenAIHTTPBackend, UsageStats +from guidellm.backends.openai import OpenAIHTTPBackend, UsageStats from guidellm.scheduler import ScheduledRequestInfo diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index a0457b6f..92bb89e1 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -7,7 +7,7 @@ import pytest import respx -from guidellm.backend import ResponseSummary, StreamingTextResponse +from guidellm.backends import ResponseSummary, StreamingTextResponse from .mock_backend import MockBackend diff --git a/tests/unit/mock_backend.py b/tests/unit/mock_backend.py index 27bfe382..6080a9d1 100644 --- a/tests/unit/mock_backend.py +++ b/tests/unit/mock_backend.py @@ -8,7 +8,7 @@ from lorem.text import TextLorem # type: ignore from PIL import Image -from guidellm.backend import ( +from guidellm.backends import ( Backend, RequestArgs, ResponseSummary, diff --git a/tests/unit/utils/test_encoding.py b/tests/unit/utils/test_encoding.py index da1f63ee..cc4600cf 100644 --- a/tests/unit/utils/test_encoding.py +++ 
b/tests/unit/utils/test_encoding.py @@ -6,7 +6,7 @@ import pytest from pydantic import BaseModel, Field -from guidellm.backend.objects import ( +from guidellm.backends.objects import ( GenerationRequest, GenerationResponse, ) diff --git a/tests/unit/utils/test_messaging.py b/tests/unit/utils/test_messaging.py index d6627e88..d6b3283d 100644 --- a/tests/unit/utils/test_messaging.py +++ b/tests/unit/utils/test_messaging.py @@ -10,7 +10,7 @@ import pytest from pydantic import BaseModel -from guidellm.backend import ( +from guidellm.backends import ( GenerationRequest, GenerationResponse, ) From 452eb6536c04d9d908393ad20fe1c036f1173ca0 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 12:16:50 +0000 Subject: [PATCH 12/90] remove renaming changes from benchmark package til after that PR is up to avoid conflicts Signed-off-by: Mark Kurtz --- src/guidellm/__main__.py | 2 +- src/guidellm/benchmark/aggregator.py | 2 +- src/guidellm/benchmark/benchmarker.py | 2 +- src/guidellm/benchmark/entrypoints.py | 2 +- src/guidellm/benchmark/scenario.py | 2 +- src/guidellm/scheduler/scheduler.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index f222a36f..f82c19cf 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -6,7 +6,7 @@ import click from pydantic import ValidationError -from guidellm.backends import BackendType +from guidellm.backend import BackendType from guidellm.benchmark import ( ProfileType, reimport_benchmarks_report, diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index 42f54100..9e6ffd68 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -12,7 +12,7 @@ from pydantic import Field -from guidellm.backends import ResponseSummary +from guidellm.backend import ResponseSummary from guidellm.benchmark.benchmark import ( BenchmarkArgs, BenchmarkRunStats, diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index e1ff7efc..0e34e322 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -14,7 +14,7 @@ from pydantic import Field from transformers import PreTrainedTokenizerBase # type: ignore # noqa: PGH003 -from guidellm.backends import Backend, ResponseSummary +from guidellm.backend import Backend, ResponseSummary from guidellm.benchmark.aggregator import ( AggregatorT, BenchmarkT, diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 5feb23e7..2ef85c3e 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -7,7 +7,7 @@ PreTrainedTokenizerBase, ) -from guidellm.backends import Backend, BackendType +from guidellm.backend import Backend, BackendType from guidellm.benchmark.benchmarker import GenerativeBenchmarker from guidellm.benchmark.output import ( GenerativeBenchmarksConsole, diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 6d6348d9..042b25b1 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -9,7 +9,7 @@ PreTrainedTokenizerBase, ) -from guidellm.backends.backend import BackendType +from guidellm.backend.backend import BackendType from guidellm.benchmark.profile import ProfileType from guidellm.objects.pydantic import StandardBaseModel from guidellm.scheduler.strategies import StrategyType diff --git a/src/guidellm/scheduler/scheduler.py 
b/src/guidellm/scheduler/scheduler.py index de0660e2..e7d8b2c6 100644 --- a/src/guidellm/scheduler/scheduler.py +++ b/src/guidellm/scheduler/scheduler.py @@ -50,7 +50,7 @@ class Scheduler( Example: :: from guidellm.scheduler import Scheduler - from guidellm.backend import OpenAIBackend + from guidellm.backends import OpenAIBackend from guidellm.scheduler import NonDistributedEnvironment, SynchronousStrategy scheduler = Scheduler() From 7829fb8e09b491ac87c491cd42a85cd1b8947130 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 11:41:17 +0000 Subject: [PATCH 13/90] Add in benchmark package refactor Signed-off-by: Mark Kurtz --- src/guidellm/benchmark/__init__.py | 73 +- src/guidellm/benchmark/aggregator.py | 1828 ++++++++++++++++--------- src/guidellm/benchmark/benchmark.py | 837 ----------- src/guidellm/benchmark/benchmarker.py | 534 ++++---- src/guidellm/benchmark/entrypoints.py | 386 ++++-- src/guidellm/benchmark/objects.py | 473 +++++++ src/guidellm/benchmark/output.py | 1227 +++++++---------- src/guidellm/benchmark/profile.py | 863 ++++++++---- src/guidellm/benchmark/progress.py | 1344 ++++++++++-------- src/guidellm/benchmark/scenario.py | 60 +- 10 files changed, 4077 insertions(+), 3548 deletions(-) delete mode 100644 src/guidellm/benchmark/benchmark.py create mode 100644 src/guidellm/benchmark/objects.py diff --git a/src/guidellm/benchmark/__init__.py b/src/guidellm/benchmark/__init__.py index a4676c7e..76324a65 100644 --- a/src/guidellm/benchmark/__init__.py +++ b/src/guidellm/benchmark/__init__.py @@ -1,19 +1,31 @@ -from .aggregator import AggregatorT, BenchmarkAggregator, GenerativeBenchmarkAggregator -from .benchmark import ( +from .aggregator import ( + Aggregator, + AggregatorState, + CompilableAggregator, + GenerativeRequestsAggregator, + GenerativeStatsProgressAggregator, + InjectExtrasAggregator, + SchedulerStatsAggregator, + SerializableAggregator, +) +from .benchmarker import Benchmarker +from .entrypoints import benchmark_generative_text, reimport_benchmarks_report +from .objects import ( Benchmark, - BenchmarkArgs, BenchmarkMetrics, - BenchmarkRunStats, + BenchmarkSchedulerStats, BenchmarkT, GenerativeBenchmark, + GenerativeBenchmarksReport, GenerativeMetrics, - GenerativeTextErrorStats, - GenerativeTextResponseStats, - StatusBreakdown, + GenerativeRequestStats, +) +from .output import ( + GenerativeBenchmarkerConsole, + GenerativeBenchmarkerCSV, + GenerativeBenchmarkerHTML, + GenerativeBenchmarkerOutput, ) -from .benchmarker import Benchmarker, BenchmarkerResult, GenerativeBenchmarker -from .entrypoints import benchmark_generative_text, reimport_benchmarks_report -from .output import GenerativeBenchmarksConsole, GenerativeBenchmarksReport from .profile import ( AsyncProfile, ConcurrentProfile, @@ -22,46 +34,45 @@ SweepProfile, SynchronousProfile, ThroughputProfile, - create_profile, ) from .progress import ( - BenchmarkerProgressDisplay, - BenchmarkerTaskProgressState, - GenerativeTextBenchmarkerProgressDisplay, - GenerativeTextBenchmarkerTaskProgressState, + BenchmarkerProgress, + BenchmarkerProgressGroup, + GenerativeConsoleBenchmarkerProgress, ) __all__ = [ - "AggregatorT", + "Aggregator", + "AggregatorState", "AsyncProfile", "Benchmark", - "BenchmarkAggregator", - "BenchmarkArgs", "BenchmarkMetrics", - "BenchmarkRunStats", + "BenchmarkSchedulerStats", "BenchmarkT", "Benchmarker", - "BenchmarkerProgressDisplay", - "BenchmarkerResult", - "BenchmarkerTaskProgressState", + "BenchmarkerProgress", + "BenchmarkerProgressGroup", + "CompilableAggregator", 
"ConcurrentProfile", "GenerativeBenchmark", - "GenerativeBenchmarkAggregator", - "GenerativeBenchmarker", - "GenerativeBenchmarksConsole", + "GenerativeBenchmarkerCSV", + "GenerativeBenchmarkerConsole", + "GenerativeBenchmarkerHTML", + "GenerativeBenchmarkerOutput", "GenerativeBenchmarksReport", + "GenerativeConsoleBenchmarkerProgress", "GenerativeMetrics", - "GenerativeTextBenchmarkerProgressDisplay", - "GenerativeTextBenchmarkerTaskProgressState", - "GenerativeTextErrorStats", - "GenerativeTextResponseStats", + "GenerativeRequestStats", + "GenerativeRequestsAggregator", + "GenerativeStatsProgressAggregator", + "InjectExtrasAggregator", "Profile", "ProfileType", - "StatusBreakdown", + "SchedulerStatsAggregator", + "SerializableAggregator", "SweepProfile", "SynchronousProfile", "ThroughputProfile", "benchmark_generative_text", - "create_profile", "reimport_benchmarks_report", ] diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index 9e6ffd68..e0f34218 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -1,760 +1,1260 @@ -import time +""" +Benchmark result aggregation and compilation interfaces. + +Provides protocols and implementations for collecting, processing, and compiling +benchmark data from scheduler executions into final metrics and statistics. + +Classes: + Aggregator: Protocol for processing benchmark data updates. + CompilableAggregator: Protocol for aggregators that can compile final results. + SchedulerStatsAggregator: Aggregates scheduler timing and performance metrics. + GenerativeRequestsStatsProgressAggregator: Tracks generation metrics during run. + GenerativeRequestsAggregator: Compiles complete generative benchmark results. + +Functions: + add_aggregate_metric: Helper for accumulating timing and count metrics. + +Type Variables: + RequestT: Generic request object type. + ResponseT: Generic response object type. + RequestTimingsT: Generic request timing object type. 
+""" + +from __future__ import annotations + +import math +import random from abc import ABC, abstractmethod -from pathlib import Path from typing import ( Any, + ClassVar, Generic, Literal, - Optional, - TypeVar, - Union, + Protocol, + runtime_checkable, ) -from pydantic import Field +from pydantic import Field, PrivateAttr -from guidellm.backend import ResponseSummary -from guidellm.benchmark.benchmark import ( - BenchmarkArgs, - BenchmarkRunStats, - BenchmarkT, - GenerativeBenchmark, - GenerativeTextErrorStats, - GenerativeTextResponseStats, +from guidellm.backend import ( + GenerationRequest, + GenerationResponse, ) -from guidellm.objects import ( - RunningStats, - StandardBaseModel, - StatusBreakdown, - TimeRunningStats, +from guidellm.benchmark.objects import ( + BenchmarkSchedulerStats, + GenerativeMetrics, + GenerativeRequestStats, ) -from guidellm.request import ( - GenerationRequest, - GenerativeRequestLoaderDescription, - RequestLoaderDescription, +from guidellm.scheduler import ( RequestT, ResponseT, -) -from guidellm.scheduler import ( - GenerativeRequestsWorkerDescription, - SchedulerRequestResult, - WorkerDescription, + ScheduledRequestInfo, + SchedulerState, ) from guidellm.settings import settings -from guidellm.utils import check_load_processor +from guidellm.utils import ( + InfoMixin, + PydanticClassRegistryMixin, + StatusBreakdown, + StatusDistributionSummary, + all_defined, + safe_divide, + safe_getattr, +) __all__ = [ - "AggregatorT", - "BenchmarkAggregator", - "GenerativeBenchmarkAggregator", + "Aggregator", + "AggregatorState", + "CompilableAggregator", + "GenerativeRequestsAggregator", + "GenerativeStatsProgressAggregator", + "InjectExtrasAggregator", + "SchedulerStatsAggregator", + "SerializableAggregator", ] -class SchedulerRunningStats(StandardBaseModel): +class AggregatorState(dict[str, Any]): + def add_metric( + self, + key: str, + value: int | float | None, + start_val: int | float | None = 0.0, + count: int | None = 1, + duration: float | None = None, + duration_div: Literal["total", "avg"] = "total", + prefix: str | None = None, + ): + """ + Add timing or count metrics to aggregation state. 
+ """ + if prefix: + self.add_metric( + key=f"{prefix}_{key}", + value=value, + start_val=start_val, + count=count, + duration=duration, + duration_div=duration_div, + ) + return + + if not all_defined(value, start_val, count): + return + + delta_val = value - start_val + self[f"{key}_total"] = self.get(f"{key}_total", 0) + delta_val + self[f"{key}_count"] = self.get(f"{key}_count", 0) + count + self[f"{key}_avg"] = safe_divide( + self.get(f"{key}_total"), self.get(f"{key}_count") + ) + + if all_defined(duration): + self[f"{key}_duration"] = duration + self[f"{key}_rate"] = safe_divide( + self.get(f"{key}_{duration_div}"), duration + ) + + def set_metric( + self, + key: str, + value: int | float | None, + type_: Literal["total", "count", "avg", "duration", "rate"], + prefix: str | None = None, + ): + if prefix: + self.set_metric( + key=f"{prefix}_{key}", + value=value, + type_=type_, + prefix=None, + ) + return + + self[f"{key}_{type_}"] = value + + def get_metric( + self, + key: str, + type_: Literal["total", "count", "avg", "duration", "rate"], + default: int | float | None = None, + prefix: str | None = None, + ) -> int | float | None: + if prefix: + return self.get_metric( + key=f"{prefix}_{key}", + type_=type_, + default=default, + ) + + return self.get(f"{key}_{type_}", default) + + +@runtime_checkable +class Aggregator(Protocol[ResponseT, RequestT]): """ - The metrics for the scheduler stored as running statistics for easy calculations - of rates, averages, totals, etc. + Protocol for processing benchmark data updates during execution. + + Defines the interface for aggregators that collect and process request/response + data from scheduler executions. Implementations update aggregation state with + each completed request for eventual compilation into final metrics. """ - created_requests: RunningStats = Field( - description=( - "The running statistics for the number of requests created for this " - "benchmark run. This includes all requests created, regardless of " - "their status." - ), - default_factory=RunningStats, - ) - queued_requests: RunningStats = Field( - description=( - "The running statistics for the number of requests pending in queue " - "for this benchmark run. This includes requests that are waiting to " - "be scheduled." - ), - default_factory=RunningStats, - ) - scheduled_requests: RunningStats = Field( - description=( - "The running statistics for the number of requests scheduled (actively " - "running but waiting for the desired start time) for this benchmark run." - ), - default_factory=RunningStats, - ) - processing_requests: RunningStats = Field( - description=( - "The running statistics for the number of requests actively being " - "processed by the worker for this benchmark run." - ), - default_factory=RunningStats, - ) - completed_requests: RunningStats = Field( - description=( - "The running statistics for the number of requests completed for this " - "benchmark run. This includes requests within the warmup and cooldown " - "period, if any, along with the final results." - ), - default_factory=RunningStats, - ) + def __call__( + self, + state: AggregatorState, + response: ResponseT | None, + request: RequestT, + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> dict[str, Any] | None: + """ + Process a completed request and update aggregation state. + + :param state: Current aggregation state to update in-place. + :param response: Response generated for the request, if successful. + :param request: The processed request object. 
+ :param request_info: Scheduling metadata and timing information. + :param scheduler_state: Current scheduler execution state. + :return: Optional intermediate updates for progress reporting. + """ -class RequestsRunningStats(StandardBaseModel): +@runtime_checkable +class CompilableAggregator(Protocol[ResponseT, RequestT]): """ - The metrics for requests that have succeeded, been canceled, or errored stored - as running statistics for easy calculations of rates, averages, totals, etc. + Protocol for aggregators that compile final results from aggregated state. + + Extends the Aggregator protocol with the ability to transform accumulated + state into final benchmark results and metrics after execution completes. """ - totals: StatusBreakdown[RunningStats, RunningStats, RunningStats, RunningStats] = ( - Field( - description=( - "The running statistics for the total number of requests that " - "completed within the benchmark run." - ), - default_factory=lambda: StatusBreakdown( - successful=RunningStats(), - errored=RunningStats(), - incomplete=RunningStats(), - total=RunningStats(), - ), - ) - ) - queued_time: TimeRunningStats = Field( - description=( - "The running statistics for the time spent in queue for all requests that " - "completed within the benchmark run. This is the time from when the " - "request was created to when it was dequeued by the worker." - ), - default_factory=TimeRunningStats, - ) - scheduled_time_delay: TimeRunningStats = Field( - description=( - "The running statistics for the time spent from when a request was " - "dequeued by the worker to when it was actually scheduled by the worker" - "for all requests that completed within the benchmark run. " - "This should be as close to 0 as possible, any additional time is " - "overheads from the system or the worker." - ), - default_factory=TimeRunningStats, - ) - scheduled_time_sleep: TimeRunningStats = Field( - description=( - "The running statistics for the time for each request spent sleeping til " - "the desired start time was reached for all requests that completed within " - "the benchmark run. This is the time from when the request was scheduled " - "to when the desired start time was reached. " - ), - default_factory=TimeRunningStats, - ) - worker_start_delay: TimeRunningStats = Field( - description=( - "The running statistics for the time delay between when the request was " - "scheduled and when the worker actually started processing subtracting any " - "sleep time for all requests that completed within the benchmark run. " - "This should be as close to 0 as possible, any additional time is " - "overheads from the system or the worker." - ), - default_factory=TimeRunningStats, - ) - worker_time: TimeRunningStats = Field( - description=( - "The running statistics for the time spent processing all requests that " - "completed within the benchmark run. This is the time from when the " - "request was started to when it was completed." - ), - default_factory=TimeRunningStats, - ) - worker_start_time_targeted_delay: TimeRunningStats = Field( - description=( - "The running statistics for the delay between the targeted start time and " - "the actual start time for requests that completed within the benchmark " - "run. This represents delays from the best case desired start time. " - "For async strategies, this represents delays from the ideal system. " - "For sync strategies, since those are doubled in queue, this should be " - "as close to the time for a request to be processed as possible." 
- ), - default_factory=TimeRunningStats, - ) - request_start_time_delay: TimeRunningStats = Field( - description=( - "The running statistics for the delay between the actual request being " - "made and the time the worker started on the request for all requests " - "that completed within the benchmark run. This time should be as close to " - "0 as possible, any additional time is overhead from the system or " - "the worker." - ), - default_factory=TimeRunningStats, - ) - request_start_time_targeted_delay: TimeRunningStats = Field( - description=( - "The running statistics for the delay between the targeted start time and " - "the actual start time for all requests that completed within the " - "benchmark run. This represents delays from the best case desired start " - "time. For async strategies, this represents delays from the ideal system. " - "For sync strategies, since those are duplicated in queue, this should be " - "as close to the time for a request to be processed." - ), - default_factory=TimeRunningStats, - ) - request_time_delay: TimeRunningStats = Field( - description=( - "The running statistics for the delay in time between the total request " - "time and the worker time. This should be as close to 0 as possible, any " - "additional time is overhead from the system or the worker. " - ), - default_factory=TimeRunningStats, - ) - request_time: TimeRunningStats = Field( - description=( - "The running statistics for the time spent processing all requests that " - "completed within the benchmark run. This is the time from when the " - "request was created to when it was completed." - ), - default_factory=TimeRunningStats, - ) + def __call__( + self, + state: AggregatorState, + response: ResponseT | None, + request: RequestT, + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> dict[str, Any] | None: + """ + Process a completed request and update aggregation state. + :param state: Current aggregation state to update in-place. + :param response: Response generated for the request, if successful. + :param request: The processed request object. + :param request_info: Scheduling metadata and timing information. + :param scheduler_state: Current scheduler execution state. + :return: Optional intermediate updates for progress reporting. + """ -class BenchmarkAggregator( - ABC, StandardBaseModel, Generic[BenchmarkT, RequestT, ResponseT] + def compile( + self, state: AggregatorState, scheduler_state: SchedulerState + ) -> dict[str, Any]: + """ + Compile aggregated state into final benchmark results. + + :param agg_state: The accumulated aggregation state. + :param scheduler_state: Final scheduler execution state. + :return: Compiled benchmark results and metrics. + """ + + +class SerializableAggregator( + PydanticClassRegistryMixin[type["SerializableAggregator"]], + ABC, + Generic[ResponseT, RequestT], ): + schema_discriminator: ClassVar[str] = "type_" + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[SerializableAggregator]: + if cls.__name__ == "SerializableAggregator": + return cls + + return SerializableAggregator + + @classmethod + @abstractmethod + def validated_kwargs(cls, *args, **kwargs) -> dict[str, Any]: + """ + Validate and process arguments for constraint creation. + + Must be implemented by subclasses to handle their specific parameter patterns. 
+ + :param args: Positional arguments passed to the constraint + :param kwargs: Keyword arguments passed to the constraint + :return: Validated dictionary of parameters for constraint creation + :raises NotImplementedError: Must be implemented by subclasses + """ + ... + + @classmethod + def resolve( + cls, + aggregators: dict[ + str, + Any | dict[str, Any] | Aggregator | CompilableAggregator, + ], + ) -> dict[str, Aggregator | CompilableAggregator]: + """ + Resolve mixed aggregator specifications to callable aggregators. + + :param aggregators: Dictionary mapping aggregator keys to specifications + :return: Dictionary mapping aggregator keys to callable functions + :raises ValueError: If any key is not registered in the factory + """ + resolved = {} + + for key, val in aggregators.items(): + if isinstance(val, (Aggregator, CompilableAggregator)): + resolved[key] = val + else: + aggregator_class = cls.get_registered_object(key) + kwargs = aggregator_class.validated_kwargs(**val) + resolved[key] = aggregator_class(**kwargs) + + return resolved + + type_: Literal["aggregator"] = Field(default="aggregator", description="") + + @abstractmethod + def __call__( + self, + state: AggregatorState, + response: ResponseT | None, + request: RequestT, + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> dict[str, Any] | None: + """ + Process a completed request and update aggregation state. + + :param agg_state: Current aggregation state to update in-place. + :param response: Response generated for the request, if successful. + :param request: The processed request object. + :param request_info: Scheduling metadata and timing information. + :param scheduler_state: Current scheduler execution state. + :return: Optional intermediate updates for progress reporting. + """ + + @abstractmethod + def compile( + self, state: AggregatorState, scheduler_state: SchedulerState + ) -> dict[str, Any]: + """ + Compile aggregated state into final benchmark results. + + :param agg_state: The accumulated aggregation state. + :param scheduler_state: Final scheduler execution state. + :return: Compiled benchmark results and metrics. + """ + + +@SerializableAggregator.register("inject_extras") +class InjectExtrasAggregator(SerializableAggregator[ResponseT, RequestT], InfoMixin): """ - A pydantic base class representing the base class for aggregating benchmark results. - The purpose is to receive and process results from a Benchmarker as it iterates - through a Scheduler for an individual benchmark run. - As results are added, lightweight statistics are updated and stored for immediate - progress and informational updates to the caller. - Once the benchmark run is complete, the `compile` method is called to finalize - the benchmark and return a Benchmark object with all the results and statistics - fully calculated. + Aggregator for injecting extra metadata into the output. """ - type_: Literal["benchmark_aggregator"] = "benchmark_aggregator" - run_id: str = Field( - description=( - "The unique identifier for the encompasing benchmark run that this " - "benchmark was a part of." - ) - ) - args: BenchmarkArgs = Field( - description=( - "The arguments used to create the benchmark run that this benchmark was " - "a part of." - ) - ) - worker_description: Union[ - GenerativeRequestsWorkerDescription, WorkerDescription - ] = Field( - description=( - "The description and specifics for the worker used to resolve requests " - "for this benchmark." 
- ), - discriminator="type_", - ) - request_loader_description: Union[ - GenerativeRequestLoaderDescription, RequestLoaderDescription - ] = Field( - description=( - "The description and specifics for the request loader used to create " - "requests for this benchmark." - ), - discriminator="type_", - ) - extras: dict[str, Any] = Field( - description=( - "Any additional information or metadata that was passed for this benchmark." - ) - ) - in_warmup: bool = Field( - description=( - "A flag to indicate if the benchmark is currently in the warmup phase." - ), - default=False, - exclude=True, - ) - in_cooldown: bool = Field( - description=( - "A flag to indicate if the benchmark is currently in the cooldown phase." - ), - default=False, - exclude=True, - ) - scheduler_stats: SchedulerRunningStats = Field( - description=( - "The running statistics for the scheduler for this benchmark run. " - "This includes all requests created, regardless of their status." - ), - default_factory=SchedulerRunningStats, - ) - requests_stats: RequestsRunningStats = Field( - description=( - "The running statistics for the requests for this benchmark run. " - "This includes all requests created, regardless of their status." - ), - default_factory=RequestsRunningStats, - ) - results: StatusBreakdown[ - list[SchedulerRequestResult[RequestT, ResponseT]], - list[SchedulerRequestResult[RequestT, ResponseT]], - list[SchedulerRequestResult[RequestT, ResponseT]], - None, - ] = Field( - description=( - "The completed requests for this benchmark run broken down by status" - "and excluding warmup and cooldown requests." - ), - default_factory=lambda: StatusBreakdown( # type: ignore[arg-type] - successful=[], - errored=[], - incomplete=[], - total=None, - ), - ) + @classmethod + def validated_kwargs(cls, extras: dict[str, Any], **_kwargs) -> dict[str, Any]: + return {"extras": extras} - def add_result( + type_: Literal["inject_extras"] = Field(default="inject_extras") + extras: dict[str, Any] | None = Field(default_factory=None) + + def __call__( self, - result: SchedulerRequestResult[RequestT, ResponseT], - ) -> bool: + state: AggregatorState, + response: ResponseT | None, + request: RequestT, + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> dict[str, Any] | None: """ - Add a result to the aggregator. This will update the internal statistics - and add the result to the list of results if it is not within the warmup or - cooldown period. - - :param result: The result to add to the aggregator. - :return: True if the result was added, False if it was added because it - did not fit within the warmup or cooldown period, was not requested, - or is not finished + Inject extra metadata into the aggregation state. + + :param agg_state: Current aggregation state to update. + :param response: Response generated for the request, if successful. + :param request: The processed request object. + :param request_info: Scheduling metadata and timing information. + :param scheduler_state: Current scheduler execution state. + :return: Updated aggregation state with injected extras. 
""" - # Add scheduler statistics - self.scheduler_stats.created_requests += max( - 0, result.run_info.created_requests - ) - self.scheduler_stats.queued_requests += max(0, result.run_info.queued_requests) - self.scheduler_stats.scheduled_requests += max( - 0, result.run_info.scheduled_requests - ) - self.scheduler_stats.processing_requests += max( - 0, result.run_info.processing_requests - ) - self.scheduler_stats.completed_requests += max( - 0, result.run_info.completed_requests - ) + _ = (state, response, request, request_info, scheduler_state) # unused + return None - if result.type_ != "request_complete" or ( - result.request_info.canceled and not result.request_info.requested - ): - # If the result is not completed yet, don't add to the results - # If the result was canceled and not started, ignore it - return False + def compile( + self, state: AggregatorState, scheduler_state: SchedulerState + ) -> dict[str, Any]: + _ = (state, scheduler_state) # unused + return {"extras": self.extras} if self.extras else {} - # Add request statistics - self.requests_stats.totals.total += 1 - if result.request_info.canceled: - self.requests_stats.totals.incomplete += 1 - elif result.request_info.errored: - self.requests_stats.totals.errored += 1 - elif result.request_info.completed: - self.requests_stats.totals.successful += 1 - else: - raise ValueError( - "Unexpected state: request_info must be either " - "completed, canceled, or errored. " - f"Got {result.request_info}" - ) - self.requests_stats.queued_time.update( - result.request_info.dequeued_time - result.request_info.queued_time - ) - self.requests_stats.scheduled_time_delay.update( - result.request_info.scheduled_time - result.request_info.dequeued_time +@SerializableAggregator.register("scheduler_stats") +class SchedulerStatsAggregator(SerializableAggregator[ResponseT, RequestT], InfoMixin): + """ + Aggregates scheduler timing and performance metrics. + + Collects timing data for various scheduler phases including queuing, + resolution, and processing delays to generate performance statistics. + """ + + @classmethod + def validated_kwargs(cls, *_args, **_kwargs) -> dict[str, Any]: + return {} + + type_: Literal["scheduler_stats"] = Field(default="scheduler_stats") + + def __call__( + self, + state: AggregatorState, + response: ResponseT | None, + request: RequestT, + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> dict[str, Any] | None: + """ + Aggregate scheduler timing metrics for a completed request. + + :param agg_state: Current aggregation state to update. + :param response: Response generated for the request, if successful. + :param request: The processed request object. + :param request_info: Scheduling metadata and timing information. + :param scheduler_state: Current scheduler execution state. + :return: Updated aggregation state for intermediate reporting. 
+ """ + _ = (response, request, scheduler_state) # unused + if request_info.status not in ("completed", "errored", "cancelled"): + # Only compile scheduler stats for processed requests + return None + + state["updated_scheduler_stats"] = True + state.add_metric( + key="queued_time", + value=request_info.scheduler_timings.dequeued, + start_val=request_info.scheduler_timings.queued, ) - sleep_time = max( - 0.0, - result.request_info.targeted_start_time - - result.request_info.scheduled_time, + state.add_metric( + key="worker_resolve_start_delay", + value=request_info.scheduler_timings.resolve_start, + start_val=request_info.scheduler_timings.scheduled_at, ) - self.requests_stats.scheduled_time_sleep.update(sleep_time) - time_to_worker_start = ( - result.request_info.worker_start - result.request_info.scheduled_time + state.add_metric( + key="worker_resolve_time", + value=request_info.scheduler_timings.resolve_end, + start_val=request_info.scheduler_timings.resolve_start, ) - self.requests_stats.worker_start_delay.update(time_to_worker_start - sleep_time) - self.requests_stats.worker_time.update( - result.request_info.worker_end - result.request_info.worker_start + state.add_metric( + key="worker_resolve_end_delay", + value=request_info.scheduler_timings.resolve_end, + start_val=safe_getattr(request_info.request_timings, "request_end"), ) - self.requests_stats.worker_start_time_targeted_delay.update( - result.request_info.worker_start - result.request_info.targeted_start_time + state.add_metric( + key="finalized_delay", + value=request_info.scheduler_timings.finalized, + start_val=request_info.scheduler_timings.resolve_end, ) - self.requests_stats.request_start_time_delay.update( - result.request_info.worker_start - result.request_info.targeted_start_time + state.add_metric( + key="worker_targeted_start_delay", + value=request_info.scheduler_timings.resolve_start, + start_val=request_info.scheduler_timings.targeted_start, ) - self.requests_stats.request_start_time_targeted_delay.update( - result.request_info.worker_start - result.request_info.targeted_start_time + state.add_metric( + key="request_start_delay", + value=request_info.scheduler_timings.resolve_start, + start_val=safe_getattr(request_info.request_timings, "request_start"), ) - self.requests_stats.request_time_delay.update( - (result.request_info.worker_end - result.request_info.worker_start) - - (result.request_info.worker_end - result.request_info.worker_start) + state.add_metric( + key="request_time", + value=safe_getattr(request_info.request_timings, "request_end"), + start_val=safe_getattr(request_info.request_timings, "request_start"), ) - self.requests_stats.request_time.update( - result.request_info.worker_end - result.request_info.worker_start + state.add_metric( + key="request_targeted_start_delay", + value=safe_getattr(request_info.request_timings, "request_start"), + start_val=request_info.scheduler_timings.targeted_start, ) - # Add result to the list of results provided we are not in warmup or cooldown - total_completed = self.requests_stats.totals.total.total - global_start_time = self.requests_stats.totals.total.start_time + return state - in_warmup_number = ( - self.args.warmup_number and total_completed <= self.args.warmup_number - ) - in_warmup_duration = ( - self.args.warmup_duration - and result.request_info.worker_start - <= (global_start_time + self.args.warmup_duration) - ) + def compile( + self, state: AggregatorState, scheduler_state: SchedulerState + ) -> dict[Literal["scheduler_stats"], 
BenchmarkSchedulerStats]: + """ + Compile scheduler timing metrics into benchmark statistics. + + :param agg_state: Accumulated timing data and counts. + :param scheduler_state: Final scheduler execution state. + :return: Dictionary containing compiled scheduler statistics. + """ + return { + "run_stats": BenchmarkSchedulerStats( + start_time=scheduler_state.start_time, + end_time=scheduler_state.end_time, + requests_made=StatusBreakdown[int, int, int, int]( + successful=scheduler_state.successful_requests, + incomplete=scheduler_state.cancelled_requests, + errored=scheduler_state.errored_requests, + total=( + scheduler_state.successful_requests + + scheduler_state.cancelled_requests + + scheduler_state.errored_requests + ), + ), + queued_time_avg=state.get_metric( + key="queued_time", type_="avg", default=0.0 + ), + worker_resolve_start_delay_avg=state.get_metric( + key="worker_resolve_start_delay", type_="avg", default=0.0 + ), + worker_resolve_time_avg=state.get_metric( + key="worker_resolve_time", type_="avg", default=0.0 + ), + worker_resolve_end_delay_avg=state.get_metric( + key="worker_resolve_end_delay", type_="avg" + ), + finalized_delay_avg=state.get_metric( + key="finalized_delay", type_="avg", default=0.0 + ), + worker_targeted_start_delay_avg=state.get_metric( + key="worker_targeted_start_delay", type_="avg", default=0.0 + ), + request_start_delay_avg=state.get_metric( + key="request_start_delay", type_="avg", default=0.0 + ), + request_time_avg=state.get_metric( + key="request_time", type_="avg", default=0.0 + ), + request_targeted_start_delay_avg=state.get_metric( + key="request_targeted_start_delay", type_="avg", default=0.0 + ), + ), + } - if in_warmup_number or in_warmup_duration: - self.in_warmup = True - return True - self.in_warmup = False - in_cooldown_number = ( - self.args.cooldown_number - and self.args.max_number - and total_completed > self.args.max_number - self.args.cooldown_number - ) - in_cooldown_duration = ( - self.args.cooldown_duration - and self.args.max_duration - and result.request_info.worker_start - > global_start_time + self.args.max_duration - self.args.cooldown_duration +@SerializableAggregator.register("generative_stats_progress") +class GenerativeStatsProgressAggregator( + SerializableAggregator[GenerationResponse, GenerationRequest] +): + """ + Tracks generative model metrics during benchmark execution. + + Aggregates token-level metrics including time to first token, inter-token + latency, and token counts for real-time progress monitoring. + """ + + @classmethod + def validated_kwargs(cls, *_args, **_kwargs) -> dict[str, Any]: + return {} + + type_: Literal["generative_stats_progress"] = Field( + default="generative_stats_progress" + ) + + def __call__( + self, + state: AggregatorState, + response: GenerationResponse | None, + request: GenerationRequest, + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> dict[str, Any] | None: + """ + Aggregate generative model metrics for a completed request. + + :param agg_state: Current aggregation state to update. + :param response: Generation response with token and timing data. + :param request: The processed generation request. + :param request_info: Scheduling metadata and timing information. + :param scheduler_state: Current scheduler execution state. + :return: Updated aggregation state for progress reporting. 
+ """ + _ = (request,) # unused + if request_info.status not in {"completed", "errored", "cancelled"}: + # Only compile progress stats for processed requests + return None + + state["updated_generative_stats"] = True + start_time = scheduler_state.start_time + end_time = ( + safe_getattr(request_info.request_timings, "request_end") + or request_info.scheduler_timings.resolve_end ) + duration = end_time - start_time if end_time else None - if in_cooldown_number or in_cooldown_duration: - self.in_cooldown = True - return True + for prefix in (request_info.status, None): + requests_count = ( + scheduler_state.processed_requests + if prefix is None + else scheduler_state.successful_requests + if request_info.status == "completed" + else scheduler_state.cancelled_requests + if request_info.status == "cancelled" + else scheduler_state.errored_requests + ) - self.in_cooldown = False + # Requests per Second + if duration is not None: + state.set_metric( + key="requests", + value=safe_divide(requests_count, duration), + type_="rate", + prefix=prefix, + ) - if result.request_info.canceled: - self.results.incomplete.append(result) - elif result.request_info.errored: - self.results.errored.append(result) - elif result.request_info.completed: - self.results.successful.append(result) - else: - raise ValueError( - "Unexpected state: request_info must be either " - "completed, canceled, or errored. " - f"Got {result.request_info}" + # Request Concurrency + state.set_metric( + key="requests", + value=scheduler_state.processing_requests, + type_="avg", + prefix=prefix, ) - return True + # Request Latency + state.add_metric( + key="request_latency", + value=safe_getattr(request_info.request_timings, "request_end"), + start_val=safe_getattr(request_info.request_timings, "request_start"), + prefix=prefix, + ) - @abstractmethod - def compile(self) -> BenchmarkT: - """ - Compile the benchmark results and statistics into a Benchmark object. - This is required to be implemented by subclasses to finalize the benchmark - and return the compiled object. 
+            # Time to First Token
+            state.add_metric(
+                key="time_to_first_token",
+                value=safe_getattr(request_info.request_timings, "first_iteration"),
+                start_val=safe_getattr(request_info.request_timings, "request_start"),
+                prefix=prefix,
+            )
+
+            output_tokens = safe_getattr(response, "output_tokens")
+            prompt_tokens = safe_getattr(response, "prompt_tokens")
+
+            # Inter Token Latency
+            state.add_metric(
+                key="inter_token_latency",
+                value=safe_getattr(request_info.request_timings, "last_iteration"),
+                start_val=safe_getattr(request_info.request_timings, "first_iteration"),
+                count=(
+                    output_tokens - 1 if output_tokens and output_tokens > 1 else None
+                ),
+                prefix=prefix,
+            )
+
+            # Time per Output Token
+            state.add_metric(
+                key="time_per_output_token",
+                value=safe_getattr(request_info.request_timings, "last_iteration"),
+                start_val=safe_getattr(request_info.request_timings, "request_start"),
+                count=output_tokens,
+                prefix=prefix,
+            )
+
+            # Prompt Tokens
+            state.add_metric(
+                key="prompt_tokens",
+                value=prompt_tokens,
+                duration=duration,
+                prefix=prefix,
+            )
+
+            # Output Tokens
+            state.add_metric(
+                key="output_tokens",
+                value=output_tokens,
+                duration=duration,
+                prefix=prefix,
+            )
+
+            # Total Tokens
+            state.add_metric(
+                key="total_tokens",
+                value=(
+                    prompt_tokens + output_tokens
+                    if all_defined(prompt_tokens, output_tokens)
+                    else prompt_tokens
+                    if all_defined(prompt_tokens)
+                    else output_tokens
+                    if all_defined(output_tokens)
+                    else None
+                ),
+                duration=duration,
+                prefix=prefix,
+            )
+
+        return state
+
+    def compile(
+        self, state: AggregatorState, scheduler_state: SchedulerState
+    ) -> dict[str, Any]:
         """
-        ...
+        Compile progress metrics into final results.

+        GenerativeStatsProgressAggregator is primarily for progress tracking,
+        so compilation produces no additional compiled results.

-AggregatorT = TypeVar("AggregatorT", bound=BenchmarkAggregator)

+        :param state: The accumulated aggregation state.
+        :param scheduler_state: Final scheduler execution state.
+        :return: An empty dict; progress metrics are not part of compiled output.
+        """
+        _ = (state, scheduler_state)  # unused
+        return {}

-class GenerativeRequestsRunningStats(RequestsRunningStats):
+@SerializableAggregator.register("generative_requests")
+class GenerativeRequestsAggregator(
+    SerializableAggregator[GenerationResponse, GenerationRequest],
+):
     """
-    The metrics for generative requests that have succeeded, been canceled, or errored
-    stored as running statistics for easy calculations of rates, averages, totals, etc.
+    Compiles complete generative benchmark results with warmup/cooldown filtering.
+
+    Aggregates request data during execution and compiles comprehensive metrics
+    including timing distributions, token statistics, and throughput measurements.
+    Supports filtering warmup and cooldown periods from final results.
     """

-    time_to_first_token: TimeRunningStats = Field(
-        description=(
-            "The running statistics for the time from the start of the request to the "
-            "first token being generated for all requests that completed within the "
-            "benchmark run."
-        ),
-        default_factory=TimeRunningStats,
-    )
-    inter_token_latency: TimeRunningStats = Field(
-        description=(
-            "The running statistics for the time between each token being generated "
-            "for all requests that completed within the benchmark run."
-        ),
-        default_factory=TimeRunningStats,
-    )
-    prompt_tokens: RunningStats = Field(
-        description=(
-            "The running statistics for the token count for the prompt for all "
-            "requests that completed, if available in the response."
-        ),
-        default_factory=RunningStats,
-    )
-    output_tokens: RunningStats = Field(
-        description=(
-            "The running statistics for the token count for the output for all "
-            "requests that completed, if available in the response."
-        ),
-        default_factory=RunningStats,
-    )
-    total_tokens: RunningStats = Field(
-        description=(
-            "The running statistics for the total token count for all requests that "
-            "completed, if available in the response."
-        ),
-        default_factory=RunningStats,
-    )
+    @classmethod
+    def validated_kwargs(
+        cls,
+        request_samples: int | None = 20,
+        warmup: int | float | None = None,
+        cooldown: int | float | None = None,
+        **_kwargs,
+    ) -> dict[str, Any]:
+        return {
+            "request_samples": request_samples,
+            "warmup": warmup,
+            "cooldown": cooldown,
+        }

+    type_: Literal["generative_requests"] = Field(default="generative_requests")

-class GenerativeBenchmarkAggregator(
-    BenchmarkAggregator[GenerativeBenchmark, GenerationRequest, ResponseSummary]
-):
-    type_: Literal["generative_benchmark_aggregator"] = (
-        "generative_benchmark_aggregator"  # type: ignore[assignment]
-    )
-    processor: Optional[Union[str, Path, Any]] = Field(
-        description=(
-            "The tokenizer to use for calculating token counts when none are "
-            "avaiable that match the preferred source."
-        )
+    request_samples: int | None = Field(
+        default=20,
+        description="Number of requests to sample per status for the final results",
+    )
+    warmup: int | float | None = Field(
+        default=None,
+        description="Number of warmup requests to ignore at benchmark start",
     )
-    processor_args: Optional[dict[str, Any]] = Field(
-        description=(
-            "Additional arguments to pass to the tokenizer if it requires "
-            "any specific configuration for loading or processing."
-        ),
-    )
-    worker_description: GenerativeRequestsWorkerDescription = Field(
-        description=(
-            "The description and specifics for the worker used to resolve requests "
-            "for this benchmark."
-        ),
-        discriminator="type_",
-    )
-    request_loader_description: GenerativeRequestLoaderDescription = Field(
-        description=(
-            "The description and specifics for the request loader used to create "
-            "requests for this benchmark."
-        ),
-        discriminator="type_",
-    )
-    requests_stats: GenerativeRequestsRunningStats = Field(
-        description=(
-            "The running statistics for the requests for this benchmark run. "
-            "This includes all requests created, regardless of their status."
-        ),
-        default_factory=GenerativeRequestsRunningStats,
+    cooldown: int | float | None = Field(
+        default=None,
+        description="Number of cooldown requests to ignore at benchmark end",
     )
+    _in_cooldown: bool = PrivateAttr(False)
+    _in_warmup: bool = PrivateAttr(False)

-    def add_result(
-        self, result: SchedulerRequestResult[GenerationRequest, ResponseSummary]
-    ) -> bool:
+    def __call__(
+        self,
+        state: AggregatorState,
+        response: GenerationResponse | None,
+        request: GenerationRequest,
+        request_info: ScheduledRequestInfo,
+        scheduler_state: SchedulerState,
+    ) -> dict[str, Any] | None:
         """
-        Add a result to the aggregator. This will update the internal statistics
-        and add the result to the list of results if it is not within the warmup or
-        cooldown period.
+        Collect completed requests for final compilation.

-        :param result: The result to add to the aggregator.
+        Filters requests based on warmup/cooldown settings and categorizes by
+        completion status for comprehensive benchmark analysis.
+
+        :param state: Current aggregation state to update.
+        :param response: Generation response data.
+        :param request: The processed generation request.
+        :param request_info: Scheduling metadata and timing information.
+        :param scheduler_state: Current scheduler execution state.
+        :return: None, as this aggregator only collects for final compilation.
         """
-        if not super().add_result(result):
-            return False
+        # Skip invalid requests
+        if request_info.status not in {"completed", "cancelled", "errored"} or (
+            request_info.status == "cancelled"
+            and safe_getattr(request_info.scheduler_timings, "resolve_start") is None
+            # Cancelled requests that never started should not be kept
+        ):
+            return None

-        if result.request is None:
-            raise ValueError("Request is None, cannot add result.")
+        status = {
+            "updated_generative_requests": True,
+            "requests_in_warmup": False,
+            "requests_in_cooldown": False,
+        }

-        if result.response is None:
-            raise ValueError("Response is None, cannot add result.")
+        if self._is_in_warmup(request_info, scheduler_state):
+            status["requests_in_warmup"] = True
+            return status

-        self.requests_stats.request_start_time_delay.update(
-            result.response.start_time - result.request_info.worker_start
-        )
-        self.requests_stats.request_start_time_targeted_delay.update(
-            result.response.start_time - result.request_info.targeted_start_time
-        )
-        self.requests_stats.request_time_delay.update(
-            (result.response.start_time - result.request_info.worker_start)
-            + result.request_info.worker_end
-            - result.response.end_time
-        )
-        self.requests_stats.request_time.update(
-            result.response.end_time - result.response.start_time
-        )
-        if result.response.first_iter_time:
-            self.requests_stats.time_to_first_token.update(
-                result.response.first_iter_time - result.response.start_time
-            )
-        if result.response.last_iter_time and result.response.first_iter_time:
-            self.requests_stats.inter_token_latency.update(
-                result.response.last_iter_time - result.response.first_iter_time,
-                count=(result.response.output_tokens or 1) - 1,
-            )
-        self.requests_stats.prompt_tokens += result.response.request_prompt_tokens or 0
-        self.requests_stats.output_tokens += result.response.request_output_tokens or 0
-        total_tokens = (result.response.request_prompt_tokens or 0) + (
-            result.response.request_output_tokens or 0
-        )
-        self.requests_stats.total_tokens += total_tokens
+        if self._is_in_cooldown(request_info, scheduler_state):
+            status["requests_in_cooldown"] = True
+            return status

-        return True
+        if "completed" not in state:
+            state["completed"] = []
+            state["errored"] = []
+            state["incomplete"] = []

-    def compile(self) -> GenerativeBenchmark:
+        # Categorize request by status
+        if request_info.status == "completed":
+            state["completed"].append((response, request, request_info))
+        elif request_info.status == "cancelled":
+            state["incomplete"].append((response, request, request_info))
+        else:
+            state["errored"].append((response, request, request_info))
+
+        return status
+
+    def compile(
+        self,
+        state: AggregatorState,
+        scheduler_state: SchedulerState,  # noqa: ARG002
+    ) -> dict[str, Any]:
         """
-        Compile the benchmark results and statistics into a GenerativeBenchmark object.
-        This is required to be implemented by subclasses to finalize the benchmark
-        and return the compiled object.
+        Compile aggregated requests into comprehensive benchmark results.
+
+        Transforms collected request data into detailed metrics including timing
+        distributions, token statistics, throughput measurements, and status
+        breakdowns.
+
+        :param state: Accumulated request data categorized by completion status.
+        :param scheduler_state: Final scheduler execution state.
+ :return: Complete benchmark results with metrics and request statistics. """ - successful, incomplete, errored = self._compile_results() - - return GenerativeBenchmark.from_stats( - run_id=self.run_id, - successful=successful, - incomplete=incomplete, - errored=errored, - args=self.args, - run_stats=BenchmarkRunStats( - start_time=self.requests_stats.totals.total.start_time, - end_time=time.time(), - requests_made=StatusBreakdown( - successful=int(self.requests_stats.totals.successful.total), - errored=int(self.requests_stats.totals.errored.total), - incomplete=int(self.requests_stats.totals.incomplete.total), - total=int(self.requests_stats.totals.total.total), - ), - queued_time_avg=self.requests_stats.queued_time.mean, - scheduled_time_delay_avg=self.requests_stats.scheduled_time_delay.mean, - scheduled_time_sleep_avg=self.requests_stats.scheduled_time_sleep.mean, - worker_start_delay_avg=self.requests_stats.worker_start_delay.mean, - worker_time_avg=self.requests_stats.worker_time.mean, - worker_start_time_targeted_delay_avg=self.requests_stats.worker_start_time_targeted_delay.mean, - request_start_time_delay_avg=self.requests_stats.request_start_time_delay.mean, - request_start_time_targeted_delay_avg=self.requests_stats.request_start_time_targeted_delay.mean, - request_time_delay_avg=self.requests_stats.request_time_delay.mean, - request_time_avg=self.requests_stats.request_time.mean, - ), - worker=self.worker_description, - requests_loader=self.request_loader_description, - extras=self.extras, + successful: list[GenerativeRequestStats] = [ + self._create_generative_request_stats(response, request, request_info) + for (response, request, request_info) in state.get("completed", []) + ] + incomplete: list[GenerativeRequestStats] = [ + self._create_generative_request_stats(response, request, request_info) + for (response, request, request_info) in state.get("incomplete", []) + ] + errored: list[GenerativeRequestStats] = [ + self._create_generative_request_stats(response, request, request_info) + for (response, request, request_info) in state.get("errored", []) + ] + + # Use all requests for metrics calculations (not sampled) + total: list[GenerativeRequestStats] = successful + incomplete + errored + total_types: list[Literal["successful", "incomplete", "error"]] = [ + *["successful"] * len(successful), + *["incomplete"] * len(incomplete), + *["error"] * len(errored), + ] + start_time = min( + [math.inf] + + [ + req.scheduler_info.request_timings.request_start + for req in total + if req.scheduler_info.request_timings.request_start is not None + ] + ) + end_time = max( + [-1 * math.inf] + + [ + req.scheduler_info.request_timings.request_end + for req in total + if req.scheduler_info.request_timings.request_end is not None + ] ) - def _compile_results( - self, - ) -> tuple[ - list[GenerativeTextResponseStats], - list[GenerativeTextErrorStats], - list[GenerativeTextErrorStats], - ]: - successful: list[GenerativeTextResponseStats] = [ - GenerativeTextResponseStats( - request_id=result.request.request_id, - request_type=result.request.request_type, - scheduler_info=result.request_info, - prompt=str(result.request.content), - prompt_tokens=self._compile_tokens_count( - value=str(result.request.content), - requests_tokens=result.response.request_prompt_tokens, - response_tokens=result.response.response_prompt_tokens, - preferred_tokens_source=settings.preferred_prompt_tokens_source, - errored=False, + return { + "start_time": start_time, + "end_time": end_time, + "request_totals": 
StatusBreakdown[int, int, int, int]( + successful=len(successful), + incomplete=len(incomplete), + errored=len(errored), + total=len(total), + ), + "requests": StatusBreakdown[ + list[GenerativeRequestStats], + list[GenerativeRequestStats], + list[GenerativeRequestStats], + list[GenerativeRequestStats], + ]( + successful=self._sample_request_stats(successful, self.request_samples), + incomplete=self._sample_request_stats(incomplete, self.request_samples), + errored=self._sample_request_stats(errored, self.request_samples), + ), + "metrics": GenerativeMetrics( + requests_per_second=self._calculate_requests_per_second( + statuses=total_types, requests=total ), - output=result.response.value, - output_tokens=self._compile_tokens_count( - value=result.response.value, - requests_tokens=result.response.request_output_tokens, - response_tokens=result.response.response_output_tokens, - preferred_tokens_source=settings.preferred_output_tokens_source, - errored=False, + request_concurrency=self._calculate_request_concurrency( + statuses=total_types, requests=total ), - start_time=result.response.start_time, - end_time=result.response.end_time, - first_token_time=result.response.first_iter_time or -1.0, - last_token_time=result.response.last_iter_time or -1.0, - ) - for result in self.results.successful - if result.request and result.response - ] - incomplete: list[GenerativeTextErrorStats] = [ - GenerativeTextErrorStats( - error=result.response.error or "", - request_id=result.request.request_id, - request_type=result.request.request_type, - scheduler_info=result.request_info, - prompt=str(result.request.content), - prompt_tokens=self._compile_tokens_count( - value=str(result.request.content), - requests_tokens=result.response.request_prompt_tokens, - response_tokens=result.response.response_prompt_tokens, - preferred_tokens_source=settings.preferred_prompt_tokens_source, - errored=True, + request_latency=self._calculate_request_latency( + statuses=total_types, requests=total ), - output=result.response.value, - output_tokens=self._compile_tokens_count( - value=result.response.value, - requests_tokens=result.response.request_output_tokens, - response_tokens=result.response.response_output_tokens, - preferred_tokens_source=settings.preferred_output_tokens_source, - errored=True, + prompt_token_count=self._calculate_prompt_token_count( + statuses=total_types, requests=total ), - start_time=result.response.start_time, - end_time=result.response.end_time, - first_token_time=result.response.first_iter_time, - last_token_time=result.response.last_iter_time, - ) - for result in self.results.incomplete - if result.request and result.response - ] - error: list[GenerativeTextErrorStats] = [ - GenerativeTextErrorStats( - error=result.response.error or "", - request_id=result.request.request_id, - request_type=result.request.request_type, - scheduler_info=result.request_info, - prompt=str(result.request.content), - prompt_tokens=self._compile_tokens_count( - value=str(result.request.content), - requests_tokens=result.response.request_prompt_tokens, - response_tokens=result.response.response_prompt_tokens, - preferred_tokens_source=settings.preferred_prompt_tokens_source, - errored=True, + output_token_count=self._calculate_output_token_count( + statuses=total_types, requests=total + ), + total_token_count=self._calculate_total_token_count( + statuses=total_types, requests=total + ), + time_to_first_token_ms=self._calculate_time_to_first_token_ms( + statuses=total_types, requests=total + ), + 
time_per_output_token_ms=self._calculate_time_per_output_token_ms( + statuses=total_types, requests=total + ), + inter_token_latency_ms=self._calculate_inter_token_latency_ms( + statuses=total_types, requests=total ), - output=result.response.value, - output_tokens=self._compile_tokens_count( - value=result.response.value, - requests_tokens=result.response.request_output_tokens, - response_tokens=result.response.response_output_tokens, - preferred_tokens_source=settings.preferred_output_tokens_source, - errored=True, + output_tokens_per_second=self._calculate_output_tokens_per_second( + statuses=total_types, requests=total ), - start_time=result.response.start_time, - end_time=result.response.end_time, - first_token_time=result.response.first_iter_time, - last_token_time=result.response.last_iter_time, + tokens_per_second=self._calculate_tokens_per_second( + statuses=total_types, requests=total + ), + ), + } + + def _is_in_warmup( + self, + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> bool: + """Check if the current request is within the warmup period.""" + if self.warmup is None: + return False + + if 0 < self.warmup < 1: # Percentage-based warmup + return ( + scheduler_state.remaining_fraction is not None + and scheduler_state.remaining_fraction > (1 - self.warmup) + ) + + if self.warmup >= 1: # Count/time-based warmup + if scheduler_state.processed_requests < self.warmup: + return True + + current_time = request_info.scheduler_timings.targeted_start + return ( + current_time is not None + and (current_time - scheduler_state.start_time) < self.warmup ) - for result in self.results.errored - if result.request and result.response - ] - return successful, incomplete, error + return False - def _compile_tokens_count( + def _is_in_cooldown( self, - value: str, - requests_tokens: Optional[int], - response_tokens: Optional[int], - preferred_tokens_source: Optional[Literal["request", "response", "local"]], - errored: bool, - ) -> int: - if not errored and preferred_tokens_source == "response" and response_tokens: - return response_tokens or 0 - - if not errored and preferred_tokens_source == "request" and requests_tokens: - return requests_tokens or 0 - - if preferred_tokens_source in {"response", "request"} and ( - self.processor is None or errored or response_tokens or requests_tokens - ): - # we had a preferred tokens source that isn't local and we either - # have the data to return something or we don't have the ability - # to calculate locally - return response_tokens or requests_tokens or 0 - - self.processor = check_load_processor( - self.processor, - processor_args=self.processor_args, - error_msg="Processor/Tokenizer is required for calculating token counts.", + request_info: ScheduledRequestInfo, + scheduler_state: SchedulerState, + ) -> bool: + """Check if the current request is within the cooldown period.""" + if self.cooldown is None: + return False + + if 0 < self.cooldown < 1: # Percentage-based cooldown + return ( + scheduler_state.remaining_fraction is not None + and scheduler_state.remaining_fraction < self.cooldown + ) + + if self.cooldown >= 1: # Count/time-based cooldown + if scheduler_state.remaining_requests < self.cooldown: + return True + + current_time = ( + request_info.scheduler_timings.resolve_end + or request_info.scheduler_timings.targeted_start + ) + return ( + current_time is not None + and scheduler_state.remaining_duration is not None + and scheduler_state.remaining_duration < self.cooldown + ) + + return False + + @classmethod 
+ def _create_generative_request_stats( + cls, + response: GenerationResponse, + request: GenerationRequest, + request_info: ScheduledRequestInfo, + ) -> GenerativeRequestStats: + prompt_tokens = response.preferred_prompt_tokens( + settings.preferred_prompt_tokens_source + ) + output_tokens = response.preferred_output_tokens( + settings.preferred_output_tokens_source + ) + + return GenerativeRequestStats( + request_id=request.request_id, + request_type=request.request_type, + prompt=str(request.content), + request_args=response.request_args, + output=response.value, + iterations=response.iterations, + prompt_tokens=prompt_tokens, + output_tokens=output_tokens, + total_tokens=( + prompt_tokens + output_tokens + if prompt_tokens is not None and output_tokens is not None + else None + ), + scheduler_info=request_info, + ) + + @classmethod + def _sample_request_stats( + cls, stats: list[GenerativeRequestStats], sample_size: int | None + ) -> list[GenerativeRequestStats]: + if sample_size is None or sample_size <= 0 or not stats: + return stats + + return random.sample(stats, min(sample_size, len(stats))) + + @classmethod + def _calculate_requests_per_second( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_times = [] + + for status, request in zip(statuses, requests): + if not all_defined( + safe_getattr(request.scheduler_info.request_timings, "request_start"), + safe_getattr(request.scheduler_info.request_timings, "request_end"), + ): + continue + + filtered_statuses.append(status) + filtered_times.append( + ( + request.scheduler_info.request_timings.request_start, + request.scheduler_info.request_timings.request_end, + ) + ) + + return StatusDistributionSummary.from_request_times( + request_types=filtered_statuses, + requests=filtered_times, + distribution_type="rate", + ) + + @classmethod + def _calculate_request_concurrency( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_times = [] + + for status, request in zip(statuses, requests): + if not all_defined( + safe_getattr(request.scheduler_info.request_timings, "request_start"), + safe_getattr(request.scheduler_info.request_timings, "request_end"), + ): + continue + + filtered_statuses.append(status) + filtered_times.append( + ( + request.scheduler_info.request_timings.request_start, + request.scheduler_info.request_timings.request_end, + ) + ) + + return StatusDistributionSummary.from_request_times( + request_types=filtered_statuses, + requests=filtered_times, + distribution_type="concurrency", + ) + + @classmethod + def _calculate_request_latency( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_values = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.request_latency): + continue + + filtered_statuses.append(status) + filtered_values.append(request.request_latency) + + return StatusDistributionSummary.from_values( + value_types=filtered_statuses, + values=filtered_values, + ) + + @classmethod + def _calculate_prompt_token_count( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + 
filtered_values = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.prompt_tokens): + continue + + filtered_statuses.append(status) + filtered_values.append(request.prompt_tokens) + + return StatusDistributionSummary.from_values( + value_types=filtered_statuses, + values=filtered_values, + ) + + @classmethod + def _calculate_output_token_count( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_values = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.output_tokens): + continue + + filtered_statuses.append(status) + filtered_values.append(request.output_tokens) + + return StatusDistributionSummary.from_values( + value_types=filtered_statuses, + values=filtered_values, + ) + + @classmethod + def _calculate_total_token_count( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_values = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.total_tokens): + continue + + filtered_statuses.append(status) + filtered_values.append(request.total_tokens) + + return StatusDistributionSummary.from_values( + value_types=filtered_statuses, + values=filtered_values, + ) + + @classmethod + def _calculate_time_to_first_token_ms( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_values = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.time_to_first_token_ms): + continue + + filtered_statuses.append(status) + filtered_values.append(request.time_to_first_token_ms) + + return StatusDistributionSummary.from_values( + value_types=filtered_statuses, + values=filtered_values, + ) + + @classmethod + def _calculate_time_per_output_token_ms( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_values = [] + filtered_weights = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.time_to_first_token_ms): + continue + + # Add time to first token separately to better reflect in distribution + filtered_statuses.append(status) + filtered_values.append(request.time_to_first_token_ms) + filtered_weights.append(1) + + if not all_defined(request.inter_token_latency_ms): + continue + + # Add tokens after the first token to get the full distribution + filtered_statuses.append(status) + filtered_values.append(request.inter_token_latency_ms) + filtered_weights.append(request.output_tokens - 1) + + return StatusDistributionSummary.from_values( + value_types=filtered_statuses, + values=filtered_values, + weights=filtered_weights, + ) + + @classmethod + def _calculate_inter_token_latency_ms( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_values = [] + filtered_weights = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.inter_token_latency_ms): + continue + + filtered_statuses.append(status) + filtered_values.append(request.inter_token_latency_ms) + 
filtered_weights.append(request.output_tokens - 1) + + return StatusDistributionSummary.from_values( + value_types=filtered_statuses, + values=filtered_values, + weights=filtered_weights, + ) + + @classmethod + def _calculate_output_tokens_per_second( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_request_times = [] + filtered_first_iter_times = [] + filtered_iter_counts = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.output_tokens_per_second): + continue + + filtered_statuses.append(status) + filtered_request_times.append( + ( + request.scheduler_info.request_timings.request_start, + request.scheduler_info.request_timings.request_end, + ) + ) + filtered_first_iter_times.append( + request.scheduler_info.request_timings.first_iteration + ) + filtered_iter_counts.append(request.output_tokens) + + return StatusDistributionSummary.from_iterable_request_times( + request_types=filtered_statuses, + requests=filtered_request_times, + first_iter_times=filtered_first_iter_times, + iter_counts=filtered_iter_counts, + ) + + @classmethod + def _calculate_tokens_per_second( + cls, + statuses: list[Literal["successful", "incomplete", "error"]], + requests: list[GenerativeRequestStats], + ) -> StatusDistributionSummary: + filtered_statuses = [] + filtered_request_times = [] + filtered_first_iter_times = [] + filtered_iter_counts = [] + filtered_first_iter_counts = [] + + for status, request in zip(statuses, requests): + if not all_defined(request.tokens_per_second): + continue + + filtered_statuses.append(status) + filtered_request_times.append( + ( + request.scheduler_info.request_timings.request_start, + request.scheduler_info.request_timings.request_end, + ) + ) + filtered_first_iter_times.append( + request.scheduler_info.request_timings.first_iteration + ) + filtered_iter_counts.append(request.output_tokens - 1) + filtered_first_iter_counts.append(request.prompt_tokens + 1) + + return StatusDistributionSummary.from_iterable_request_times( + request_types=filtered_statuses, + requests=filtered_request_times, + first_iter_times=filtered_first_iter_times, + iter_counts=filtered_iter_counts, + first_iter_counts=filtered_first_iter_counts, ) - return len(self.processor.tokenize(value)) diff --git a/src/guidellm/benchmark/benchmark.py b/src/guidellm/benchmark/benchmark.py deleted file mode 100644 index 02eea02b..00000000 --- a/src/guidellm/benchmark/benchmark.py +++ /dev/null @@ -1,837 +0,0 @@ -import random -import uuid -from typing import Any, Literal, Optional, TypeVar, Union - -from pydantic import Field, computed_field - -from guidellm.benchmark.profile import ( - AsyncProfile, - ConcurrentProfile, - Profile, - SweepProfile, - SynchronousProfile, - ThroughputProfile, -) -from guidellm.objects import ( - StandardBaseModel, - StatusBreakdown, - StatusDistributionSummary, -) -from guidellm.request import ( - GenerativeRequestLoaderDescription, - RequestLoaderDescription, -) -from guidellm.scheduler import ( - AsyncConstantStrategy, - AsyncPoissonStrategy, - ConcurrentStrategy, - GenerativeRequestsWorkerDescription, - SchedulerRequestInfo, - SchedulingStrategy, - SynchronousStrategy, - ThroughputStrategy, - WorkerDescription, -) - -__all__ = [ - "Benchmark", - "BenchmarkArgs", - "BenchmarkMetrics", - "BenchmarkRunStats", - "BenchmarkT", - "GenerativeBenchmark", - "GenerativeMetrics", - "GenerativeTextErrorStats", - 
"GenerativeTextResponseStats", - "StatusBreakdown", -] - - -class BenchmarkArgs(StandardBaseModel): - """ - A serializable model representing the arguments used to specify a benchmark run - and how data was collected for it. - """ - - profile: Union[ - AsyncProfile, - SweepProfile, - ConcurrentProfile, - ThroughputProfile, - SynchronousProfile, - Profile, - ] = Field( - description=( - "The profile used for the entire benchmark run that the strategy for " - "this benchmark was pulled from." - ), - discriminator="type_", - ) - strategy_index: int = Field( - description=( - "The index of the strategy in the profile that was used for this benchmark." - ) - ) - strategy: Union[ - ConcurrentStrategy, - SchedulingStrategy, - ThroughputStrategy, - SynchronousStrategy, - AsyncPoissonStrategy, - AsyncConstantStrategy, - SchedulingStrategy, - ] = Field( - description="The scheduling strategy used to run this benchmark. ", - discriminator="type_", - ) - max_number: Optional[int] = Field( - description="The maximum number of requests to run for this benchmark, if any." - ) - max_duration: Optional[float] = Field( - description="The maximum duration in seconds to run this benchmark, if any." - ) - warmup_number: Optional[int] = Field( - description=( - "The number of requests to run for the warmup phase of this benchmark, " - "if any. These are requests that were not included in the final results." - ) - ) - warmup_duration: Optional[float] = Field( - description=( - "The duration in seconds to run for the warmup phase of this benchmark, " - "if any. These are requests that were not included in the final results." - ) - ) - cooldown_number: Optional[int] = Field( - description=( - "The number of requests to run for the cooldown phase of this benchmark, " - "if any. These are requests that were not included in the final results." - ) - ) - cooldown_duration: Optional[float] = Field( - description=( - "The duration in seconds to run for the cooldown phase of this benchmark, " - "if any. These are requests that were not included in the final results." - ) - ) - - -class BenchmarkRunStats(StandardBaseModel): - """ - A serializable model representing the run process statistics for the - entire benchmark run across all requests including warmup and cooldown. - """ - - start_time: float = Field( - description="The start time of the benchmark run.", - ) - end_time: float = Field( - description="The end time of the benchmark run.", - ) - requests_made: StatusBreakdown[int, int, int, int] = Field( - description=( - "The number of requests made for the benchmark run broken down by " - "status including successful, incomplete, errored, and the sum of all three" - ) - ) - queued_time_avg: float = Field( - description=( - "The average time spent in the queue for each request in the benchmark " - "run until it was dequeued by a worker." - ) - ) - scheduled_time_delay_avg: float = Field( - description=( - "The average time delay between when a request was dequeued and when it " - "was scheduled to be processed by a worker in the benchmark run. " - "This should be as close to 0 as possible, any additional time is " - "overheads from the system or the worker." - ) - ) - scheduled_time_sleep_avg: float = Field( - description=( - "The average time spent sleeping til the desired start time was reached " - "after being scheduled by the worker in the benchmark run." 
- ) - ) - worker_start_delay_avg: float = Field( - description=( - "The average time delay between when a request was scheduled and when " - "the worker started processing it in the benchmark run. " - "This should be as close to 0 as possible, any additional time is " - "overheads from the system or the worker." - ) - ) - worker_time_avg: float = Field( - description=( - "The average time taken by the worker to process each request in the " - "benchmark run. This includes the time to generate the response and " - "any additional processing time." - ) - ) - worker_start_time_targeted_delay_avg: float = Field( - description=( - "The average time delay between when a request was targeted to start " - "and when the worker actually started processing it in the benchmark " - "run. For async strategies, this represents delays from the ideal " - "system. For sync strategies, since those are doubled in queue, " - "this should be as close to the time for a request to be processed " - "as possible. Any additional time is overhead from the system or " - "the worker." - ) - ) - request_start_time_delay_avg: float = Field( - description=( - "The average time delay between the actual request being made " - "and the time the worker started on the request for all requests " - "that completed within the benchmark run. This time should be as close " - "to 0 as possible, any additional time is overhead from the system or " - "the worker." - ) - ) - request_start_time_targeted_delay_avg: float = Field( - description=( - "The average time delay between when the targeted start time and " - "the actual start time for each request in the benchmark run. " - "For async strategies, this represents delays from the ideal " - "system. For sync strategies, this should be as close to the " - "time for a request to be processed as possible. Any additional " - "time is overhead from the system or the worker." - ) - ) - request_time_delay_avg: float = Field( - description=( - "The average time delay between the total request time and the " - "worker time. This should be as close to 0 as possible, any additional " - "time is overhead from the system or the worker. " - ) - ) - request_time_avg: float = Field( - description=( - "The average time spent processing all requests in the benchmark run. " - "This is the time from when the actual request was started to when " - "it was completed." - ) - ) - - -class BenchmarkMetrics(StandardBaseModel): - """ - A serializable model representing the metrics for a benchmark run. - """ - - requests_per_second: StatusDistributionSummary = Field( - description="The distribution of requests per second for the benchmark.", - ) - request_concurrency: StatusDistributionSummary = Field( - description="The distribution of requests concurrency for the benchmark.", - ) - - -class Benchmark(StandardBaseModel): - """ - The base serializable model representing a benchmark run and its results. - Specific benchmarker implementations should extend this model to include - additional information or metadata as needed. - - Note, requests_per_second and request_concurrency are kept at this level - and are expected to be populated by the subclass implementation to ensure - the logic for Profiles can include more complicated logic for determining - what rates and concurrency values to use for subsequent strategies. 
- """ - - type_: Literal["benchmark"] = "benchmark" - id_: str = Field( - default_factory=lambda: str(uuid.uuid4()), - description="The unique identifier for the benchmark.", - ) - run_id: str = Field( - description=( - "The unique identifier for the encompasing benchmark run that this " - "benchmark was a part of." - ) - ) - args: BenchmarkArgs = Field( - description=( - "The arguments used to specify how to run the benchmark and collect data." - ) - ) - run_stats: BenchmarkRunStats = Field( - description=( - "The process statistics for the entire benchmark run across all requests." - ) - ) - worker: Union[WorkerDescription] = Field( - description=( - "The description and specifics for the worker used to resolve requests " - "for this benchmark." - ), - ) - request_loader: Union[RequestLoaderDescription] = Field( - description=( - "The description and specifics for the request loader used to create " - "requests for this benchmark." - ), - ) - extras: dict[str, Any] = Field( - description=( - "Any additional information or metadata that was passed for this benchmark." - ) - ) - metrics: BenchmarkMetrics = Field( - description=( - "The metrics for the benchmark run represented as a distribution of " - "various per-request statistics." - ), - ) - - -BenchmarkT = TypeVar("BenchmarkT", bound=Benchmark) - - -class GenerativeTextResponseStats(StandardBaseModel): - """ - A serializable model representing the request values, response values, and - statistics for a generative text response. - """ - - type_: Literal["generative_text_response"] = "generative_text_response" - request_id: Optional[str] = Field( - description="The unique identifier for the request.", - ) - request_type: Literal["text_completions", "chat_completions"] = Field( - description="The type of request made to the generative backend." - ) - scheduler_info: SchedulerRequestInfo = Field( - description=( - "The info about the request from the scheduler about how it was run." - ), - ) - prompt: str = Field( - description="The text prompt used for the generative request.", - ) - output: str = Field( - description="The generated text output from the generative request.", - ) - prompt_tokens: int = Field( - description="The number of tokens in the prompt text.", - ) - output_tokens: int = Field( - description="The number of tokens in the generated output text.", - ) - start_time: float = Field( - description="The time the request started.", - ) - end_time: float = Field( - description="The time the request ended.", - ) - first_token_time: float = Field( - description="The time the first token was received.", - ) - last_token_time: float = Field( - description="The time the last token was received.", - ) - - @computed_field # type: ignore[misc] - @property - def request_latency(self) -> float: - """ - :return: The duration of the request in seconds from the start to the end. - """ - return self.end_time - self.start_time - - @computed_field # type: ignore[misc] - @property - def time_to_first_token_ms(self) -> float: - """ - :return: The time in milliseconds from the start of the request to the first - token received. - """ - return 1000 * (self.first_token_time - self.start_time) - - @computed_field # type: ignore[misc] - @property - def time_per_output_token_ms(self) -> float: - """ - :return: The average time in milliseconds per output token generated. - This includes the time to generate the first token and all other tokens. 
- """ - if self.output_tokens == 0: - return 0.0 - - return ( - 1000 * (self.last_token_time - self.first_token_time) / self.output_tokens - ) - - @computed_field # type: ignore[misc] - @property - def inter_token_latency_ms(self) -> float: - """ - :return: The average time in milliseconds between generating tokens in the - output text. Note, does not include the time to generate the first token. - """ - if self.output_tokens <= 1: - return 0.0 - - return ( - 1000 - * (self.last_token_time - self.first_token_time) - / (self.output_tokens - 1) - ) - - @computed_field # type: ignore[misc] - @property - def tokens_per_second(self) -> float: - """ - :return: The average number of tokens generated per second in the prompt and - output text. - """ - if (latency := self.request_latency) == 0.0: - return 0.0 - - return (self.prompt_tokens + self.output_tokens) / latency - - @computed_field # type: ignore[misc] - @property - def output_tokens_per_second(self) -> float: - """ - :return: The average number of output tokens generated per second. - """ - if (latency := self.request_latency) == 0.0: - return 0.0 - - return self.output_tokens / latency - - -class GenerativeTextErrorStats(GenerativeTextResponseStats): - """ - A serializable model representing the request values, response values, and - statistics for a generative text response that errored. - Extends and overrides the GenerativeTextResponseStats model to include the - error message and optional properties given the error occurred. - """ - - type_: Literal["generative_text_error"] = "generative_text_error" # type: ignore[assignment] - error: str = Field( - description=( - "The error message for the error that occurred while making the request." - ) - ) - output: Optional[str] = Field( # type: ignore[assignment] - default=None, - description=( - "The generated text output from the generative request, if any, " - "before the error occurred." - ), - ) - first_token_time: Optional[float] = Field( # type: ignore[assignment] - default=None, - description=( - "The time the first token was received, if any, before the error occurred." - ), - ) - last_token_time: Optional[float] = Field( # type: ignore[assignment] - default=None, - description=( - "The time the last token was received, if any, before the error occurred." - ), - ) - - @computed_field # type: ignore[misc] - @property - def time_to_first_token_ms(self) -> Optional[float]: # type: ignore[override] - """ - :return: The time in milliseconds from the start of the request to the first - token received. None if the first token was not received. - """ - if self.first_token_time is None: - return None - - return super().time_to_first_token_ms - - @computed_field # type: ignore[misc] - @property - def time_per_output_token_ms(self) -> Optional[float]: # type: ignore[override] - """ - :return: The average time in milliseconds per output token generated. - This includes the time to generate the first token and all other tokens. - None if the output_tokens is None or 0. - """ - if ( - self.output_tokens is None - or self.output_tokens == 0 - or self.first_token_time is None - or self.last_token_time is None - ): - return None - - return super().time_per_output_token_ms - - @computed_field # type: ignore[misc] - @property - def inter_token_latency_ms(self) -> Optional[float]: # type: ignore[override] - """ - :return: The average time in milliseconds between generating tokens in the - output text. Note, does not include the time to generate the first token. 
- None if there were no output_tokens or the first token was not received. - """ - if ( - self.output_tokens is None - or self.first_token_time is None - or self.last_token_time is None - ): - return None - - return super().inter_token_latency_ms - - @computed_field # type: ignore[misc] - @property - def output_tokens_per_second(self) -> Optional[float]: # type: ignore[override] - """ - :return: The average number of tokens generated per second in the output text. - Note, does not include the time to generate the first token. None if there - were no output_tokens or the first token was not received. - """ - if self.inter_token_latency_ms is None: - return None - - return super().output_tokens_per_second - - -class GenerativeMetrics(BenchmarkMetrics): - """ - A serializable model representing the metrics for a generative benchmark run. - """ - - request_latency: StatusDistributionSummary = Field( - description="The distribution of latencies for the completed requests.", - ) - prompt_token_count: StatusDistributionSummary = Field( - description=( - "The distribution of token counts in the prompts for completed, " - "errored, and all requests." - ) - ) - output_token_count: StatusDistributionSummary = Field( - description=( - "The distribution of token counts in the outputs for completed, " - "errored, and all requests." - ) - ) - time_to_first_token_ms: StatusDistributionSummary = Field( - description=( - "The distribution of latencies to receiving the first token in " - "milliseconds for completed, errored, and all requests." - ), - ) - time_per_output_token_ms: StatusDistributionSummary = Field( - description=( - "The distribution of latencies per output token in milliseconds for " - "completed, errored, and all requests. " - "This includes the time to generate the first token and all other tokens." - ), - ) - inter_token_latency_ms: StatusDistributionSummary = Field( - description=( - "The distribution of latencies between tokens in milliseconds for " - "completed, errored, and all requests." - ), - ) - output_tokens_per_second: StatusDistributionSummary = Field( - description=( - "The distribution of output tokens per second for completed, " - "errored, and all requests." - ), - ) - tokens_per_second: StatusDistributionSummary = Field( - description=( - "The distribution of tokens per second, including prompt and output tokens " - "for completed, errored, and all requests." - ), - ) - - -class GenerativeBenchmark(Benchmark): - """ - A serializable model representing a benchmark run and its results for generative - requests and responses. Includes the completed and errored requests, the start - and end times for the benchmark, and the statistics for the requests and responses. - """ - - type_: Literal["generative_benchmark"] = "generative_benchmark" # type: ignore[assignment] - start_time: float = Field( - description="The start time of the first request for the benchmark.", - ) - end_time: float = Field( - description="The end time of the last request for the benchmark.", - ) - - @computed_field # type: ignore[misc] - @property - def duration(self) -> float: - """ - :return: The duration of the benchmark in seconds from the start of the - first request to the end of the last request. - """ - return self.end_time - self.start_time - - worker: GenerativeRequestsWorkerDescription = Field( - description=( - "The description and specifics for the worker used to resolve requests " - "for this benchmark." 
- ), - ) - request_loader: GenerativeRequestLoaderDescription = Field( - description=( - "The description and specifics for the request loader used to create " - "requests for this benchmark." - ), - ) - metrics: GenerativeMetrics = Field( - description=( - "The metrics for the benchmark run represented as a distribution of " - "various per-request statistics." - ), - ) - # Output is ordered so keep the requests at the end for better readability in files - request_totals: StatusBreakdown[int, int, int, int] = Field( - description=( - "The number of requests made for the benchmark broken down by status " - "including successful, incomplete, errored, and the sum of all three" - ) - ) - request_samples: Optional[StatusBreakdown[int, int, int, None]] = Field( - description=( - "The number of requests that were randomly sampled for " - "the benchmark. None if no sampling was applied." - ), - default=None, - ) - requests: StatusBreakdown[ - list[GenerativeTextResponseStats], - list[GenerativeTextErrorStats], - list[GenerativeTextErrorStats], - None, - ] = Field( - description=( - "The breakdown of requests for the benchmark run including successful, " - "incomplete, and errored requests." - ), - ) - - def set_sample_size(self, sample_size: Optional[int]) -> "GenerativeBenchmark": - """ - Set the sample size for the benchmark. This will randomly sample the - requests for each status type to the given sample size or the maximum - number of requests for that status type, whichever is smaller. - This is applied to requests.successful, requests.errored, and - requests.incomplete. - If None, no sampling is applied and the state is kept. - - :param sample_size: The number of requests to sample for each status type. - :return: The benchmark with the sampled requests. - :raises ValueError: If the sample size is invalid. - """ - - if sample_size is not None: - if sample_size < 0 or not isinstance(sample_size, int): - raise ValueError( - f"Sample size must be non-negative integer, given {sample_size}" - ) - - sample_size = min(sample_size, len(self.requests.successful)) - error_sample_size = min(sample_size, len(self.requests.errored)) - incomplete_sample_size = min(sample_size, len(self.requests.incomplete)) - - self.requests.successful = random.sample( - self.requests.successful, sample_size - ) - self.requests.errored = random.sample( - self.requests.errored, error_sample_size - ) - self.requests.incomplete = random.sample( - self.requests.incomplete, incomplete_sample_size - ) - self.request_samples = StatusBreakdown( - successful=len(self.requests.successful), - incomplete=len(self.requests.incomplete), - errored=len(self.requests.errored), - ) - - return self - - @staticmethod - def from_stats( - run_id: str, - successful: list[GenerativeTextResponseStats], - incomplete: list[GenerativeTextErrorStats], - errored: list[GenerativeTextErrorStats], - args: BenchmarkArgs, - run_stats: BenchmarkRunStats, - worker: GenerativeRequestsWorkerDescription, - requests_loader: GenerativeRequestLoaderDescription, - extras: Optional[dict[str, Any]], - ) -> "GenerativeBenchmark": - """ - Create a GenerativeBenchmark instance from the given statistics and metadata. - Given the completed and errored requests, the benchmark will fill in the - remaining statistics for the various metrics required for a benchmark. - This is the preferred method for creating a GenerativeBenchmark instance - to ensure all statistics are properly calculated and populated. - - :param run_id: The unique identifier for the benchmark run. 
- :param completed: The list of completed requests. - :param errored: The list of errored requests. - :param args: The arguments used to specify how to run the benchmark - and collect data. - :param run_stats: The process statistics for the entire benchmark run across - all requests. - :param worker: The description and specifics for the worker used to resolve - requests. - :param requests_loader: The description and specifics for the request loader - used to create requests. - :param extras: Any additional information or metadata that was passed for - this benchmark. - :return: A GenerativeBenchmark instance with the given statistics and metadata - populated and calculated - """ - total = successful + incomplete + errored - total_types: list[Literal["successful", "incomplete", "error"]] = [ - *["successful"] * len(successful), # type: ignore[list-item] - *["incomplete"] * len(incomplete), # type: ignore[list-item] - *["error"] * len(errored), # type: ignore[list-item] - ] - start_time = min(req.start_time for req in total) - end_time = max(req.end_time for req in total) - - total_with_prompt, total_types_with_prompt = ( - zip(*filtered) - if ( - filtered := list( - filter(lambda val: bool(val[0].prompt), zip(total, total_types)) - ) - ) - else ([], []) - ) - total_with_output_first, total_types_with_output_first = ( - zip(*filtered) - if ( - filtered := list( - filter( - lambda val: bool(val[0].output_tokens > 0), - zip(total, total_types), - ) - ) - ) - else ([], []) - ) - total_with_output_multi, total_types_with_output_multi = ( - zip(*filtered) - if ( - filtered := list( - filter( - lambda val: bool(val[0].output_tokens > 1), - zip(total, total_types), - ) - ) - ) - else ([], []) - ) - - return GenerativeBenchmark( - run_id=run_id, - args=args, - run_stats=run_stats, - extras=extras or {}, - start_time=start_time, - end_time=end_time, - worker=worker, - request_loader=requests_loader, - metrics=GenerativeMetrics( - requests_per_second=StatusDistributionSummary.from_request_times( - request_types=total_types, - requests=[(req.start_time, req.end_time) for req in total], - distribution_type="rate", - ), - request_concurrency=StatusDistributionSummary.from_request_times( - request_types=total_types, - requests=[(req.start_time, req.end_time) for req in total], - distribution_type="concurrency", - ), - request_latency=StatusDistributionSummary.from_values( - value_types=total_types, - values=[req.request_latency for req in total], - ), - prompt_token_count=StatusDistributionSummary.from_values( - value_types=list(total_types_with_prompt), - values=[req.prompt_tokens for req in total_with_prompt], - ), - output_token_count=StatusDistributionSummary.from_values( - value_types=list(total_types_with_output_first), - values=[req.output_tokens for req in total_with_output_first], - ), - time_to_first_token_ms=StatusDistributionSummary.from_values( - value_types=list(total_types_with_output_first), - values=[ - req.time_to_first_token_ms or 0 - for req in total_with_output_first - ], - ), - time_per_output_token_ms=StatusDistributionSummary.from_values( - value_types=list(total_types_with_output_first), - values=[ - req.time_per_output_token_ms or 0 - for req in total_with_output_first - ], - weights=[req.output_tokens for req in total_with_output_first], - ), - inter_token_latency_ms=StatusDistributionSummary.from_values( - value_types=list(total_types_with_output_multi), - values=[ - req.inter_token_latency_ms or 0 - for req in total_with_output_multi - ], - weights=[req.output_tokens - 1 for 
req in total_with_output_multi], - ), - output_tokens_per_second=StatusDistributionSummary.from_iterable_request_times( - request_types=list(total_types_with_output_first), - requests=[ - (req.start_time, req.end_time) - for req in total_with_output_first - ], - first_iter_times=[ - req.first_token_time or req.start_time - for req in total_with_output_first - ], - iter_counts=[req.output_tokens for req in total_with_output_first], - ), - tokens_per_second=StatusDistributionSummary.from_iterable_request_times( - request_types=list(total_types_with_output_first), - requests=[ - (req.start_time, req.end_time) - for req in total_with_output_first - ], - first_iter_times=[ - req.first_token_time or req.start_time - for req in total_with_output_first - ], - iter_counts=[req.output_tokens for req in total_with_output_first], - first_iter_counts=[ - # prompt tokens + first token - req.prompt_tokens + 1 - for req in total_with_output_first - ], - ), - ), - request_totals=StatusBreakdown( - successful=len(successful), - incomplete=len(incomplete), - errored=len(errored), - total=len(total), - ), - requests=StatusBreakdown( - successful=successful, - incomplete=incomplete, - errored=errored, - ), - ) diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index 0e34e322..ae591c23 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -1,334 +1,266 @@ -import time +""" +Benchmark execution orchestration and lifecycle management. + +Provides the core benchmarking engine that coordinates request scheduling, +data aggregation, and result compilation across different execution strategies +and environments. + +Classes: + Benchmarker: Abstract benchmark orchestrator for request processing workflows. + +Type Variables: + BenchmarkT: Generic benchmark result type. + RequestT: Generic request object type. + RequestTimingsT: Generic request timing object type. + ResponseT: Generic response object type. 
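# --- Illustrative sketch, not part of the committed diff ---------------------
# The refactored Benchmarker below drives per-request "aggregator" callables
# and then compiles their accumulated state into a benchmark object. A minimal
# stand-in that mirrors that call shape, assuming aggregator state behaves like
# a plain dict, could look like this. All names here are hypothetical and only
# illustrate the pattern; the real protocol is guidellm's Aggregator /
# CompilableAggregator.
from typing import Any


class MeanLatencyAggregator:
    """Toy aggregator: accumulate request latencies, compile their mean."""

    def __call__(self, state, response, request, request_info, scheduler_state):
        # Called once per scheduler update; mutate state and return a small
        # progress update (or an empty dict when nothing changed).
        latency = getattr(response, "latency", None)
        if latency is None:
            return {}
        state["sum"] = state.get("sum", 0.0) + latency
        state["count"] = state.get("count", 0) + 1
        return {"count": state["count"]}

    def compile(self, state, scheduler_state) -> dict[str, Any]:
        # Called once per strategy to turn accumulated state into fields that
        # get merged into the benchmark's constructor kwargs.
        count = state.get("count", 0)
        return {"mean_latency": (state.get("sum", 0.0) / count) if count else None}
# -----------------------------------------------------------------------------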
+""" + +from __future__ import annotations + import uuid -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator, Iterable -from pathlib import Path +from abc import ABC +from collections.abc import AsyncIterator, Iterable from typing import ( Any, Generic, - Literal, - Optional, - Union, ) -from pydantic import Field -from transformers import PreTrainedTokenizerBase # type: ignore # noqa: PGH003 - -from guidellm.backend import Backend, ResponseSummary from guidellm.benchmark.aggregator import ( - AggregatorT, - BenchmarkT, - GenerativeBenchmarkAggregator, + Aggregator, + AggregatorState, + CompilableAggregator, ) -from guidellm.benchmark.benchmark import BenchmarkArgs, GenerativeBenchmark +from guidellm.benchmark.objects import BenchmarkerDict, BenchmarkT, SchedulerDict from guidellm.benchmark.profile import Profile -from guidellm.objects import StandardBaseModel -from guidellm.request import ( - GenerationRequest, - GenerativeRequestLoaderDescription, - RequestLoaderDescription, +from guidellm.scheduler import ( + BackendInterface, + Constraint, + Environment, + NonDistributedEnvironment, RequestT, ResponseT, -) -from guidellm.scheduler import ( - GenerativeRequestsWorker, - RequestsWorker, Scheduler, - SchedulerRequestResult, + SchedulerState, SchedulingStrategy, ) +from guidellm.utils import InfoMixin, ThreadSafeSingletonMixin +from guidellm.utils.pydantic_utils import StandardBaseDict -__all__ = ["Benchmarker", "BenchmarkerResult", "GenerativeBenchmarker"] +__all__ = ["Benchmarker"] -class BenchmarkerResult( - StandardBaseModel, Generic[AggregatorT, BenchmarkT, RequestT, ResponseT] +class Benchmarker( + Generic[BenchmarkT, RequestT, ResponseT], + ABC, + ThreadSafeSingletonMixin, ): - type_: Literal[ - "run_start", - "run_complete", - "scheduler_start", - "scheduler_update", - "scheduler_complete", - "benchmark_compiled", - ] - start_time: float - end_number: int - profile: Profile - current_index: int - current_strategy: Optional[SchedulingStrategy] = None - current_aggregator: Optional[AggregatorT] = None - current_benchmark: Optional[BenchmarkT] = None - current_result: Optional[SchedulerRequestResult[RequestT, ResponseT]] = None - - -class BenchmarkerStrategyLimits(StandardBaseModel): - requests_loader_size: Optional[int] = Field( - description="Size of the request loader.", - ) - max_number_per_strategy: Optional[int] = Field( - description="Maximum number of requests to process per strategy.", - ge=0, - ) - max_duration_per_strategy: Optional[float] = Field( - description="Maximum duration (in seconds) to process requests per strategy.", - ge=0, - ) - warmup_percent_per_strategy: Optional[float] = Field( - description="Percentage of requests to use for warmup.", - ge=0, - le=1, - ) - cooldown_percent_per_strategy: Optional[float] = Field( - description="Percentage of requests to use for cooldown.", - ge=0, - le=1, - ) - - @property - def max_number(self) -> Optional[int]: - if self.max_number_per_strategy is not None: - return self.max_number_per_strategy - - if self.requests_loader_size is not None: - return self.requests_loader_size - - return None - - @property - def max_duration(self) -> Optional[float]: - return self.max_duration_per_strategy + """ + Abstract benchmark orchestrator for request processing workflows. 
- @property - def warmup_number(self) -> Optional[int]: - if self.warmup_percent_per_strategy is None or self.max_number is None: - return None + Coordinates the execution of benchmarking runs across different scheduling + strategies, aggregating metrics and compiling results. Manages the complete + benchmark lifecycle from request submission through result compilation. - return int(self.warmup_percent_per_strategy * self.max_number) - - @property - def warmup_duration(self) -> Optional[float]: - if self.warmup_percent_per_strategy is None or self.max_duration is None: - return None - - return self.warmup_percent_per_strategy * self.max_duration - - @property - def cooldown_number(self) -> Optional[int]: - if self.cooldown_percent_per_strategy is None or self.max_number is None: - return None - - return int(self.cooldown_percent_per_strategy * self.max_number) - - @property - def cooldown_duration(self) -> Optional[float]: - if self.cooldown_percent_per_strategy is None or self.max_duration is None: - return None - - return self.cooldown_percent_per_strategy * self.max_duration - - -class Benchmarker(Generic[AggregatorT, BenchmarkT, RequestT, ResponseT], ABC): - def __init__( - self, - worker: RequestsWorker[RequestT, ResponseT], - request_loader: Iterable[RequestT], - requests_loader_description: RequestLoaderDescription, - benchmark_save_extras: Optional[dict[str, Any]] = None, - ): - self.worker = worker - self.scheduler: Scheduler[RequestT, ResponseT] = Scheduler( - worker=worker, request_loader=request_loader - ) - self.requests_loader_description = requests_loader_description - self.benchmark_save_extras = benchmark_save_extras + Implements thread-safe singleton pattern to ensure consistent state across + concurrent benchmark operations. + """ async def run( self, + requests: Iterable[RequestT | Iterable[RequestT | tuple[RequestT, float]]], + backend: BackendInterface[RequestT, ResponseT], profile: Profile, - max_number_per_strategy: Optional[int], - max_duration_per_strategy: Optional[float], - warmup_percent_per_strategy: Optional[float], - cooldown_percent_per_strategy: Optional[float], - ) -> AsyncGenerator[ - BenchmarkerResult[AggregatorT, BenchmarkT, RequestT, ResponseT], None + benchmark_class: type[BenchmarkT], + benchmark_aggregators: dict[ + str, + Aggregator[ResponseT, RequestT] | CompilableAggregator[ResponseT, RequestT], + ], + environment: Environment | None = None, + ) -> AsyncIterator[ + tuple[ + AggregatorState | None, + BenchmarkT | None, + SchedulingStrategy, + SchedulerState | None, + ] ]: - try: - requests_loader_size = len(self.scheduler.request_loader) # type: ignore[arg-type] - except Exception: # noqa: BLE001 - requests_loader_size = None - - strategy_limits = BenchmarkerStrategyLimits( - requests_loader_size=requests_loader_size, - max_number_per_strategy=max_number_per_strategy, - max_duration_per_strategy=max_duration_per_strategy, - warmup_percent_per_strategy=warmup_percent_per_strategy, - cooldown_percent_per_strategy=cooldown_percent_per_strategy, - ) - start_time = time.time() - end_number = len(profile.strategy_types) - current_index = -1 - run_id = str(uuid.uuid4()) - - yield BenchmarkerResult( - type_="run_start", - start_time=start_time, - end_number=end_number, - profile=profile, - current_index=current_index, - current_strategy=None, - current_aggregator=None, - current_benchmark=None, - current_result=None, - ) - - while scheduling_strategy := profile.next_strategy(): - current_index += 1 - aggregator = self.create_benchmark_aggregator( - 
run_id=run_id, + """ + Execute benchmark runs across multiple scheduling strategies. + + Orchestrates the complete benchmark workflow: iterates through scheduling + strategies from the profile, executes requests through the scheduler, + aggregates metrics, and compiles final benchmark results. + + :param requests: Request datasets for processing across strategies. + :param backend: Backend interface for request processing. + :param profile: Benchmark profile defining strategies and constraints. + :param environment: Execution environment for coordination. + :param benchmark_aggregators: Metric aggregation functions by name. + :param benchmark_class: Class for constructing final benchmark objects. + :yield: Tuples of (metrics_update, benchmark_result, strategy, state). + :raises Exception: If benchmark execution or compilation fails. + """ + with self.thread_lock: + if environment is None: + environment = NonDistributedEnvironment() + + run_id = str(uuid.uuid4()) + strategies_generator = profile.strategies_generator() + strategy, constraints = next(strategies_generator) + + while strategy is not None: + yield None, None, strategy, None + aggregators_state = { + key: AggregatorState() for key in benchmark_aggregators + } + + async for ( + response, + request, + request_info, + scheduler_state, + ) in Scheduler[RequestT, ResponseT]().run( + requests=requests, + backend=backend, + strategy=strategy, + env=environment, + **constraints, + ): + aggregators_update = AggregatorState() + for key, aggregator in benchmark_aggregators.items(): + update = aggregator( + aggregators_state[key], + response, + request, + request_info, + scheduler_state, + ) + if update: + aggregators_update.update(update) + yield aggregators_update, None, strategy, scheduler_state + + benchmark_kwargs = self._compile_benchmark_kwargs( + run_id=run_id, + run_index=len(profile.completed_strategies), + profile=profile, + requests=requests, + backend=backend, + environment=environment, + aggregators=benchmark_aggregators, + aggregators_state=aggregators_state, + strategy=strategy, + constraints=constraints, + scheduler_state=scheduler_state, + ) + benchmark = benchmark_class(**benchmark_kwargs) + yield None, benchmark, strategy, None + + try: + strategy, constraints = strategies_generator.send(benchmark) + except StopIteration: + strategy = None + constraints = None + + @classmethod + def _compile_benchmark_kwargs( + cls, + run_id: str, + run_index: int, + profile: Profile, + requests: Iterable[RequestT | Iterable[RequestT | tuple[RequestT, float]]], + backend: BackendInterface[RequestT, ResponseT], + environment: Environment, + aggregators: dict[ + str, + Aggregator[ResponseT, RequestT] | CompilableAggregator[ResponseT, RequestT], + ], + aggregators_state: dict[str, dict[str, Any]], + strategy: SchedulingStrategy, + constraints: dict[str, Any | dict[str, Any] | Constraint], + scheduler_state: SchedulerState | None, + ) -> dict[str, Any]: + """ + Compile benchmark construction parameters from execution results. + + Aggregates metadata from scheduler execution and compiles it into + structured parameters for benchmark object construction. + + :param run_id: Unique identifier for the benchmark run. + :param run_index: Index of this strategy in the benchmark profile. + :param profile: Benchmark profile containing strategy configuration. + :param requests: Request datasets used for the benchmark. + :param backend: Backend interface used for request processing. + :param environment: Execution environment for coordination. 
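# --- Illustrative sketch, not part of the committed diff ---------------------
# One way a caller might drain the async generator returned by Benchmarker.run()
# above, keeping only compiled benchmarks. The benchmarker, request iterable,
# backend, profile, and aggregator mapping are assumed to be constructed
# elsewhere (see the generative entrypoint later in this patch).
async def collect_benchmarks(benchmarker, requests, backend, profile,
                             benchmark_class, aggregators):
    compiled = []
    async for update, benchmark, strategy, scheduler_state in benchmarker.run(
        requests=requests,
        backend=backend,
        profile=profile,
        benchmark_class=benchmark_class,
        benchmark_aggregators=aggregators,
    ):
        if benchmark is not None:
            # The final yield for each strategy carries the compiled result.
            compiled.append(benchmark)
        # `update` carries incremental aggregator state for progress displays;
        # `scheduler_state` is populated while requests are in flight.
    return compiled
# -----------------------------------------------------------------------------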
+ :param aggregators: Metric aggregation functions by name. + :param aggregators_state: Current state of metric aggregators. + :param strategy: Scheduling strategy that was executed. + :param constraints: Runtime constraints applied during execution. + :param scheduler_state: Final state of scheduler execution. + :return: Dictionary of parameters for benchmark object construction. + :raises ValueError: If aggregator output conflicts with existing keys. + """ + benchmark_kwargs = { + "run_id": run_id, + "run_index": run_index, + "scheduler": SchedulerDict( + strategy=strategy, + constraints={ + key: InfoMixin.extract_from_obj(val) + for key, val in constraints.items() + }, + state=scheduler_state, + ), + "benchmarker": BenchmarkerDict( profile=profile, - strategy_index=current_index, - strategy=scheduling_strategy, - limits=strategy_limits, + requests=InfoMixin.extract_from_obj(requests), + backend=backend.info, + environment=environment.info, + aggregators={ + key: InfoMixin.extract_from_obj(aggregator) + for key, aggregator in aggregators.items() + }, + ), + "env_args": StandardBaseDict(), + "extras": StandardBaseDict(), + } + + def _combine( + existing: dict[str, Any] | StandardBaseDict, + addition: dict[str, Any] | StandardBaseDict, + ) -> dict[str, Any] | StandardBaseDict: + if not isinstance(existing, (dict, StandardBaseDict)): + raise ValueError( + f"Existing value {existing} (type: {type(existing).__name__}) " + f"is not a valid type for merging." + ) + if not isinstance(addition, (dict, StandardBaseDict)): + raise ValueError( + f"Addition value {addition} (type: {type(addition).__name__}) " + f"is not a valid type for merging." + ) + + add_kwargs = ( + addition if isinstance(addition, dict) else addition.model_dump() ) - async for result in self.scheduler.run( - scheduling_strategy=scheduling_strategy, - max_number=max_number_per_strategy, - max_duration=max_duration_per_strategy, - ): - if result.type_ == "run_start": - yield BenchmarkerResult( - type_="scheduler_start", - start_time=start_time, - end_number=end_number, - profile=profile, - current_index=current_index, - current_strategy=scheduling_strategy, - current_aggregator=aggregator, - current_benchmark=None, - current_result=None, - ) - elif result.type_ == "run_complete": - yield BenchmarkerResult( - type_="scheduler_complete", - start_time=start_time, - end_number=end_number, - profile=profile, - current_index=current_index, - current_strategy=scheduling_strategy, - current_aggregator=aggregator, - current_benchmark=None, - current_result=None, - ) - elif isinstance(result, SchedulerRequestResult): - aggregator.add_result(result) + if isinstance(existing, dict): + return {**add_kwargs, **existing} - yield BenchmarkerResult( - type_="scheduler_update", - start_time=start_time, - end_number=end_number, - profile=profile, - current_index=current_index, - current_strategy=scheduling_strategy, - current_aggregator=aggregator, - current_benchmark=None, - current_result=result, - ) - else: - raise ValueError(f"Unexpected result type: {type(result)}") + return existing.__class__(**{**add_kwargs, **existing.model_dump()}) - benchmark: BenchmarkT = aggregator.compile() - profile.completed_strategy( - average_rate=benchmark.metrics.requests_per_second.successful.mean, - average_concurrency=benchmark.metrics.request_concurrency.successful.mean, - ) - - yield BenchmarkerResult( - type_="benchmark_compiled", - start_time=start_time, - end_number=end_number, - profile=profile, - current_index=current_index, - 
current_strategy=scheduling_strategy, - current_aggregator=None, - current_benchmark=benchmark, - current_result=None, - ) + for key, aggregator in aggregators.items(): + if not isinstance(aggregator, CompilableAggregator): + continue - yield BenchmarkerResult( - type_="run_complete", - start_time=start_time, - end_number=end_number, - profile=profile, - current_index=current_index, - current_strategy=None, - current_aggregator=None, - current_benchmark=None, - current_result=None, - ) + compiled = aggregator.compile(aggregators_state[key], scheduler_state) - @abstractmethod - def create_benchmark_aggregator( - self, - run_id: str, - profile: Profile, - strategy_index: int, - strategy: SchedulingStrategy, - limits: BenchmarkerStrategyLimits, - ) -> AggregatorT: ... - - -class GenerativeBenchmarker( - Benchmarker[ - GenerativeBenchmarkAggregator, - GenerativeBenchmark, - GenerationRequest, - ResponseSummary, - ], -): - def __init__( - self, - backend: Backend, - request_loader: Iterable[GenerationRequest], - request_loader_description: GenerativeRequestLoaderDescription, - benchmark_save_extras: Optional[dict[str, Any]] = None, - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]] = None, - processor_args: Optional[dict[str, Any]] = None, - ): - super().__init__( - worker=GenerativeRequestsWorker(backend), - request_loader=request_loader, - requests_loader_description=request_loader_description, - benchmark_save_extras=benchmark_save_extras, - ) - self.processor = processor - self.processor_args = processor_args + for field_name, field_val in compiled.items(): + if field_name in benchmark_kwargs: + # If the key already exists, merge the values + benchmark_kwargs[field_name] = _combine( + benchmark_kwargs[field_name], field_val + ) + else: + benchmark_kwargs[field_name] = field_val - def create_benchmark_aggregator( - self, - run_id: str, - profile: Profile, - strategy_index: int, - strategy: SchedulingStrategy, - limits: BenchmarkerStrategyLimits, - ) -> GenerativeBenchmarkAggregator: - return GenerativeBenchmarkAggregator( - run_id=run_id, - args=BenchmarkArgs( - profile=profile, - strategy_index=strategy_index, - strategy=strategy, - max_number=limits.max_number, - max_duration=limits.max_duration, - warmup_number=limits.warmup_number, - warmup_duration=limits.warmup_duration, - cooldown_number=limits.cooldown_number, - cooldown_duration=limits.cooldown_duration, - ), - worker_description=self.worker.description, # type: ignore[arg-type] - request_loader_description=self.requests_loader_description, # type: ignore[arg-type] - extras=self.benchmark_save_extras or {}, - processor=self.processor, - processor_args=self.processor_args, - ) + return benchmark_kwargs diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 2ef85c3e..82f92ceb 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -1,23 +1,56 @@ +from __future__ import annotations + from collections.abc import Iterable from pathlib import Path -from typing import Any, Literal, Optional, Union +from typing import Any, Literal from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from transformers import ( # type: ignore[import] PreTrainedTokenizerBase, ) -from guidellm.backend import Backend, BackendType -from guidellm.benchmark.benchmarker import GenerativeBenchmarker +from guidellm.backend import ( + Backend, + BackendType, + GenerationRequest, + GenerationResponse, +) +from guidellm.benchmark.aggregator import 
( + Aggregator, + CompilableAggregator, + GenerativeRequestsAggregator, + GenerativeStatsProgressAggregator, + SchedulerStatsAggregator, + SerializableAggregator, +) +from guidellm.benchmark.benchmarker import Benchmarker +from guidellm.benchmark.objects import GenerativeBenchmark, GenerativeBenchmarksReport from guidellm.benchmark.output import ( - GenerativeBenchmarksConsole, - GenerativeBenchmarksReport, + GenerativeBenchmarkerConsole, + GenerativeBenchmarkerOutput, +) +from guidellm.benchmark.profile import Profile, ProfileType +from guidellm.benchmark.progress import ( + BenchmarkerProgress, + BenchmarkerProgressGroup, ) -from guidellm.benchmark.profile import ProfileType, create_profile -from guidellm.benchmark.progress import GenerativeTextBenchmarkerProgressDisplay from guidellm.benchmark.scenario import GenerativeTextScenario, Scenario from guidellm.request import GenerativeRequestLoader -from guidellm.scheduler import StrategyType +from guidellm.scheduler import ( + ConstraintInitializer, + NonDistributedEnvironment, + StrategyType, +) +from guidellm.utils import Console, InfoMixin + +__all__ = [ + "benchmark_generative_text", + "benchmark_with_scenario", + "reimport_benchmarks_report", +] + + +_CURRENT_WORKING_DIR = Path.cwd() async def benchmark_with_scenario(scenario: Scenario, **kwargs): @@ -31,135 +64,250 @@ async def benchmark_with_scenario(scenario: Scenario, **kwargs): raise ValueError(f"Unsupported Scenario type {type(scenario)}") -async def benchmark_generative_text( +# @validate_call(config={"arbitrary_types_allowed": True}) +async def benchmark_generative_text( # noqa: C901 target: str, - backend_type: BackendType, - backend_args: Optional[dict[str, Any]], - model: Optional[str], - processor: Optional[Optional[Union[str, Path, PreTrainedTokenizerBase]]], - processor_args: Optional[dict[str, Any]], - data: Union[ - str, - Path, - Iterable[Union[str, dict[str, Any]]], - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, - ], - data_args: Optional[dict[str, Any]], - data_sampler: Optional[Literal["random"]], - rate_type: Union[StrategyType, ProfileType], - rate: Optional[Union[float, list[float]]], - max_seconds: Optional[float], - max_requests: Optional[int], - warmup_percent: Optional[float], - cooldown_percent: Optional[float], - output_path: Optional[Union[str, Path]], - output_extras: Optional[dict[str, Any]], - output_sampling: Optional[int], - random_seed: int, - show_progress: bool = True, - show_progress_scheduler_stats: bool = False, - output_console: bool = True, -) -> tuple[GenerativeBenchmarksReport, Optional[Path]]: - console = GenerativeBenchmarksConsole(enabled=show_progress) - console.print_line("Creating backend...") - backend = Backend.create( - backend_type, target=target, model=model, **(backend_args or {}) - ) - await backend.validate() - console.print_line( - f"Backend {backend_type} connected to {target} for model {backend.model}." 
- ) + data: ( + Iterable[str] + | Iterable[dict[str, Any]] + | Dataset + | DatasetDict + | IterableDataset + | IterableDatasetDict + | str + | Path + ), + profile: StrategyType | ProfileType | Profile, + rate: float | list[float] | None = None, + random_seed: int = 42, + # Backend configuration + backend: BackendType | Backend = "openai_http", + backend_kwargs: dict[str, Any] | None = None, + model: str | None = None, + # Data configuration + processor: str | Path | PreTrainedTokenizerBase | None = None, + processor_args: dict[str, Any] | None = None, + data_args: dict[str, Any] | None = None, + data_sampler: Literal["random"] | None = None, + # Output configuration + output_path: str | Path | None = _CURRENT_WORKING_DIR, + output_formats: ( + tuple[str, ...] + | list[str] + | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] + | None + ) = ("console", "json", "html", "csv"), + # Updates configuration + progress: tuple[str, ...] | list[str] | list[BenchmarkerProgress] | None = None, + print_updates: bool = False, + # Aggregators configuration + add_aggregators: ( + dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] | None + ) = None, + warmup: float | None = None, + cooldown: float | None = None, + request_samples: int | None = 20, + # Constraints configuration + max_seconds: int | float | None = None, + max_requests: int | None = None, + max_errors: int | None = None, + max_error_rate: float | None = None, + max_global_error_rate: float | None = None, + **constraints: dict[str, ConstraintInitializer | Any], +) -> tuple[GenerativeBenchmarksReport, dict[str, Any]]: + console = Console(quiet=not print_updates) - if processor is None: - processor = backend.model - - console.print_line("Creating request loader...") - request_loader = GenerativeRequestLoader( - data=data, - data_args=data_args, - processor=processor, - processor_args=processor_args, - shuffle=data_sampler == "random", - iter_type=( - "finite" # assume a finite dataset is our limit - if max_requests is None and max_seconds is None - else "infinite" # default to infinite so we don't run out of data - ), - random_seed=random_seed, - ) - unique_requests = request_loader.num_unique_items(raise_err=False) - console.print_line( - f"Created loader with {unique_requests} unique requests from {data}.\n\n" - if unique_requests > 0 - else f"Created loader with unknown number unique requests from {data}.\n\n" - ) + with console.print_update_step( + title=f"Initializing backend {backend}" + ) as console_step: + backend = ( + Backend.create( + backend, target=target, model=model, **(backend_kwargs or {}) + ) + if not isinstance(backend, Backend) + else backend + ) + console_step.update(f"{backend.__class__.__name__} backend initialized") + await backend.process_startup() + await backend.validate() + console_step.finish( + title=f"{backend.__class__.__name__} backend initialized", + details=backend.info, + status_level="success", + ) - profile = create_profile(rate_type=rate_type, rate=rate) - benchmarker = GenerativeBenchmarker( - backend=backend, - request_loader=request_loader, - request_loader_description=request_loader.description, - benchmark_save_extras=output_extras, - processor=processor, - processor_args=processor_args, - ) - progress = ( - GenerativeTextBenchmarkerProgressDisplay( - display_scheduler_stats=show_progress_scheduler_stats + with console.print_update_step(title="Resolving processor") as console_step: + if processor is not None: + console_step.finish( + title="Processor resolved", + 
details=f"Using processor '{processor}'", + status_level="success", + ) + elif model is not None: + console_step.finish( + title="Processor resolved", + details=f"Using model '{model}' as processor", + status_level="success", + ) + processor = model + else: + console_step.update( + title="Resolving processor from backend.default_model", + status_level="info", + ) + processor = await backend.default_model() + console_step.finish( + title="Processor resolved", + details=( + f"Using model '{processor}' from backend " + f"{backend.__class__.__name__} as processor" + ), + status_level="success", + ) + await backend.process_shutdown() + + with console.print_update_step( + title=f"Initializing request loader from {data}" + ) as console_step: + request_loader = GenerativeRequestLoader( + data=data, + data_args=data_args, + processor=processor, + processor_args=processor_args, + shuffle=data_sampler == "random", + random_seed=random_seed, + ) + unique_requests = request_loader.num_unique_items(raise_err=False) + console_step.finish( + title=( + f"Request loader initialized with {unique_requests} unique requests " + f"from {data}" + ), + details=InfoMixin.extract_from_obj(request_loader), + status_level="success", + ) + + with console.print_update_step( + title=f"Resolving profile {profile}" + ) as console_step: + for key, val in { + "max_seconds": max_seconds, + "max_requests": max_requests, + "max_errors": max_errors, + "max_error_rate": max_error_rate, + "max_global_error_rate": max_global_error_rate, + }.items(): + if val is not None: + constraints[key] = val + if not isinstance(profile, Profile): + profile = Profile.create( + rate_type=profile, + rate=rate, + random_seed=random_seed, + constraints={**constraints}, + ) + elif constraints: + raise ValueError( + "Constraints must be empty when providing a Profile instance. 
" + f"Provided constraints: {constraints} ; provided profile: {profile}" + ) + console_step.finish( + title=f"{profile.__class__.__name__} profile resolved", + details=InfoMixin.extract_from_obj(profile), + status_level="success", + ) + + with console.print_update_step( + title="Creating benchmark aggregators" + ) as console_step: + aggregators = { + "scheduler_stats": SchedulerStatsAggregator(), + "requests_progress": GenerativeStatsProgressAggregator(), + "requests": GenerativeRequestsAggregator( + request_samples=request_samples, + warmup=warmup, + cooldown=cooldown, + ), + **SerializableAggregator.resolve(add_aggregators or {}), + } + console_step.finish( + title="Benchmark aggregators created", + details={key: str(val) for key, val in aggregators.items()}, + status_level="success", + ) + + with console.print_update_step(title="Resolving output formats") as console_step: + output_formats = GenerativeBenchmarkerOutput.resolve( + output_formats=(output_formats or {}), output_path=output_path + ) + console_step.finish( + title="Output formats resolved", + details={key: str(val) for key, val in output_formats.items()}, + status_level="success", ) - if show_progress - else None + + progress_group = BenchmarkerProgressGroup( + instances=progress or [], enabled=bool(progress) ) report = GenerativeBenchmarksReport() + console.print_update( + title="Setup complete, starting benchmarks...", status="success" + ) + console.print("\n\n") - async for result in benchmarker.run( - profile=profile, - max_number_per_strategy=max_requests, - max_duration_per_strategy=max_seconds, - warmup_percent_per_strategy=warmup_percent, - cooldown_percent_per_strategy=cooldown_percent, + async for ( + _aggregator_update, + benchmark, + _strategy, + _scheduler_state, + ) in progress_group( + profile, + Benchmarker[ + GenerativeBenchmark, + GenerationRequest, + GenerationResponse, + ]().run( + requests=request_loader, + backend=backend, + profile=profile, + environment=NonDistributedEnvironment(), + benchmark_aggregators=aggregators, + benchmark_class=GenerativeBenchmark, + ), ): - if progress: - progress.update(result) - - if result.type_ == "benchmark_compiled": - if result.current_benchmark is None: - raise ValueError("Current benchmark is None") - report.benchmarks.append( - result.current_benchmark.set_sample_size(output_sampling) - ) + if benchmark: + report.benchmarks.append(benchmark) - if output_console: - console.benchmarks = report.benchmarks - console.print_full_report() + output_format_results = {} + for key, output in output_formats.items(): + output_result = await output.finalize(report) + output_format_results[key] = output_result - if output_path: - console.print_line("\nSaving benchmarks report...") - saved_path = report.save_file(output_path) - console.print_line(f"Benchmarks report saved to {saved_path}") - else: - saved_path = None - - console.print_line("\nBenchmarking complete.") + console.print("\n\n") + console.print_update( + title=f"Benchmarking complete, generated {len(report.benchmarks)} benchmark(s)", + status="success", + ) + for key, value in output_format_results.items(): + console.print_update(title=f" {key:<8}: {value}", status="debug") - return report, saved_path + return report, output_format_results -def reimport_benchmarks_report(file: Path, output_path: Optional[Path]) -> None: +def reimport_benchmarks_report(file: Path, output_path: Path | None) -> None: """ The command-line entry point for re-importing and displaying an existing benchmarks report. 
Can also specify Assumes the file provided exists. """ - console = GenerativeBenchmarksConsole(enabled=True) report = GenerativeBenchmarksReport.load_file(file) - console.benchmarks = report.benchmarks - console.print_full_report() + console_output = GenerativeBenchmarkerConsole() + console_output.finalize(report) + console = Console() if output_path: - console.print_line("\nSaving benchmarks report...") - saved_path = report.save_file(output_path) - console.print_line(f"Benchmarks report saved to {saved_path}") + with console.print_update_step( + title=f"Saving benchmarks report to {output_path}..." + ) as console_step: + saved_path = report.save_file(output_path) + console_step.finish(title=f"Benchmarks report saved to {saved_path}") diff --git a/src/guidellm/benchmark/objects.py b/src/guidellm/benchmark/objects.py new file mode 100644 index 00000000..8afabba9 --- /dev/null +++ b/src/guidellm/benchmark/objects.py @@ -0,0 +1,473 @@ +""" +Benchmark data models and metrics for performance measurement and analysis. + +Provides comprehensive data structures for capturing, storing, and analyzing +benchmark results from scheduler executions. Includes timing measurements, +token statistics, and performance metrics for generative AI workloads. + +Classes: + BenchmarkSchedulerStats: Scheduler timing and performance statistics. + BenchmarkMetrics: Core benchmark metrics and distributions. + BenchmarkRequestStats: Individual request processing statistics. + Benchmark: Base benchmark result container with generic metrics. + GenerativeRequestStats: Request statistics for generative AI workloads. + GenerativeMetrics: Comprehensive metrics for generative benchmarks. + GenerativeBenchmark: Complete generative benchmark results and analysis. + GenerativeBenchmarksReport: Container for multiple benchmark results. + +Type Variables: + BenchmarkMetricsT: Generic benchmark metrics type. + BenchmarkRequestStatsT: Generic request statistics type. + BenchmarkT: Generic benchmark container type. 
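# --- Illustrative sketch, not part of the committed diff ---------------------
# The models below parameterize a base Benchmark container by its metrics and
# per-request stats types (BenchmarkMetricsT / BenchmarkRequestStatsT). A
# stripped-down, pydantic-free sketch of that pattern, using hypothetical
# names, is shown purely to illustrate the generic structure:
from dataclasses import dataclass, field
from typing import Generic, TypeVar

MetricsT = TypeVar("MetricsT")
RequestStatsT = TypeVar("RequestStatsT")


@dataclass
class BaseResult(Generic[MetricsT, RequestStatsT]):
    run_id: str
    metrics: MetricsT
    requests: list[RequestStatsT] = field(default_factory=list)


@dataclass
class TextGenMetrics:
    requests_per_second: float
    output_tokens_per_second: float


# A concrete result type pins the generic parameters, the same way
# GenerativeBenchmark pins GenerativeMetrics and GenerativeRequestStats below.
TextGenResult = BaseResult[TextGenMetrics, dict]
# -----------------------------------------------------------------------------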
+""" + +from __future__ import annotations + +import json +import uuid +from pathlib import Path +from typing import Any, ClassVar, Generic, Literal, TypeVar + +import yaml +from pydantic import Field, computed_field + +from guidellm.benchmark.profile import ( + Profile, +) +from guidellm.scheduler import ( + ScheduledRequestInfo, + SchedulerState, + SchedulingStrategy, +) +from guidellm.utils import ( + StandardBaseDict, + StandardBaseModel, + StatusBreakdown, + StatusDistributionSummary, +) + +__all__ = [ + "Benchmark", + "BenchmarkMetrics", + "BenchmarkSchedulerStats", + "BenchmarkT", + "GenerativeBenchmark", + "GenerativeBenchmarksReport", + "GenerativeMetrics", + "GenerativeRequestStats", +] + + +class BenchmarkSchedulerStats(StandardBaseDict): + """Scheduler timing and performance statistics.""" + + start_time: float = Field( + description="Unix timestamp when the benchmark run started" + ) + end_time: float = Field(description="Unix timestamp when the benchmark run ended") + requests_made: StatusBreakdown[int, int, int, int] = Field( + description="Request counts by status: successful, incomplete, errored, total" + ) + queued_time_avg: float = Field( + description="Avg time requests spent in the queue (seconds)" + ) + worker_resolve_start_delay_avg: float = Field( + description="Avg delay before worker begins resolving req after dequeue (sec)" + ) + worker_resolve_time_avg: float = Field( + description="Avg time for worker to resolve requests (seconds)" + ) + worker_resolve_end_delay_avg: float = Field( + description="Avg delay after request end till worker resolves (seconds)" + ) + finalized_delay_avg: float = Field( + description="Avg delay after resolve til finalized with in scheduler (sec)" + ) + worker_targeted_start_delay_avg: float = Field( + description="Avg delay from targeted start to actual worker start (seconds)" + ) + request_start_delay_avg: float = Field( + description="Avg delay after resolve til request start (seconds)" + ) + request_time_avg: float = Field(description="Avg request processing time (seconds)") + request_targeted_start_delay_avg: float = Field( + description="Avg delay from targeted start to actual request start" + ) + + +class SchedulerDict(StandardBaseDict): + """Scheduler configuration and execution state dictionary.""" + + strategy: SchedulingStrategy + constraints: dict[str, dict[str, Any]] + state: SchedulerState + + +class BenchmarkerDict(StandardBaseDict): + """Benchmarker configuration and component settings dictionary.""" + + profile: Profile + requests: dict[str, Any] + backend: dict[str, Any] + environment: dict[str, Any] + aggregators: dict[str, dict[str, Any]] + + +class BenchmarkMetrics(StandardBaseDict): + """Core benchmark metrics and statistical distributions.""" + + requests_per_second: StatusDistributionSummary = Field( + description="Distribution of requests per second across benchmark execution" + ) + request_concurrency: StatusDistributionSummary = Field( + description="Distribution of concurrent request counts during execution" + ) + request_latency: StatusDistributionSummary = Field( + description="Distribution of request latencies for completed requests" + ) + + +BenchmarkMetricsT = TypeVar("BenchmarkMetricsT", bound=BenchmarkMetrics) + + +class BenchmarkRequestStats(StandardBaseDict): + """Individual request processing statistics and scheduling metadata.""" + + scheduler_info: ScheduledRequestInfo = Field( + description="Scheduler metadata and timing information for the request" + ) + + +BenchmarkRequestStatsT = 
TypeVar("BenchmarkRequestStatsT", bound=BenchmarkRequestStats) + + +class Benchmark(StandardBaseDict, Generic[BenchmarkMetricsT, BenchmarkRequestStatsT]): + """Base benchmark result container with execution metadata.""" + + type_: Literal["benchmark"] = "benchmark" + id_: str = Field( + default_factory=lambda: str(uuid.uuid4()), + description="Unique identifier for this benchmark execution", + ) + run_id: str = Field( + description="Identifier for the benchmarker run containing this benchmark" + ) + run_index: int = Field( + description="Sequential index of this benchmark within the benchmarker run" + ) + scheduler: SchedulerDict = Field( + description="Scheduler configuration and execution state" + ) + benchmarker: BenchmarkerDict = Field( + description="Benchmarker configuration and component settings" + ) + env_args: StandardBaseDict = Field( + description="Environment arguments and runtime configuration" + ) + extras: StandardBaseDict = Field( + description="Additional metadata and custom benchmark parameters" + ) + run_stats: BenchmarkSchedulerStats = Field( + description="Scheduler timing and performance statistics" + ) + start_time: float = Field( + default=-1.0, description="Unix timestamp when the first request was initiated" + ) + end_time: float = Field( + default=-1.0, description="Unix timestamp when the last request completed" + ) + + @computed_field # type: ignore[misc] + @property + def duration(self) -> float: + """ + Benchmark execution duration in seconds. + + :return: Time elapsed from first request start to last request completion. + """ + return self.end_time - self.start_time + + metrics: BenchmarkMetricsT = Field( + description="Performance metrics and statistical distributions" + ) + request_totals: StatusBreakdown[int, int, int, int] = Field( + description="Request counts by status: successful, incomplete, errored, total" + ) + requests: StatusBreakdown[ + list[BenchmarkRequestStatsT], + list[BenchmarkRequestStatsT], + list[BenchmarkRequestStatsT], + None, + ] = Field( + description="Request details grouped by status: successful, incomplete, errored" + ) + + +BenchmarkT = TypeVar("BenchmarkT", bound=Benchmark) + + +class GenerativeRequestStats(BenchmarkRequestStats): + """Request statistics for generative AI text generation workloads.""" + + type_: Literal["generative_request_stats"] = "generative_request_stats" + request_id: str = Field(description="Unique identifier for the request") + request_type: Literal["text_completions", "chat_completions"] = Field( + description="Type of generative request: text or chat completion" + ) + prompt: str = Field(description="Input text prompt for generation") + request_args: dict[str, Any] = Field( + description="Generation parameters and configuration options" + ) + output: str | None = Field( + description="Generated text output, if request completed successfully" + ) + iterations: int = Field( + description="Number of processing iterations for the request" + ) + prompt_tokens: int | None = Field( + description="Number of tokens in the input prompt" + ) + output_tokens: int | None = Field( + description="Number of tokens in the generated output" + ) + + @computed_field # type: ignore[misc] + @property + def total_tokens(self) -> int | None: + """ + Total token count including prompt and output tokens. + + :return: Sum of prompt and output tokens, or None if either is unavailable. 
+ """ + if self.prompt_tokens is None and self.output_tokens is None: + return None + + return (self.prompt_tokens or 0) + (self.output_tokens or 0) + + @computed_field # type: ignore[misc] + @property + def request_latency(self) -> float | None: + """ + End-to-end request processing latency in seconds. + + :return: Duration from request start to completion, or None if unavailable. + """ + if ( + not self.scheduler_info.request_timings.request_end + or not self.scheduler_info.request_timings.request_start + ): + return None + + return ( + self.scheduler_info.request_timings.request_end + - self.scheduler_info.request_timings.request_start + ) + + @computed_field # type: ignore[misc] + @property + def time_to_first_token_ms(self) -> float | None: + """ + Time to first token generation in milliseconds. + + :return: Latency from request start to first token, or None if unavailable. + """ + if ( + not self.scheduler_info.request_timings.first_iteration + or not self.scheduler_info.request_timings.request_start + ): + return None + + return 1000 * ( + self.scheduler_info.request_timings.first_iteration + - self.scheduler_info.request_timings.request_start + ) + + @computed_field # type: ignore[misc] + @property + def time_per_output_token_ms(self) -> float | None: + """ + Average time per output token in milliseconds. + + Includes time for first token and all subsequent tokens. + + :return: Average milliseconds per output token, or None if unavailable. + """ + if ( + not self.scheduler_info.request_timings.request_start + or not self.scheduler_info.request_timings.last_iteration + or not self.output_tokens + ): + return None + + return ( + 1000 + * ( + self.scheduler_info.request_timings.last_iteration + - self.scheduler_info.request_timings.request_start + ) + / self.output_tokens + ) + + @computed_field # type: ignore[misc] + @property + def inter_token_latency_ms(self) -> float | None: + """ + Average inter-token latency in milliseconds. + + Measures time between token generations, excluding first token. + + :return: Average milliseconds between tokens, or None if unavailable. + """ + if ( + not self.scheduler_info.request_timings.first_iteration + or not self.scheduler_info.request_timings.last_iteration + or not self.output_tokens + or self.output_tokens <= 1 + ): + return None + + return ( + 1000 + * ( + self.scheduler_info.request_timings.last_iteration + - self.scheduler_info.request_timings.first_iteration + ) + / (self.output_tokens - 1) + ) + + @computed_field # type: ignore[misc] + @property + def tokens_per_second(self) -> float | None: + """ + Overall token throughput including prompt and output tokens. + + :return: Total tokens per second, or None if unavailable. + """ + if not (latency := self.request_latency) or not (tokens := self.total_tokens): + return None + + return tokens / latency + + @computed_field # type: ignore[misc] + @property + def output_tokens_per_second(self) -> float | None: + """ + Output token generation throughput. + + :return: Output tokens per second, or None if unavailable. 
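# --- Worked example, not part of the committed diff --------------------------
# The derived per-request metrics defined above reduce to simple arithmetic.
# Using hypothetical timings and token counts, this reproduces the same
# formulas as the computed properties (TTFT from the first iteration, ITL
# excluding the first token, time per output token including it).
request_start = 100.00    # unix seconds
first_iteration = 100.25  # first output token observed
last_iteration = 101.25   # last output token observed
request_end = 101.30
prompt_tokens = 32
output_tokens = 21

request_latency = request_end - request_start                          # 1.30 s
time_to_first_token_ms = 1000 * (first_iteration - request_start)      # 250.0 ms
time_per_output_token_ms = (
    1000 * (last_iteration - request_start) / output_tokens            # ~59.5 ms
)
inter_token_latency_ms = (
    1000 * (last_iteration - first_iteration) / (output_tokens - 1)    # 50.0 ms
)
output_tokens_per_second = output_tokens / request_latency             # ~16.2 tok/s
tokens_per_second = (prompt_tokens + output_tokens) / request_latency  # ~40.8 tok/s
# -----------------------------------------------------------------------------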
+ """ + if not (latency := self.request_latency) or not self.output_tokens: + return None + + return self.output_tokens / latency + + +class GenerativeMetrics(BenchmarkMetrics): + """Comprehensive metrics for generative AI benchmarks.""" + + prompt_token_count: StatusDistributionSummary = Field( + description="Distribution of prompt token counts by request status" + ) + output_token_count: StatusDistributionSummary = Field( + description="Distribution of output token counts by request status" + ) + total_token_count: StatusDistributionSummary = Field( + description="Distribution of total token counts by request status" + ) + time_to_first_token_ms: StatusDistributionSummary = Field( + description="Distribution of first token latencies in milliseconds" + ) + time_per_output_token_ms: StatusDistributionSummary = Field( + description="Distribution of average time per output token in milliseconds" + ) + inter_token_latency_ms: StatusDistributionSummary = Field( + description="Distribution of inter-token latencies in milliseconds" + ) + output_tokens_per_second: StatusDistributionSummary = Field( + description="Distribution of output token generation rates" + ) + tokens_per_second: StatusDistributionSummary = Field( + description="Distribution of total token throughput including prompt and output" + ) + + +class GenerativeBenchmark(Benchmark[GenerativeMetrics, GenerativeRequestStats]): + """Complete generative AI benchmark results with specialized metrics.""" + + type_: Literal["generative_benchmark"] = "generative_benchmark" # type: ignore[assignment] + + +class GenerativeBenchmarksReport(StandardBaseModel): + """Container for multiple benchmark results with load/save functionality.""" + + DEFAULT_FILE: ClassVar[str] = "benchmarks.json" + + @staticmethod + def load_file( + path: str | Path, type_: Literal["json", "yaml"] | None = None + ) -> GenerativeBenchmarksReport: + """ + Load a report from a file. + + :param path: The path to load the report from. + :param type_: File type override, auto-detected from extension if None. + :return: The loaded report. + :raises ValueError: If file type is unsupported. + """ + path = Path(path) if not isinstance(path, Path) else path + + if path.is_dir(): + path = path / GenerativeBenchmarksReport.DEFAULT_FILE + + path.parent.mkdir(parents=True, exist_ok=True) + path_suffix = path.suffix.lower()[1:] + + with path.open("r") as file: + if (type_ or path_suffix) == "json": + model_dict = json.loads(file.read()) + elif (type_ or path_suffix) in ["yaml", "yml"]: + model_dict = yaml.safe_load(file) + else: + raise ValueError(f"Unsupported file type: {type_} for {path}.") + + return GenerativeBenchmarksReport.model_validate(model_dict) + + benchmarks: list[GenerativeBenchmark] = Field( + description="The list of completed benchmarks contained within the report.", + default_factory=list, + ) + + def save_file( + self, path: str | Path | None, type_: Literal["json", "yaml"] | None = None + ) -> Path: + """ + Save the report to a file. + + :param path: The path to save the report to. + :param type_: File type override, auto-detected from extension if None. + :return: The path to the saved report. + :raises ValueError: If file type is unsupported. 
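# --- Usage sketch, not part of the committed diff ----------------------------
# How the load_file/save_file pair defined here is meant to be used: the
# serializer is chosen from the explicit type_ argument or the path suffix, and
# a directory path falls back to the DEFAULT_FILE name. Paths are placeholders.
from pathlib import Path

from guidellm.benchmark.objects import GenerativeBenchmarksReport

report = GenerativeBenchmarksReport.load_file(Path("results/benchmarks.json"))
# Re-save the same report as YAML; the ".yaml" suffix selects the YAML dump.
saved_path = report.save_file(Path("results/benchmarks.yaml"))
print(f"Re-saved {len(report.benchmarks)} benchmark(s) to {saved_path}")
# -----------------------------------------------------------------------------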
+ """ + if path is None: + path = Path.cwd() + elif not isinstance(path, Path): + path = Path(path) + + if path.is_dir(): + path = path / GenerativeBenchmarksReport.DEFAULT_FILE + + path.parent.mkdir(parents=True, exist_ok=True) + path_suffix = path.suffix.lower()[1:] + model_dict = self.model_dump() + + if (type_ or path_suffix) == "json": + save_str = json.dumps(model_dict) + elif (type_ or path_suffix) in ["yaml", "yml"]: + save_str = yaml.dump(model_dict) + else: + raise ValueError(f"Unsupported file type: {type_} for {path}.") + + with path.open("w") as file: + file.write(save_str) + + return path diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index 6759f16f..95b51d70 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -1,429 +1,318 @@ +from __future__ import annotations + import csv import json import math +from abc import ABC, abstractmethod from collections import OrderedDict -from copy import deepcopy from datetime import datetime from pathlib import Path -from typing import Any, Literal, Optional, Union +from typing import Any, ClassVar -import yaml -from pydantic import Field +from pydantic import BaseModel, ConfigDict, Field from rich.console import Console from rich.padding import Padding from rich.text import Text -from guidellm.benchmark.benchmark import GenerativeBenchmark, GenerativeMetrics +from guidellm.benchmark.objects import ( + GenerativeBenchmark, + GenerativeBenchmarksReport, + GenerativeMetrics, +) from guidellm.benchmark.profile import ( AsyncProfile, ConcurrentProfile, SweepProfile, ThroughputProfile, ) -from guidellm.objects import ( - DistributionSummary, - StandardBaseModel, - StatusDistributionSummary, -) from guidellm.presentation import UIDataBuilder from guidellm.presentation.injector import create_report -from guidellm.scheduler import strategy_display_str from guidellm.settings import settings -from guidellm.utils import Colors, split_text_list_by_length -from guidellm.utils.dict import recursive_key_update -from guidellm.utils.text import camelize_str +from guidellm.utils import ( + Colors, + DistributionSummary, + RegistryMixin, + StatusDistributionSummary, + safe_format_timestamp, + split_text_list_by_length, +) __all__ = [ - "GenerativeBenchmarksConsole", - "GenerativeBenchmarksReport", + "GenerativeBenchmarkerCSV", + "GenerativeBenchmarkerConsole", + "GenerativeBenchmarkerHTML", + "GenerativeBenchmarkerOutput", ] -class GenerativeBenchmarksReport(StandardBaseModel): - """ - A pydantic model representing a completed benchmark report. - Contains a list of benchmarks along with convenience methods for finalizing - and saving the report. - """ - - @staticmethod - def load_file(path: Union[str, Path]) -> "GenerativeBenchmarksReport": - """ - Load a report from a file. The file type is determined by the file extension. - If the file is a directory, it expects a file named benchmarks.json under the - directory. - - :param path: The path to load the report from. - :return: The loaded report. 
- """ - path, type_ = GenerativeBenchmarksReport._file_setup(path) - - if type_ == "json": - with path.open("r") as file: - model_dict = json.load(file) - - return GenerativeBenchmarksReport.model_validate(model_dict) - - if type_ == "yaml": - with path.open("r") as file: - model_dict = yaml.safe_load(file) - - return GenerativeBenchmarksReport.model_validate(model_dict) - - if type_ == "csv": - raise ValueError(f"CSV file type is not supported for loading: {path}.") - - if type_ == "html": - raise ValueError(f"HTML file type is not supported for loading: {path}.") - - raise ValueError(f"Unsupported file type: {type_} for {path}.") - - benchmarks: list[GenerativeBenchmark] = Field( - description="The list of completed benchmarks contained within the report.", - default_factory=list, +class GenerativeBenchmarkerOutput( + BaseModel, RegistryMixin[type["GenerativeBenchmarkerOutput"]], ABC +): + model_config = ConfigDict( + extra="ignore", + arbitrary_types_allowed=True, + validate_assignment=True, + from_attributes=True, + use_enum_values=True, ) - def set_sample_size( - self, sample_size: Optional[int] - ) -> "GenerativeBenchmarksReport": + @classmethod + @abstractmethod + def validated_kwargs(cls, *args, **kwargs) -> dict[str, Any]: """ - Set the sample size for each benchmark in the report. In doing this, it will - reduce the contained requests of each benchmark to the sample size. - If sample size is None, it will return the report as is. + Validate and process arguments for constraint creation. - :param sample_size: The sample size to set for each benchmark. - If None, the report will be returned as is. - :return: The report with the sample size set for each benchmark. - """ - - if sample_size is not None: - for benchmark in self.benchmarks: - benchmark.set_sample_size(sample_size) + Must be implemented by subclasses to handle their specific parameter patterns. - return self - - def save_file(self, path: Union[str, Path]) -> Path: + :param args: Positional arguments passed to the constraint + :param kwargs: Keyword arguments passed to the constraint + :return: Validated dictionary of parameters for constraint creation + :raises NotImplementedError: Must be implemented by subclasses """ - Save the report to a file. The file type is determined by the file extension. - If the file is a directory, it will save the report to a file named - benchmarks.json under the directory. + ... - :param path: The path to save the report to. - :return: The path to the saved report. - """ - path, type_ = GenerativeBenchmarksReport._file_setup(path) - - if type_ == "json": - return self.save_json(path) - - if type_ == "yaml": - return self.save_yaml(path) - - if type_ == "csv": - return self.save_csv(path) - - if type_ == "html": - return self.save_html(path) - - raise ValueError(f"Unsupported file type: {type_} for {path}.") - - def save_json(self, path: Union[str, Path]) -> Path: - """ - Save the report to a JSON file containing all of the report data which is - reloadable using the pydantic model. If the file is a directory, it will save - the report to a file named benchmarks.json under the directory. - - :param path: The path to save the report to. - :return: The path to the saved report. - """ - path, type_ = GenerativeBenchmarksReport._file_setup(path, "json") - - if type_ != "json": - raise ValueError( - f"Unsupported file type for saving a JSON: {type_} for {path}." 
- ) - - model_dict = self.model_dump() - model_json = json.dumps(model_dict) - - with path.open("w") as file: - file.write(model_json) - - return path - - def save_yaml(self, path: Union[str, Path]) -> Path: - """ - Save the report to a YAML file containing all of the report data which is - reloadable using the pydantic model. If the file is a directory, it will save - the report to a file named benchmarks.yaml under the directory. - - :param path: The path to save the report to. - :return: The path to the saved report. - """ - - path, type_ = GenerativeBenchmarksReport._file_setup(path, "yaml") - - if type_ != "yaml": - raise ValueError( - f"Unsupported file type for saving a YAML: {type_} for {path}." - ) - - model_dict = self.model_dump() - model_yaml = yaml.dump(model_dict) - - with path.open("w") as file: - file.write(model_yaml) - - return path - - def save_csv(self, path: Union[str, Path]) -> Path: - """ - Save the report to a CSV file containing the summarized statistics and values - for each report. Note, this data is not reloadable using the pydantic model. - If the file is a directory, it will save the report to a file named - benchmarks.csv under the directory. - - :param path: The path to save the report to. - :return: The path to the saved report. - """ - path, type_ = GenerativeBenchmarksReport._file_setup(path, "csv") - - if type_ != "csv": - raise ValueError( - f"Unsupported file type for saving a CSV: {type_} for {path}." + @classmethod + def resolve( + cls, + output_formats: ( + tuple[str, ...] + | list[str] + | dict[ + str, + Any | dict[str, Any] | GenerativeBenchmarkerOutput, + ] + | None + ), + output_path: str | Path | None, + ) -> dict[str, GenerativeBenchmarkerOutput]: + if not output_formats: + return {} + + if isinstance(output_formats, (list, tuple)): + # support list of output keys: ["csv", "json"] + # support list of files: ["path/to/file.json", "path/to/file.csv"] + formats_list = output_formats + output_formats = {} + for output_format in formats_list: + if not isinstance(output_format, str): + raise TypeError( + f"Expected string format, got {type(output_format)} for " + f"{output_format} in {formats_list}" + ) + try: + if cls.is_registered(output_format): + output_formats[output_format] = {} + else: + # treat it as a file save location + path = Path(output_format) + format_type = path.suffix[1:].lower() + output_formats[format_type] = {"output_path": path} + + except Exception as err: + raise ValueError( + f"Failed to resolve output format '{output_format}': {err}" + ) from err + + resolved = {} + + for key, val in output_formats.items(): + if isinstance(val, GenerativeBenchmarkerOutput): + resolved[key] = val + else: + output_class = cls.get_registered_object(key) + kwargs = {"output_path": output_path} + + if isinstance(val, dict): + kwargs.update(val) + kwargs = output_class.validated_kwargs(**kwargs) + else: + kwargs = output_class.validated_kwargs(val, **kwargs) + + resolved[key] = output_class(**kwargs) + + return resolved + + @abstractmethod + async def finalize(self, report: GenerativeBenchmarksReport) -> Any: ... 
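# --- Illustrative sketch, not part of the committed diff ---------------------
# A custom output format could plug into the registry defined above by
# mirroring the registered implementations that follow: pick a key, implement
# validated_kwargs(), and implement async finalize(). The "summary_txt" key and
# file name are hypothetical; Field, Path, Any, and GenerativeBenchmarksReport
# are assumed to be the imports already present in this module.
@GenerativeBenchmarkerOutput.register("summary_txt")
class GenerativeBenchmarkerSummaryText(GenerativeBenchmarkerOutput):
    output_path: Path = Field(default_factory=lambda: Path.cwd() / "summary.txt")

    @classmethod
    def validated_kwargs(cls, output_path=None, **_kwargs) -> dict[str, Any]:
        return {"output_path": Path(output_path)} if output_path else {}

    async def finalize(self, report: GenerativeBenchmarksReport) -> Path:
        # One line per benchmark: strategy plus mean successful request rate.
        lines = [
            f"{benchmark.scheduler.strategy}: "
            f"{benchmark.metrics.requests_per_second.successful.mean:.2f} req/s"
            for benchmark in report.benchmarks
        ]
        self.output_path.write_text("\n".join(lines))
        return self.output_path


# With the class registered, resolve() can map user-facing selections such as
# ("console", "summary_txt"), or a file path whose suffix names a registered
# key, onto configured output instances.
# -----------------------------------------------------------------------------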
+ + +@GenerativeBenchmarkerOutput.register(["json", "yaml"]) +class GenerativeBenchmarkerSerialized(GenerativeBenchmarkerOutput): + @classmethod + def validated_kwargs( + cls, output_path: str | Path | None, **_kwargs + ) -> dict[str, Any]: + new_kwargs = {} + if output_path is not None: + new_kwargs["output_path"] = ( + Path(output_path) if not isinstance(output_path, Path) else output_path ) + return new_kwargs - with path.open("w", newline="") as file: - writer = csv.writer(file) - headers: list[str] = [] - rows: list[list[Union[str, float, list[float]]]] = [] - - for benchmark in self.benchmarks: - benchmark_headers: list[str] = [] - benchmark_values: list[Union[str, float, list[float]]] = [] - - desc_headers, desc_values = self._benchmark_desc_headers_and_values( - benchmark - ) - benchmark_headers += desc_headers - benchmark_values += desc_values + output_path: Path = Field(default_factory=lambda: Path.cwd()) - for status in StatusDistributionSummary.model_fields: - status_headers, status_values = ( - self._benchmark_status_headers_and_values(benchmark, status) - ) - benchmark_headers += status_headers - benchmark_values += status_values + async def finalize(self, report: GenerativeBenchmarksReport) -> Path: + return report.save_file(self.output_path) - benchmark_extra_headers, benchmark_extra_values = ( - self._benchmark_extras_headers_and_values(benchmark) - ) - benchmark_headers += benchmark_extra_headers - benchmark_values += benchmark_extra_values - if not headers: - headers = benchmark_headers - rows.append(benchmark_values) +@GenerativeBenchmarkerOutput.register("console") +class GenerativeBenchmarkerConsole(GenerativeBenchmarkerOutput): + """Console output formatter for benchmark results with rich formatting.""" - writer.writerow(headers) - for row in rows: - writer.writerow(row) + @classmethod + def validated_kwargs(cls, *_args, **_kwargs) -> dict[str, Any]: + return {} - return path + console: Console = Field(default_factory=Console) - def save_html(self, path: Union[str, Path]) -> Path: + async def finalize(self, report: GenerativeBenchmarksReport) -> str: """ - Download html, inject report data and save to a file. + Print the complete benchmark report to the console. - :param path: The path to create the report at. - :return: The path to the report. + :param report: The completed benchmark report. 
+ :return: """ + self._print_benchmarks_metadata(report.benchmarks) + self._print_benchmarks_info(report.benchmarks) + self._print_benchmarks_stats(report.benchmarks) - data_builder = UIDataBuilder(self.benchmarks) - data = data_builder.to_dict() - camel_data = recursive_key_update(deepcopy(data), camelize_str) - ui_api_data = {} - for k, v in camel_data.items(): - key = f"window.{k} = {{}};" - value = f"window.{k} = {json.dumps(v, indent=2)};\n" - ui_api_data[key] = value - return create_report(ui_api_data, path) - - @staticmethod - def _file_setup( - path: Union[str, Path], - default_file_type: Literal["json", "yaml", "csv", "html"] = "json", - ) -> tuple[Path, Literal["json", "yaml", "csv", "html"]]: - path = Path(path) if not isinstance(path, Path) else path + return "printed to console" - if path.is_dir(): - path = path / f"benchmarks.{default_file_type}" - - path.parent.mkdir(parents=True, exist_ok=True) - path_suffix = path.suffix.lower() - - if path_suffix == ".json": - return path, "json" - - if path_suffix in [".yaml", ".yml"]: - return path, "yaml" - - if path_suffix in [".csv"]: - return path, "csv" - - if path_suffix in [".html"]: - return path, "html" + def _print_benchmarks_metadata(self, benchmarks: list[GenerativeBenchmark]): + start_time = benchmarks[0].run_stats.start_time + end_time = benchmarks[-1].run_stats.end_time + duration = end_time - start_time - raise ValueError( - f"Unsupported file extension: {path_suffix} for {path}; " - "expected json, yaml, csv, or html." - ) + self._print_section_header("Benchmarks Metadata") + self._print_labeled_line("Run id", str(benchmarks[0].run_id)) + self._print_labeled_line("Duration", f"{duration:.1f} seconds") + self._print_labeled_line("Profile", self._get_profile_str(benchmarks[0])) - @staticmethod - def _benchmark_desc_headers_and_values( - benchmark: GenerativeBenchmark, - ) -> tuple[list[str], list[Union[str, float]]]: + def _print_benchmarks_info(self, benchmarks: list[GenerativeBenchmark]): + sections = { + "Metadata": (0, 3), + "Requests Made": (4, 6), + "Prompt Tok/Req": (7, 9), + "Output Tok/Req": (10, 12), + "Prompt Tok Total": (13, 15), + "Output Tok Total": (16, 18), + } headers = [ - "Type", - "Run Id", - "Id", - "Name", + "Benchmark", "Start Time", "End Time", - "Duration", - ] - values: list[Union[str, float]] = [ - benchmark.type_, - benchmark.run_id, - benchmark.id_, - strategy_display_str(benchmark.args.strategy), - datetime.fromtimestamp(benchmark.start_time).strftime("%Y-%m-%d %H:%M:%S"), - datetime.fromtimestamp(benchmark.end_time).strftime("%Y-%m-%d %H:%M:%S"), - benchmark.duration, - ] - - if len(headers) != len(values): - raise ValueError("Headers and values length mismatch.") - - return headers, values - - @staticmethod - def _benchmark_extras_headers_and_values( - benchmark: GenerativeBenchmark, - ) -> tuple[list[str], list[str]]: - headers = ["Args", "Worker", "Request Loader", "Extras"] - values: list[str] = [ - json.dumps(benchmark.args.model_dump()), - json.dumps(benchmark.worker.model_dump()), - json.dumps(benchmark.request_loader.model_dump()), - json.dumps(benchmark.extras), - ] - - if len(headers) != len(values): - raise ValueError("Headers and values length mismatch.") - - return headers, values - - @staticmethod - def _benchmark_status_headers_and_values( - benchmark: GenerativeBenchmark, status: str - ) -> tuple[list[str], list[Union[float, list[float]]]]: - headers = [ - f"{status.capitalize()} Requests", - ] - values = [ - getattr(benchmark.request_totals, status), + "Duration (s)", + 
"Comp", + "Inc", + "Err", + "Comp", + "Inc", + "Err", + "Comp", + "Inc", + "Err", + "Comp", + "Inc", + "Err", + "Comp", + "Inc", + "Err", ] - for metric in GenerativeMetrics.model_fields: - metric_headers, metric_values = ( - GenerativeBenchmarksReport._benchmark_status_metrics_stats( - benchmark, status, metric - ) + rows = [] + for benchmark in benchmarks: + rows.append( + [ + str(benchmark.scheduler.strategy), + safe_format_timestamp(benchmark.start_time), + safe_format_timestamp(benchmark.end_time), + f"{(benchmark.end_time - benchmark.start_time):.1f}", + f"{benchmark.request_totals.successful:.0f}", + f"{benchmark.request_totals.incomplete:.0f}", + f"{benchmark.request_totals.errored:.0f}", + f"{benchmark.metrics.prompt_token_count.successful.mean:.1f}", + f"{benchmark.metrics.prompt_token_count.incomplete.mean:.1f}", + f"{benchmark.metrics.prompt_token_count.errored.mean:.1f}", + f"{benchmark.metrics.output_token_count.successful.mean:.1f}", + f"{benchmark.metrics.output_token_count.incomplete.mean:.1f}", + f"{benchmark.metrics.output_token_count.errored.mean:.1f}", + f"{benchmark.metrics.prompt_token_count.successful.total_sum:.0f}", + f"{benchmark.metrics.prompt_token_count.incomplete.total_sum:.0f}", + f"{benchmark.metrics.prompt_token_count.errored.total_sum:.0f}", + f"{benchmark.metrics.output_token_count.successful.total_sum:.0f}", + f"{benchmark.metrics.output_token_count.incomplete.total_sum:.0f}", + f"{benchmark.metrics.output_token_count.errored.total_sum:.0f}", + ] ) - headers += metric_headers - values += metric_values - if len(headers) != len(values): - raise ValueError("Headers and values length mismatch.") - - return headers, values + self._print_table(headers, rows, "Benchmarks Info", sections) - @staticmethod - def _benchmark_status_metrics_stats( - benchmark: GenerativeBenchmark, - status: str, - metric: str, - ) -> tuple[list[str], list[Union[float, list[float]]]]: - status_display = status.capitalize() - metric_display = metric.replace("_", " ").capitalize() - status_dist_summary: StatusDistributionSummary = getattr( - benchmark.metrics, metric - ) - dist_summary: DistributionSummary = getattr(status_dist_summary, status) + def _print_benchmarks_stats(self, benchmarks: list[GenerativeBenchmark]): + sections = { + "Metadata": (0, 0), + "Request Stats": (1, 2), + "Out Tok/sec": (3, 3), + "Tot Tok/sec": (4, 4), + "Req Latency (sec)": (5, 7), + "TTFT (ms)": (8, 10), + "ITL (ms)": (11, 13), + "TPOT (ms)": (14, 16), + } headers = [ - f"{status_display} {metric_display} mean", - f"{status_display} {metric_display} median", - f"{status_display} {metric_display} std dev", - ( - f"{status_display} {metric_display} " - "[min, 0.1, 1, 5, 10, 25, 75, 90, 95, 99, max]" - ), - ] - values: list[Union[float, list[float]]] = [ - dist_summary.mean, - dist_summary.median, - dist_summary.std_dev, - [ - dist_summary.min, - dist_summary.percentiles.p001, - dist_summary.percentiles.p01, - dist_summary.percentiles.p05, - dist_summary.percentiles.p10, - dist_summary.percentiles.p25, - dist_summary.percentiles.p75, - dist_summary.percentiles.p90, - dist_summary.percentiles.p95, - dist_summary.percentiles.p99, - dist_summary.max, - ], + "Benchmark", + "Per Second", + "Concurrency", + "mean", + "mean", + "mean", + "median", + "p99", + "mean", + "median", + "p99", + "mean", + "median", + "p99", + "mean", + "median", + "p99", ] - if len(headers) != len(values): - raise ValueError("Headers and values length mismatch.") - - return headers, values - - -class GenerativeBenchmarksConsole: - """ - A 
class for outputting progress and benchmark results to the console. - Utilizes the rich library for formatting, enabling colored and styled output. - """ - - def __init__(self, enabled: bool = True): - """ - :param enabled: Whether to enable console output. Defaults to True. - If False, all console output will be suppressed. - """ - self.enabled = enabled - self.benchmarks: Optional[list[GenerativeBenchmark]] = None - self.console = Console() + rows = [] + for benchmark in benchmarks: + rows.append( + [ + str(benchmark.scheduler.strategy), + f"{benchmark.metrics.requests_per_second.successful.mean:.2f}", + f"{benchmark.metrics.request_concurrency.successful.mean:.2f}", + f"{benchmark.metrics.output_tokens_per_second.successful.mean:.1f}", + f"{benchmark.metrics.tokens_per_second.successful.mean:.1f}", + f"{benchmark.metrics.request_latency.successful.mean:.2f}", + f"{benchmark.metrics.request_latency.successful.median:.2f}", + f"{benchmark.metrics.request_latency.successful.percentiles.p99:.2f}", + f"{benchmark.metrics.time_to_first_token_ms.successful.mean:.1f}", + f"{benchmark.metrics.time_to_first_token_ms.successful.median:.1f}", + f"{benchmark.metrics.time_to_first_token_ms.successful.percentiles.p99:.1f}", + f"{benchmark.metrics.inter_token_latency_ms.successful.mean:.1f}", + f"{benchmark.metrics.inter_token_latency_ms.successful.median:.1f}", + f"{benchmark.metrics.inter_token_latency_ms.successful.percentiles.p99:.1f}", + f"{benchmark.metrics.time_per_output_token_ms.successful.mean:.1f}", + f"{benchmark.metrics.time_per_output_token_ms.successful.median:.1f}", + f"{benchmark.metrics.time_per_output_token_ms.successful.percentiles.p99:.1f}", + ] + ) - @property - def benchmarks_profile_str(self) -> str: - """ - :return: A string representation of the profile used for the benchmarks. - """ - profile = self.benchmarks[0].args.profile if self.benchmarks else None + self._print_table(headers, rows, "Benchmarks Stats", sections) + def _get_profile_str(self, benchmark: GenerativeBenchmark) -> str: + profile = benchmark.benchmarker.profile if profile is None: return "None" profile_args = OrderedDict( { "type": profile.type_, - "strategies": profile.strategy_types, + "strategies": getattr(profile, "strategy_types", []), } ) @@ -434,22 +323,13 @@ def benchmarks_profile_str(self) -> str: elif isinstance(profile, AsyncProfile): profile_args["max_concurrency"] = str(profile.max_concurrency) profile_args["rate"] = str(profile.rate) - profile_args["initial_burst"] = str(profile.initial_burst) elif isinstance(profile, SweepProfile): profile_args["sweep_size"] = str(profile.sweep_size) return ", ".join(f"{key}={value}" for key, value in profile_args.items()) - @property - def benchmarks_args_str(self) -> str: - """ - :return: A string representation of the arguments used for the benchmarks. - """ - args = self.benchmarks[0].args if self.benchmarks else None - - if args is None: - return "None" - + def _get_args_str(self, benchmark: GenerativeBenchmark) -> str: + args = benchmark.args args_dict = OrderedDict( { "max_number": args.max_number, @@ -460,111 +340,45 @@ def benchmarks_args_str(self) -> str: "cooldown_duration": args.cooldown_duration, } ) - return ", ".join(f"{key}={value}" for key, value in args_dict.items()) - @property - def benchmarks_worker_desc_str(self) -> str: - """ - :return: A string representation of the worker used for the benchmarks. 
- """ - return str(self.benchmarks[0].worker) if self.benchmarks else "None" - - @property - def benchmarks_request_loader_desc_str(self) -> str: - """ - :return: A string representation of the request loader used for the benchmarks. - """ - return str(self.benchmarks[0].request_loader) if self.benchmarks else "None" - - @property - def benchmarks_extras_str(self) -> str: - """ - :return: A string representation of the extras used for the benchmarks. - """ - extras = self.benchmarks[0].extras if self.benchmarks else None - - if not extras: - return "None" - - return ", ".join(f"{key}={value}" for key, value in extras.items()) - - def print_section_header(self, title: str, indent: int = 0, new_lines: int = 2): - """ - Print out a styled section header to the console. - The title is underlined, bolded, and colored with the INFO color. - - :param title: The title of the section. - :param indent: The number of spaces to indent the title. - Defaults to 0. - :param new_lines: The number of new lines to print before the title. - Defaults to 2. - """ - self.print_line( - value=f"{title}:", - style=f"bold underline {Colors.INFO}", + def _print_section_header(self, title: str, indent: int = 0, new_lines: int = 2): + self._print_line( + f"{title}:", + f"bold underline {Colors.info}", indent=indent, new_lines=new_lines, ) - def print_labeled_line( + def _print_labeled_line( self, label: str, value: str, indent: int = 4, new_lines: int = 0 ): - """ - Print out a styled, labeled line (label: value) to the console. - The label is bolded and colored with the INFO color, - and the value is italicized. - - :param label: The label of the line. - :param value: The value of the line. - :param indent: The number of spaces to indent the line. - Defaults to 4. - :param new_lines: The number of new lines to print before the line. - Defaults to 0. - """ - self.print_line( - value=[label + ":", value], - style=["bold " + Colors.INFO, "italic"], + self._print_line( + [label + ":", value], + ["bold " + Colors.info, "italic"], new_lines=new_lines, indent=indent, ) - def print_line( + def _print_line( self, - value: Union[str, list[str]], - style: Union[str, list[str]] = "", + value: str | list[str], + style: str | list[str] = "", indent: int = 0, new_lines: int = 0, ): - """ - Print out a a value to the console as a line with optional indentation. - - :param value: The value to print. - :param style: The style to apply to the value. - Defaults to none. - :param indent: The number of spaces to indent the line. - Defaults to 0. - :param new_lines: The number of new lines to print before the value. - Defaults to 0. - """ - if not self.enabled: - return - text = Text() - for _ in range(new_lines): text.append("\n") if not isinstance(value, list): value = [value] - if not isinstance(style, list): style = [style for _ in range(len(value))] if len(value) != len(style): raise ValueError( - f"Value and style length mismatch. Value length: {len(value)}, " - f"Style length: {len(style)}." 
+ f"Value and style length mismatch: {len(value)} vs {len(style)}" ) for val, sty in zip(value, style): @@ -572,128 +386,81 @@ def print_line( self.console.print(Padding.indent(text, indent)) - def print_table( + def _print_table( self, headers: list[str], rows: list[list[Any]], title: str, - sections: Optional[dict[str, tuple[int, int]]] = None, - max_char_per_col: int = 2**10, + sections: dict[str, tuple[int, int]] | None = None, + max_char_per_col: int = 1024, indent: int = 0, new_lines: int = 2, ): - """ - Print a table to the console with the given headers and rows. - - :param headers: The headers of the table. - :param rows: The rows of the table. - :param title: The title of the table. - :param sections: The sections of the table grouping columns together. - This is a mapping of the section display name to a tuple of the start and - end column indices. If None, no sections are added (default). - :param max_char_per_col: The maximum number of characters per column. - :param indent: The number of spaces to indent the table. - Defaults to 0. - :param new_lines: The number of new lines to print before the table. - Defaults to 0. - """ - if rows and any(len(row) != len(headers) for row in rows): raise ValueError( - f"Headers and rows length mismatch. Headers length: {len(headers)}, " - f"Row length: {len(rows[0]) if rows else 'N/A'}." + "Headers and rows length mismatch: " + f"{len(headers)} vs {len(rows[0]) if rows else 'N/A'}" ) - max_characters_per_column = self.calculate_max_chars_per_column( + max_chars_per_column = self._calculate_max_chars_per_column( headers, rows, sections, max_char_per_col ) - self.print_section_header(title, indent=indent, new_lines=new_lines) - self.print_table_divider( - max_characters_per_column, include_separators=False, indent=indent - ) + self._print_section_header(title, indent=indent, new_lines=new_lines) + self._print_table_divider(max_chars_per_column, False, indent) if sections: - self.print_table_sections( - sections, max_characters_per_column, indent=indent - ) - self.print_table_row( - split_text_list_by_length(headers, max_characters_per_column), - style=f"bold {Colors.INFO}", - indent=indent, - ) - self.print_table_divider( - max_characters_per_column, include_separators=True, indent=indent + self._print_table_sections(sections, max_chars_per_column, indent) + self._print_table_row( + split_text_list_by_length(headers, max_chars_per_column), + f"bold {Colors.info}", + indent, ) + self._print_table_divider(max_chars_per_column, True, indent) for row in rows: - self.print_table_row( - split_text_list_by_length(row, max_characters_per_column), - style="italic", - indent=indent, + self._print_table_row( + split_text_list_by_length(row, max_chars_per_column), + "italic", + indent, ) - self.print_table_divider( - max_characters_per_column, include_separators=False, indent=indent - ) + self._print_table_divider(max_chars_per_column, False, indent) - def calculate_max_chars_per_column( + def _calculate_max_chars_per_column( self, headers: list[str], rows: list[list[Any]], - sections: Optional[dict[str, tuple[int, int]]], + sections: dict[str, tuple[int, int]] | None, max_char_per_col: int, ) -> list[int]: - """ - Calculate the maximum number of characters per column in the table. - This is done by checking the length of the headers, rows, and optional sections - to ensure all columns are accounted for and spaced correctly. - - :param headers: The headers of the table. - :param rows: The rows of the table. 
- :param sections: The sections of the table grouping columns together. - This is a mapping of the section display name to a tuple of the start and - end column indices. If None, no sections are added (default). - :param max_char_per_col: The maximum number of characters per column. - :return: A list of the maximum number of characters per column. - """ - max_characters_per_column = [] + """Calculate maximum characters per column for table formatting.""" + max_chars_per_column = [] for ind in range(len(headers)): - max_characters_per_column.append(min(len(headers[ind]), max_char_per_col)) - + max_chars_per_column.append(min(len(headers[ind]), max_char_per_col)) for row in rows: - max_characters_per_column[ind] = max( - max_characters_per_column[ind], len(str(row[ind])) + max_chars_per_column[ind] = max( + max_chars_per_column[ind], len(str(row[ind])) ) if not sections: - return max_characters_per_column + return max_chars_per_column - for section in sections: - start_col, end_col = sections[section] - min_section_len = len(section) + ( - end_col - start_col - ) # ensure we have enough space for separators + for section, (start_col, end_col) in sections.items(): + min_section_len = len(section) + (end_col - start_col) chars_in_columns = sum( - max_characters_per_column[start_col : end_col + 1] + max_chars_per_column[start_col : end_col + 1] ) + 2 * (end_col - start_col) if min_section_len > chars_in_columns: add_chars_per_col = math.ceil( (min_section_len - chars_in_columns) / (end_col - start_col + 1) ) for col in range(start_col, end_col + 1): - max_characters_per_column[col] += add_chars_per_col + max_chars_per_column[col] += add_chars_per_col - return max_characters_per_column + return max_chars_per_column - def print_table_divider( + def _print_table_divider( self, max_chars_per_column: list[int], include_separators: bool, indent: int = 0 ): - """ - Print a divider line for the table (top and bottom of table with '=' characters) - - :param max_chars_per_column: The maximum number of characters per column. - :param include_separators: Whether to include separators between columns. - :param indent: The number of spaces to indent the line. - Defaults to 0. - """ + """Print table divider line.""" if include_separators: columns = [ settings.table_headers_border_char * max_chars @@ -706,29 +473,15 @@ def print_table_divider( settings.table_border_char * (max_chars + 2) for max_chars in max_chars_per_column ] - columns[-1] = columns[-1][:-2] - self.print_line(value=columns, style=Colors.INFO, indent=indent) + self._print_line(columns, Colors.info, indent) - def print_table_sections( + def _print_table_sections( self, sections: dict[str, tuple[int, int]], max_chars_per_column: list[int], indent: int = 0, ): - """ - Print the sections of the table with corresponding separators to the columns - the sections are mapped to to ensure it is compliant with a CSV format. - For example, a section named "Metadata" with columns 0-3 will print this: - Metadata ,,,, - Where the spaces plus the separators at the end will span the columns 0-3. - All columns must be accounted for in the sections. - - :param sections: The sections of the table. - :param max_chars_per_column: The maximum number of characters per column. - :param indent: The number of spaces to indent the line. - Defaults to 0. 
- """ section_tuples = [(start, end, name) for name, (start, end) in sections.items()] section_tuples.sort(key=lambda x: x[0]) @@ -752,30 +505,23 @@ def print_table_sections( end_col - start_col + 1 ) num_separators = end_col - start_col - line_values.append(section) - line_styles.append("bold " + Colors.INFO) - line_values.append( - " " * (section_length - len(section) - num_separators - 2) + line_values.extend( + [ + section, + " " * (section_length - len(section) - num_separators - 2), + settings.table_column_separator_char * num_separators, + settings.table_column_separator_char + " ", + ] ) - line_styles.append("") - line_values.append(settings.table_column_separator_char * num_separators) - line_styles.append("") - line_values.append(settings.table_column_separator_char + " ") - line_styles.append(Colors.INFO) + line_styles.extend(["bold " + Colors.info, "", "", Colors.info]) + line_values = line_values[:-1] line_styles = line_styles[:-1] - self.print_line(value=line_values, style=line_styles, indent=indent) + self._print_line(line_values, line_styles, indent) - def print_table_row( + def _print_table_row( self, column_lines: list[list[str]], style: str, indent: int = 0 ): - """ - Print a single row of a table to the console. - - :param column_lines: The lines of text to print for each column. - :param indent: The number of spaces to indent the line. - Defaults to 0. - """ for row in range(len(column_lines[0])): print_line = [] print_styles = [] @@ -787,212 +533,203 @@ def print_table_row( " ", ] ) - print_styles.extend([style, Colors.INFO, ""]) + print_styles.extend([style, Colors.info, ""]) print_line = print_line[:-2] print_styles = print_styles[:-2] - self.print_line(value=print_line, style=print_styles, indent=indent) + self._print_line(print_line, print_styles, indent) - def print_benchmarks_metadata(self): - """ - Print out the metadata of the benchmarks to the console including the run id, - duration, profile, args, worker, request loader, and extras. - """ - if not self.benchmarks: - raise ValueError( - "No benchmarks to print metadata for. Please set benchmarks first." 
- ) +@GenerativeBenchmarkerOutput.register("csv") +class GenerativeBenchmarkerCSV(GenerativeBenchmarkerOutput): + """CSV output formatter for benchmark results.""" - start_time = self.benchmarks[0].run_stats.start_time - end_time = self.benchmarks[-1].run_stats.end_time - duration = end_time - start_time + DEFAULT_FILE: ClassVar[str] = "benchmarks.csv" - self.print_section_header(title="Benchmarks Metadata") - self.print_labeled_line( - label="Run id", - value=str(self.benchmarks[0].run_id), - ) - self.print_labeled_line( - label="Duration", - value=f"{duration:.1f} seconds", - ) - self.print_labeled_line( - label="Profile", - value=self.benchmarks_profile_str, - ) - self.print_labeled_line( - label="Args", - value=self.benchmarks_args_str, - ) - self.print_labeled_line( - label="Worker", - value=self.benchmarks_worker_desc_str, - ) - self.print_labeled_line( - label="Request Loader", - value=self.benchmarks_request_loader_desc_str, - ) - self.print_labeled_line( - label="Extras", - value=self.benchmarks_extras_str, - ) + @classmethod + def validated_kwargs( + cls, output_path: str | Path | None, **_kwargs + ) -> dict[str, Any]: + new_kwargs = {} + if output_path is not None: + new_kwargs["output_path"] = ( + Path(output_path) if not isinstance(output_path, Path) else output_path + ) + return new_kwargs + + output_path: Path = Field(default_factory=lambda: Path.cwd()) - def print_benchmarks_info(self): + async def finalize(self, report: GenerativeBenchmarksReport) -> Path: """ - Print out the benchmark information to the console including the start time, - end time, duration, request totals, and token totals for each benchmark. + Save the benchmark report as a CSV file. + + :param report: The completed benchmark report. + :return: Path to the saved CSV file. """ - if not self.benchmarks: - raise ValueError( - "No benchmarks to print info for. Please set benchmarks first." 
- ) + output_path = self.output_path + if output_path.is_dir(): + output_path = output_path / GenerativeBenchmarkerCSV.DEFAULT_FILE + output_path.parent.mkdir(parents=True, exist_ok=True) - sections = { - "Metadata": (0, 3), - "Requests Made": (4, 6), - "Prompt Tok/Req": (7, 9), - "Output Tok/Req": (10, 12), - "Prompt Tok Total": (13, 15), - "Output Tok Total": (16, 18), - } + with output_path.open("w", newline="") as file: + writer = csv.writer(file) + headers: list[str] = [] + rows: list[list[str | float | list[float]]] = [] + + for benchmark in report.benchmarks: + benchmark_headers: list[str] = [] + benchmark_values: list[str | float | list[float]] = [] + + # Add status-based metrics + for status in StatusDistributionSummary.model_fields: + status_headers, status_values = ( + self._get_benchmark_status_headers_and_values(benchmark, status) + ) + benchmark_headers.extend(status_headers) + benchmark_values.extend(status_values) + + # Add extra fields + extras_headers, extras_values = ( + self._get_benchmark_extras_headers_and_values(benchmark) + ) + benchmark_headers.extend(extras_headers) + benchmark_values.extend(extras_values) + + if not headers: + headers = benchmark_headers + rows.append(benchmark_values) + + writer.writerow(headers) + for row in rows: + writer.writerow(row) + + return output_path + + def _get_benchmark_desc_headers_and_values( + self, benchmark: GenerativeBenchmark + ) -> tuple[list[str], list[str | float]]: + """Get description headers and values for a benchmark.""" headers = [ - "Benchmark", + "Type", + "Run Id", + "Id", + "Name", "Start Time", "End Time", - "Duration (s)", - "Comp", - "Inc", - "Err", - "Comp", - "Inc", - "Err", - "Comp", - "Inc", - "Err", - "Comp", - "Inc", - "Err", - "Comp", - "Inc", - "Err", + "Duration", ] - rows = [] + values: list[str | float] = [ + benchmark.type_, + benchmark.run_id, + benchmark.id_, + str(benchmark.scheduler.strategy), + datetime.fromtimestamp(benchmark.start_time).strftime("%Y-%m-%d %H:%M:%S"), + datetime.fromtimestamp(benchmark.end_time).strftime("%Y-%m-%d %H:%M:%S"), + benchmark.duration, + ] + return headers, values - for benchmark in self.benchmarks: - rows.append( - [ - strategy_display_str(benchmark.args.strategy), - f"{datetime.fromtimestamp(benchmark.start_time).strftime('%H:%M:%S')}", - f"{datetime.fromtimestamp(benchmark.end_time).strftime('%H:%M:%S')}", - f"{(benchmark.end_time - benchmark.start_time):.1f}", - f"{benchmark.request_totals.successful:.0f}", - f"{benchmark.request_totals.incomplete:.0f}", - f"{benchmark.request_totals.errored:.0f}", - f"{benchmark.metrics.prompt_token_count.successful.mean:.1f}", - f"{benchmark.metrics.prompt_token_count.incomplete.mean:.1f}", - f"{benchmark.metrics.prompt_token_count.errored.mean:.1f}", - f"{benchmark.metrics.output_token_count.successful.mean:.1f}", - f"{benchmark.metrics.output_token_count.incomplete.mean:.1f}", - f"{benchmark.metrics.output_token_count.errored.mean:.1f}", - f"{benchmark.metrics.prompt_token_count.successful.total_sum:.0f}", - f"{benchmark.metrics.prompt_token_count.incomplete.total_sum:.0f}", - f"{benchmark.metrics.prompt_token_count.errored.total_sum:.0f}", - f"{benchmark.metrics.output_token_count.successful.total_sum:.0f}", - f"{benchmark.metrics.output_token_count.incomplete.total_sum:.0f}", - f"{benchmark.metrics.output_token_count.errored.total_sum:.0f}", - ] + def _get_benchmark_status_headers_and_values( + self, benchmark: GenerativeBenchmark, status: str + ) -> tuple[list[str], list[float | list[float]]]: + """Get status-based 
metrics headers and values for a benchmark.""" + headers = [f"{status.capitalize()} Requests"] + values = [getattr(benchmark.request_totals, status)] + + for metric in GenerativeMetrics.model_fields: + metric_headers, metric_values = self._get_benchmark_status_metrics_stats( + benchmark, status, metric ) + headers.extend(metric_headers) + values.extend(metric_values) - self.print_table( - headers=headers, rows=rows, title="Benchmarks Info", sections=sections - ) + return headers, values - def print_benchmarks_stats(self): - """ - Print out the benchmark statistics to the console including the requests per - second, request concurrency, output tokens per second, total tokens per second, - request latency, time to first token, inter token latency, and time per output - token for each benchmark. - """ - if not self.benchmarks: - raise ValueError( - "No benchmarks to print stats for. Please set benchmarks first." - ) + def _get_benchmark_status_metrics_stats( + self, benchmark: GenerativeBenchmark, status: str, metric: str + ) -> tuple[list[str], list[float | list[float]]]: + """Get statistical metrics for a specific status and metric.""" + status_display = status.capitalize() + metric_display = metric.replace("_", " ").capitalize() + status_dist_summary: StatusDistributionSummary = getattr( + benchmark.metrics, metric + ) + dist_summary: DistributionSummary = getattr(status_dist_summary, status) - sections = { - "Metadata": (0, 0), - "Request Stats": (1, 2), - "Out Tok/sec": (3, 3), - "Tot Tok/sec": (4, 4), - "Req Latency (sec)": (5, 7), - "TTFT (ms)": (8, 10), - "ITL (ms)": (11, 13), - "TPOT (ms)": (14, 16), - } headers = [ - "Benchmark", - "Per Second", - "Concurrency", - "mean", - "mean", - "mean", - "median", - "p99", - "mean", - "median", - "p99", - "mean", - "median", - "p99", - "mean", - "median", - "p99", + f"{status_display} {metric_display} mean", + f"{status_display} {metric_display} median", + f"{status_display} {metric_display} std dev", + ( + f"{status_display} {metric_display} " + "[min, 0.1, 1, 5, 10, 25, 75, 90, 95, 99, max]" + ), ] - rows = [] + values: list[float | list[float]] = [ + dist_summary.mean, + dist_summary.median, + dist_summary.std_dev, + [ + dist_summary.min, + dist_summary.percentiles.p001, + dist_summary.percentiles.p01, + dist_summary.percentiles.p05, + dist_summary.percentiles.p10, + dist_summary.percentiles.p25, + dist_summary.percentiles.p75, + dist_summary.percentiles.p90, + dist_summary.percentiles.p95, + dist_summary.percentiles.p99, + dist_summary.max, + ], + ] + return headers, values - for benchmark in self.benchmarks: - rows.append( - [ - strategy_display_str(benchmark.args.strategy), - f"{benchmark.metrics.requests_per_second.successful.mean:.2f}", - f"{benchmark.metrics.request_concurrency.successful.mean:.2f}", - f"{benchmark.metrics.output_tokens_per_second.successful.mean:.1f}", - f"{benchmark.metrics.tokens_per_second.successful.mean:.1f}", - f"{benchmark.metrics.request_latency.successful.mean:.2f}", - f"{benchmark.metrics.request_latency.successful.median:.2f}", - f"{benchmark.metrics.request_latency.successful.percentiles.p99:.2f}", - f"{benchmark.metrics.time_to_first_token_ms.successful.mean:.1f}", - f"{benchmark.metrics.time_to_first_token_ms.successful.median:.1f}", - f"{benchmark.metrics.time_to_first_token_ms.successful.percentiles.p99:.1f}", - f"{benchmark.metrics.inter_token_latency_ms.successful.mean:.1f}", - f"{benchmark.metrics.inter_token_latency_ms.successful.median:.1f}", - 
f"{benchmark.metrics.inter_token_latency_ms.successful.percentiles.p99:.1f}", - f"{benchmark.metrics.time_per_output_token_ms.successful.mean:.1f}", - f"{benchmark.metrics.time_per_output_token_ms.successful.median:.1f}", - f"{benchmark.metrics.time_per_output_token_ms.successful.percentiles.p99:.1f}", - ] + +@GenerativeBenchmarkerOutput.register("html") +class GenerativeBenchmarkerHTML(GenerativeBenchmarkerOutput): + """HTML output formatter for benchmark results.""" + + DEFAULT_FILE: ClassVar[str] = "benchmarks.html" + + @classmethod + def validated_kwargs( + cls, output_path: str | Path | None, **_kwargs + ) -> dict[str, Any]: + new_kwargs = {} + if output_path is not None: + new_kwargs["output_path"] = ( + Path(output_path) if not isinstance(output_path, Path) else output_path ) + return new_kwargs - self.print_table( - headers=headers, - rows=rows, - title="Benchmarks Stats", - sections=sections, - ) + output_path: Path = Field(default_factory=lambda: Path.cwd()) - def print_full_report(self): + async def finalize(self, report: GenerativeBenchmarksReport) -> Path: """ - Print out the benchmark statistics to the console. - Temporarily enables the console if it's disabled. + Save the benchmark report as an HTML file. - Format: - - Metadata - - Info - - Stats + :param report: The completed benchmark report. + :return: Path to the saved HTML file. """ - orig_enabled = self.enabled - self.enabled = True - self.print_benchmarks_metadata() - self.print_benchmarks_info() - self.print_benchmarks_stats() - self.enabled = orig_enabled + import humps + + output_path = self.output_path + if output_path.is_dir(): + output_path = output_path / GenerativeBenchmarkerHTML.DEFAULT_FILE + output_path.parent.mkdir(parents=True, exist_ok=True) + + data_builder = UIDataBuilder(report.benchmarks) + data = data_builder.to_dict() + camel_data = humps.camelize(data) + + ui_api_data = {} + for key, value in camel_data.items(): + placeholder_key = f"window.{humps.decamelize(key)} = {{}};" + replacement_value = ( + f"window.{humps.decamelize(key)} = {json.dumps(value, indent=2)};\n" + ) + ui_api_data[placeholder_key] = replacement_value + + create_report(ui_api_data, output_path) + + return output_path diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index ca25fc24..042179ba 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -1,20 +1,52 @@ -from collections.abc import Sequence -from typing import Literal, Optional, Union +""" +Benchmarking profile configurations for coordinating multi-strategy execution. + +Provides configurable profile abstractions for orchestrating sequential and +parallel execution of different scheduling strategies during benchmarking, +with automatic strategy generation and constraint management. + +Classes: + Profile: Abstract base for multi-strategy benchmarking profiles. + SynchronousProfile: Single synchronous strategy execution profile. + ConcurrentProfile: Fixed-concurrency strategy execution profile. + ThroughputProfile: Maximum throughput strategy execution profile. + AsyncProfile: Rate-based asynchronous strategy execution profile. + SweepProfile: Adaptive multi-strategy sweep execution profile. + +Type Aliases: + ProfileType: Literal type for supported profile configurations. 
+""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import Generator +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Literal, +) import numpy as np -from pydantic import Field, computed_field +from pydantic import Field, computed_field, field_serializer, field_validator -from guidellm.objects import StandardBaseModel from guidellm.scheduler import ( AsyncConstantStrategy, AsyncPoissonStrategy, ConcurrentStrategy, + Constraint, + ConstraintInitializer, + ConstraintsInitializerFactory, SchedulingStrategy, StrategyType, SynchronousStrategy, ThroughputStrategy, ) -from guidellm.settings import settings +from guidellm.utils import PydanticClassRegistryMixin + +if TYPE_CHECKING: + from guidellm.benchmark.objects import Benchmark __all__ = [ "AsyncProfile", @@ -24,386 +56,661 @@ "SweepProfile", "SynchronousProfile", "ThroughputProfile", - "create_profile", ] ProfileType = Literal["synchronous", "concurrent", "throughput", "async", "sweep"] -class Profile(StandardBaseModel): +class Profile( + PydanticClassRegistryMixin["type[Profile]"], + ABC, +): + """ + Abstract base for multi-strategy benchmarking execution profiles. + + Coordinates sequential execution of scheduling strategies with automatic + strategy generation, constraint management, and completion tracking for + comprehensive benchmarking workflows. + """ + + schema_discriminator: ClassVar[str] = "type_" + + @classmethod + def __pydantic_schema_base_type__(cls) -> type[Profile]: + if cls.__name__ == "Profile": + return cls + + return Profile + + @classmethod + def create( + cls, + rate_type: str, + rate: float | int | list[float | int] | None, + random_seed: int = 42, + **kwargs: Any, + ) -> Profile: + """ + Create a profile instance based on the specified type. + + :param rate_type: The type of profile to create. + :param rate: Rate parameter for profile configuration. + :param random_seed: Random seed for stochastic strategies. + :param kwargs: Additional arguments for profile configuration. + :return: Configured profile instance for the specified type. + :raises ValueError: If the profile type is not registered. + """ + profile_class: type[Profile] = cls.get_registered_object(rate_type) + resolved_kwargs = profile_class.resolve_args( + rate_type=rate_type, rate=rate, random_seed=random_seed, **kwargs + ) + + return profile_class(**resolved_kwargs) + + @classmethod + @abstractmethod + def resolve_args( + cls, + rate_type: str, + rate: float | int | list[float, int] | None, + random_seed: int, + **kwargs: Any, + ) -> dict[str, Any]: + """ + Resolve and validate arguments for profile construction. + + :param rate_type: The type of the profile. + :param rate: Rate parameter for configuration. + :param random_seed: Random seed for stochastic strategies. + :param kwargs: Additional arguments to resolve. + :return: Dictionary of resolved arguments for profile construction. + """ + ... 
+ type_: Literal["profile"] = Field( - description="The type of benchmarking profile to use.", + description="The type of benchmarking profile to use", ) - completed_strategies: int = Field( - default=0, - description="The number of scheduling strategies generated so far.", - ) - measured_rates: list[float] = Field( + completed_strategies: list[SchedulingStrategy] = Field( default_factory=list, - description=("The average rates measured for the strategies that have run."), + description="The strategies that have completed execution", ) - measured_concurrencies: list[float] = Field( - default_factory=list, - description=( - "The average concurrency measured for the strategies that have run." - ), + constraints: dict[str, Any | dict[str, Any] | ConstraintInitializer] | None = Field( + default=None, + description="Runtime constraints to apply during strategy execution", ) - def completed_strategy(self, average_rate: float, average_concurrency: float): - self.measured_rates.append(average_rate) - self.measured_concurrencies.append(average_concurrency) - self.completed_strategies += 1 - @computed_field # type: ignore[misc] @property def strategy_types(self) -> list[StrategyType]: - return [] + """ + :return: List of all strategy types expected to be executed or have been + executed in this profile. By default, this returns just the + completed strategies. + """ + return [strat.type_ for strat in self.completed_strategies] + + def strategies_generator( + self, + ) -> Generator[ + tuple[ + SchedulingStrategy | None, + dict[str, Any | dict[str, Any] | Constraint] | None, + ], + Benchmark | None, + None, + ]: + """ + Generate strategies and constraints for sequential profile execution. + + :return: Generator yielding (strategy, constraints) tuples and + receiving benchmark results from each execution. + """ + prev_strategy: SchedulingStrategy | None = None + prev_benchmark: Benchmark | None = None + + while ( + strategy := self.next_strategy(prev_strategy, prev_benchmark) + ) is not None: + constraints = self.next_strategy_constraints( + strategy, prev_strategy, prev_benchmark + ) + prev_benchmark = yield ( + strategy, + constraints, + ) + prev_strategy = strategy + self.completed_strategies.append(prev_strategy) + + @abstractmethod + def next_strategy( + self, + prev_strategy: SchedulingStrategy | None, + prev_benchmark: Benchmark | None, + ) -> SchedulingStrategy | None: + """ + Generate the next strategy to execute in the profile sequence. + + :param prev_strategy: The previously completed strategy. + :param prev_benchmark: Benchmark results from the previous strategy. + :return: Next strategy to execute, or None if profile is complete. + """ + ... + + def next_strategy_constraints( + self, + next_strategy: SchedulingStrategy | None, + prev_strategy: SchedulingStrategy | None, + prev_benchmark: Benchmark | None, + ) -> dict[str, Any | dict[str, Any] | Constraint] | None: + """ + Generate constraints for the next strategy execution. + + :param next_strategy: The next strategy to be executed. + :param prev_strategy: The previously completed strategy. + :param prev_benchmark: Benchmark results from the previous strategy. + :return: Constraints dictionary for the next strategy, or None. 
+        """
+        _ = (prev_strategy, prev_benchmark)  # unused
+        return (
+            ConstraintsInitializerFactory.resolve(self.constraints)
+            if next_strategy and self.constraints
+            else None
+        )

-    def next_strategy(self) -> Optional[SchedulingStrategy]:
-        return None
+    @field_validator("constraints", mode="before")
+    @classmethod
+    def _constraints_validator(
+        cls, value: Any
+    ) -> dict[str, Any | dict[str, Any] | ConstraintInitializer] | None:
+        if value is None:
+            return None
+        if not isinstance(value, dict):
+            raise ValueError("Constraints must be a dictionary")
+        return {
+            key: (
+                val
+                if not isinstance(val, ConstraintInitializer)
+                else ConstraintsInitializerFactory.deserialize(initializer_dict=val)
+            )
+            for key, val in value.items()
+        }
+
+    @field_serializer("constraints")
+    def _constraints_serializer(
+        self,
+        constraints: dict[str, Any | dict[str, Any] | ConstraintInitializer] | None,
+    ) -> dict[str, Any | dict[str, Any]] | None:
+        if constraints is None:
+            return None
+
+        return {
+            key: (
+                val
+                if not isinstance(val, ConstraintInitializer)
+                else ConstraintsInitializerFactory.serialize(initializer=val)
+            )
+            for key, val in constraints.items()
+        }
+
+
+@Profile.register("synchronous")
 class SynchronousProfile(Profile):
+    """Single synchronous strategy execution profile."""
+
     type_: Literal["synchronous"] = "synchronous"  # type: ignore[assignment]

+    @classmethod
+    def resolve_args(
+        cls,
+        rate_type: str,
+        rate: float | int | list[float | int] | None,
+        random_seed: int,
+        **kwargs: Any,
+    ) -> dict[str, Any]:
+        """
+        Resolve arguments for synchronous profile construction.
+
+        :param rate_type: The type/strategy of the profile (ignored).
+        :param rate: Rate parameter (must be None, will be stripped).
+        :param random_seed: Random seed (ignored and stripped).
+        :param kwargs: Additional arguments to pass through.
+        :return: Dictionary of resolved arguments.
+        :raises ValueError: If rate is not None.
+        """
+        _ = (rate_type, random_seed)  # unused
+        if rate is not None:
+            raise ValueError("SynchronousProfile does not accept a rate parameter")
+
+        return kwargs
+
     @property
     def strategy_types(self) -> list[StrategyType]:
+        """
+        :return: The single synchronous strategy type.
+        """
         return [self.type_]

-    def next_strategy(self) -> Optional[SchedulingStrategy]:
-        if self.completed_strategies >= 1:
+    def next_strategy(
+        self,
+        prev_strategy: SchedulingStrategy | None,
+        prev_benchmark: Benchmark | None,
+    ) -> SynchronousStrategy | None:
+        """
+        Generate synchronous strategy or None if already completed.
+
+        :param prev_strategy: The previously completed strategy (unused).
+        :param prev_benchmark: Benchmark results from the previous strategy (unused).
+        :return: SynchronousStrategy for the first execution, None afterward.
+        """
+        _ = (prev_strategy, prev_benchmark)  # unused
+        if len(self.completed_strategies) >= 1:
             return None

         return SynchronousStrategy()

-    @staticmethod
-    def from_standard_args(
-        rate_type: Union[StrategyType, ProfileType],
-        rate: Optional[Union[float, Sequence[float]]],
-        **kwargs,
-    ) -> "SynchronousProfile":
-        if rate_type != "synchronous":
-            raise ValueError("Rate type must be 'synchronous' for synchronous profile.")
-
-        if rate is not None:
-            raise ValueError(
-                "Rate does not apply to synchronous profile, it must be set to None."
-            )
-
-        if kwargs:
-            raise ValueError(
-                "No additional arguments are allowed for synchronous profile."
- ) - - return SynchronousProfile() - +@Profile.register("concurrent") class ConcurrentProfile(Profile): + """Fixed-concurrency strategy execution profile with configurable stream counts.""" + type_: Literal["concurrent"] = "concurrent" # type: ignore[assignment] - streams: Union[int, Sequence[int]] = Field( - description="The number of concurrent streams to use.", + streams: int | list[int] = Field( + description="Number of concurrent streams for request scheduling", + gt=0, + ) + startup_duration: float = Field( + default=0.0, + description=( + "Duration in seconds for distributing startup requests " + "before completion-based timing" + ), + ge=0, ) + @classmethod + def resolve_args( + cls, + rate_type: str, + rate: float | int | list[float, int] | None, + random_seed: int, + **kwargs: Any, + ) -> dict[str, Any]: + """ + Resolve arguments for concurrent profile construction. + + :param rate_type: The type/strategy of the profile (ignored). + :param rate: Rate parameter, remapped to streams. + :param random_seed: Random seed (ignored and stripped). + :param kwargs: Additional arguments to pass through. + :return: Dictionary of resolved arguments. + :raises ValueError: If rate is None. + """ + _ = (rate_type, random_seed) # unused + kwargs["streams"] = rate + return kwargs + @property def strategy_types(self) -> list[StrategyType]: - num_strategies = len(self.streams) if isinstance(self.streams, Sequence) else 1 - + """Get concurrent strategy types for each configured stream count.""" + num_strategies = len(self.streams) if isinstance(self.streams, list) else 1 return [self.type_] * num_strategies - def next_strategy(self) -> Optional[SchedulingStrategy]: - streams = self.streams if isinstance(self.streams, Sequence) else [self.streams] - - if self.completed_strategies >= len(streams): + def next_strategy( + self, + prev_strategy: SchedulingStrategy | None, + prev_benchmark: Benchmark | None, + ) -> ConcurrentStrategy | None: + """ + Generate concurrent strategy for the next stream count. + + :param prev_strategy: The previously completed strategy (unused). + :param prev_benchmark: Benchmark results from the previous strategy (unused). + :return: ConcurrentStrategy with next stream count, or None if complete. + """ + _ = (prev_strategy, prev_benchmark) # unused + streams = self.streams if isinstance(self.streams, list) else [self.streams] + + if len(self.completed_strategies) >= len(streams): return None return ConcurrentStrategy( - streams=streams[self.completed_strategies], + streams=streams[len(self.completed_strategies)], + startup_duration=self.startup_duration, ) - @staticmethod - def from_standard_args( - rate_type: Union[StrategyType, ProfileType], - rate: Optional[Union[float, Sequence[float]]], - **kwargs, - ) -> "ConcurrentProfile": - if rate_type != "concurrent": - raise ValueError("Rate type must be 'concurrent' for concurrent profile.") - - if not rate: - raise ValueError("Rate (streams) must be provided for concurrent profile.") - - if not isinstance(rate, Sequence): - rate = [rate] - - if not all(stream.is_integer() and stream > 0 for stream in rate): - raise ValueError( - f"All rate values (streams) must be positive integers, received {rate}" - ) - - if kwargs: - raise ValueError( - "No additional arguments are allowed for concurrent profile." - ) - - return ConcurrentProfile(streams=[int(rat) for rat in rate]) - +@Profile.register("throughput") class ThroughputProfile(Profile): + """ + Maximum throughput strategy execution profile with optional concurrency limits. 
+ """ + type_: Literal["throughput"] = "throughput" # type: ignore[assignment] - max_concurrency: Optional[int] = Field( + max_concurrency: int | None = Field( default=None, - description="The maximum number of concurrent requests that can be scheduled.", + description="Maximum number of concurrent requests to schedule", + gt=0, + ) + startup_duration: float = Field( + default=0.0, + description=( + "Duration in seconds for distributing startup requests " + "before full throughput scheduling" + ), + ge=0, ) + @classmethod + def resolve_args( + cls, + rate_type: str, + rate: float | int | list[float, int] | None, + random_seed: int, + **kwargs: Any, + ) -> dict[str, Any]: + """ + Resolve arguments for throughput profile construction. + + :param rate_type: The type/strategy of the profile (ignored). + :param rate: Rate parameter to remap to max_concurrency. + :param random_seed: Random seed (ignored and stripped). + :param kwargs: Additional arguments to pass through. + :return: Dictionary of resolved arguments. + """ + _ = (rate_type, random_seed) # unused + # Remap rate to max_concurrency, strip out random_seed + kwargs.pop("random_seed", None) + if rate is not None: + kwargs["max_concurrency"] = rate + return kwargs + @property def strategy_types(self) -> list[StrategyType]: + """Get the single throughput strategy type.""" return [self.type_] - def next_strategy(self) -> Optional[SchedulingStrategy]: - if self.completed_strategies >= 1: + def next_strategy( + self, + prev_strategy: SchedulingStrategy | None, + prev_benchmark: Benchmark | None, + ) -> ThroughputStrategy | None: + """ + Generate throughput strategy or None if already completed. + + :param prev_strategy: The previously completed strategy (unused). + :param prev_benchmark: Benchmark results from the previous strategy (unused). + :return: ThroughputStrategy for the first execution, None afterward. + """ + _ = (prev_strategy, prev_benchmark) # unused + if len(self.completed_strategies) >= 1: return None return ThroughputStrategy( max_concurrency=self.max_concurrency, + startup_duration=self.startup_duration, ) - @staticmethod - def from_standard_args( - rate_type: Union[StrategyType, ProfileType], - rate: Optional[Union[float, Sequence[float]]], - **kwargs, - ) -> "ThroughputProfile": - if rate_type != "throughput": - raise ValueError("Rate type must be 'throughput' for throughput profile.") - - if rate is not None: - raise ValueError( - "Rate does not apply to throughput profile, it must be set to None." - ) - return ThroughputProfile(**kwargs) +@Profile.register(["async", "constant", "poisson"]) +class AsyncProfile(Profile): + """ + Rate-based asynchronous strategy execution profile with configurable patterns. + """ - -class AsyncProfile(ThroughputProfile): - type_: Literal["async"] = "async" # type: ignore[assignment] + type_: Literal["async", "constant", "poisson"] = "async" # type: ignore[assignment] strategy_type: Literal["constant", "poisson"] = Field( - description="The type of asynchronous strategy to use.", + description="Type of asynchronous strategy pattern to use", ) - rate: Union[float, Sequence[float]] = Field( - description="The rate of requests per second to use.", + rate: float | list[float] = Field( + description="Request scheduling rate in requests per second", + gt=0, ) - initial_burst: bool = Field( - default=True, + startup_duration: float = Field( + default=0.0, description=( - "True to send an initial burst of requests (math.floor(self.rate)) " - "to reach target rate. 
False to not send an initial burst." + "Duration in seconds for distributing startup requests " + "to converge quickly to desired rate" ), + ge=0, + ) + max_concurrency: int | None = Field( + default=None, + description="Maximum number of concurrent requests to schedule", + gt=0, ) random_seed: int = Field( default=42, - description=( - "The random seed to use for the asynchronous strategy. " - "This is used to generate random numbers for the Poisson strategy." - ), + description="Random seed for Poisson distribution strategy", ) + @classmethod + def resolve_args( + cls, + rate_type: str, + rate: float | int | list[float, int] | None, + random_seed: int, + **kwargs: Any, + ) -> dict[str, Any]: + """ + Resolve arguments for async profile construction. + + :param rate_type: The type/strategy of the profile. + :param rate: Rate parameter for the profile. + :param random_seed: Random seed for stochastic strategies. + :param kwargs: Additional arguments to pass through. + :return: Dictionary of resolved arguments. + :raises ValueError: If rate is None. + """ + if rate is None: + raise ValueError("AsyncProfile requires a rate parameter") + + kwargs["type_"] = ( + rate_type + if rate_type in ["async", "constant", "poisson"] + else kwargs.get("type_", "async") + ) + kwargs["strategy_type"] = ( + rate_type + if rate_type in ["constant", "poisson"] + else kwargs.get("strategy_type", "constant") + ) + kwargs["rate"] = rate + kwargs["random_seed"] = random_seed + return kwargs + @property def strategy_types(self) -> list[StrategyType]: - num_strategies = len(self.rate) if isinstance(self.rate, Sequence) else 1 - + """Get async strategy types for each configured rate.""" + num_strategies = len(self.rate) if isinstance(self.rate, list) else 1 return [self.strategy_type] * num_strategies - def next_strategy(self) -> Optional[SchedulingStrategy]: - rate = self.rate if isinstance(self.rate, Sequence) else [self.rate] - - if self.completed_strategies >= len(rate): + def next_strategy( + self, + prev_strategy: SchedulingStrategy | None, + prev_benchmark: Benchmark | None, + ) -> AsyncConstantStrategy | AsyncPoissonStrategy | None: + """ + Generate async strategy for the next configured rate. + + :param prev_strategy: The previously completed strategy (unused). + :param prev_benchmark: Benchmark results from the previous strategy (unused). + :return: AsyncConstantStrategy or AsyncPoissonStrategy for next rate, + or None if all rates completed. + :raises ValueError: If strategy_type is neither 'constant' nor 'poisson'. 
+ """ + _ = (prev_strategy, prev_benchmark) # unused + rate = self.rate if isinstance(self.rate, list) else [self.rate] + + if len(self.completed_strategies) >= len(rate): return None + current_rate = rate[len(self.completed_strategies)] + if self.strategy_type == "constant": return AsyncConstantStrategy( - rate=rate[self.completed_strategies], - initial_burst=self.initial_burst, + rate=current_rate, + startup_duration=self.startup_duration, max_concurrency=self.max_concurrency, ) elif self.strategy_type == "poisson": return AsyncPoissonStrategy( - rate=rate[self.completed_strategies], - initial_burst=self.initial_burst, + rate=current_rate, + startup_duration=self.startup_duration, max_concurrency=self.max_concurrency, random_seed=self.random_seed, ) else: raise ValueError(f"Invalid strategy type: {self.strategy_type}") - @staticmethod - def from_standard_args( # type: ignore[override] - rate_type: Union[StrategyType, ProfileType], - rate: Optional[Union[float, Sequence[float]]], - random_seed: int, - **kwargs, - ) -> "AsyncProfile": - if rate_type not in ("async", "constant", "poisson"): - raise ValueError( - "Rate type must be in ('async', 'constant', 'poisson') " - f"for async profile. Received: {rate_type}" - ) - - if not rate: - raise ValueError("Rate must be provided for async profile.") - - if not isinstance(rate, Sequence): - rate = [rate] - - if not all(isinstance(r, (float, int)) and r > 0 for r in rate): - raise ValueError( - f"All rate values must be positive numbers, received {rate}" - ) - - if rate_type == "async": - rate_type = "constant" # default to constant if not specified - return AsyncProfile( - strategy_type=rate_type, # type: ignore[arg-type] - rate=rate, - random_seed=random_seed, - **kwargs, - ) +@Profile.register("sweep") +class SweepProfile(Profile): + """ + Adaptive multi-strategy sweep execution profile with rate discovery. + """ - -class SweepProfile(AsyncProfile): type_: Literal["sweep"] = "sweep" # type: ignore[assignment] sweep_size: int = Field( - description="The number of strategies to generate for the sweep.", + description="Number of strategies to generate for the sweep", + ge=2, + ) + strategy_type: Literal["constant", "poisson"] = "constant" + startup_duration: float = Field( + default=0.0, + description=( + "Duration in seconds for distributing startup requests " + "to converge quickly to desired rate" + ), + ge=0, + ) + max_concurrency: int | None = Field( + default=None, + description="Maximum number of concurrent requests to schedule", + gt=0, ) - rate: float = -1 - rate_type: Literal["constant", "poisson"] = "constant" + random_seed: int = Field( + default=42, + description="Random seed for Poisson distribution strategy", + ) + synchronous_rate: float = Field( + default=-1.0, + description="Measured rate from synchronous strategy execution", + ) + throughput_rate: float = Field( + default=-1.0, + description="Measured rate from throughput strategy execution", + ) + async_rates: list[float] = Field( + default_factory=list, + description="Generated rates for async strategy sweep", + ) + measured_rates: list[float] = Field( + default_factory=list, + description="Calculated interpolated rates between synchronous and throughput", + ) + + @classmethod + def resolve_args( + cls, + rate_type: str, + rate: float | int | list[float, int] | None, + random_seed: int, + **kwargs: Any, + ) -> dict[str, Any]: + """ + Resolve arguments for sweep profile construction. + + :param rate_type: The type/strategy for async strategies in the sweep. 
+ :param rate: Rate parameter (ignored for sweep). + :param random_seed: Random seed for stochastic strategies. + :param kwargs: Additional arguments to pass through. + :return: Dictionary of resolved arguments. + """ + kwargs["sweep_size"] = kwargs.get("sweep_size", rate) + kwargs["random_seed"] = random_seed + if rate_type in ["constant", "poisson"]: + kwargs["strategy_type"] = rate_type + return kwargs @property def strategy_types(self) -> list[StrategyType]: - return ( - ["synchronous"] + ["throughput"] + [self.rate_type] * (self.sweep_size - 2) # type: ignore[return-value] - ) - - def next_strategy(self) -> Optional[SchedulingStrategy]: - if self.completed_strategies >= self.sweep_size: - return None - - if self.completed_strategies == 0: + """Get strategy types for the complete sweep sequence.""" + types = ["synchronous", "throughput"] + types += [self.strategy_type] * (self.sweep_size - len(types)) + return types + + def next_strategy( + self, + prev_strategy: SchedulingStrategy | None, + prev_benchmark: Benchmark | None, + ) -> ( + AsyncConstantStrategy + | AsyncPoissonStrategy + | SynchronousProfile + | ThroughputProfile + | None + ): + """ + Generate the next strategy in the adaptive sweep sequence. + + Executes synchronous and throughput strategies first to measure + baseline rates, then generates interpolated rates for async strategies. + + :param prev_strategy: The previously completed strategy. + :param prev_benchmark: Benchmark results from the previous strategy. + :return: Next strategy in sweep sequence, or None if complete. + :raises ValueError: If strategy_type is neither 'constant' nor 'poisson'. + """ + if prev_strategy is None: return SynchronousStrategy() - if self.completed_strategies == 1: + if prev_strategy.type_ == "synchronous": + self.synchronous_rate = ( + prev_benchmark.metrics.requests_per_second.successful.mean + ) + return ThroughputStrategy( max_concurrency=self.max_concurrency, + startup_duration=self.startup_duration, ) - min_rate = self.measured_rates[0] - max_rate = self.measured_rates[1] - rates = np.linspace(min_rate, max_rate, self.sweep_size - 1)[1:] + if prev_strategy.type_ == "throughput": + self.throughput_rate = ( + prev_benchmark.metrics.requests_per_second.successful.mean + ) + self.measured_rates = list( + np.linspace( + self.synchronous_rate, + self.throughput_rate, + self.sweep_size - 1, + ) + )[1:] # don't rerun synchronous - if self.rate_type == "constant": + if len(self.completed_strategies) >= self.sweep_size: + return None + + next_rate_index = len( + [ + strat + for strat in self.completed_strategies + if strat.type_ == self.strategy_type + ] + ) + + if self.strategy_type == "constant": return AsyncConstantStrategy( - rate=rates[self.completed_strategies - 2], - initial_burst=self.initial_burst, + rate=self.measured_rates[next_rate_index], + startup_duration=self.startup_duration, max_concurrency=self.max_concurrency, ) - elif self.rate_type == "poisson": + elif self.strategy_type == "poisson": return AsyncPoissonStrategy( - rate=rates[self.completed_strategies - 2], - initial_burst=self.initial_burst, + rate=self.measured_rates[next_rate_index], + startup_duration=self.startup_duration, max_concurrency=self.max_concurrency, + random_seed=self.random_seed, ) else: - raise ValueError(f"Invalid strategy type: {self.rate_type}") - - @staticmethod - def from_standard_args( # type: ignore[override] - rate_type: Union[StrategyType, ProfileType], - rate: Optional[Union[float, Sequence[float]]], - random_seed: int, - **kwargs, - ) -> 
"SweepProfile": - if rate_type != "sweep": - raise ValueError("Rate type must be 'sweep' for sweep profile.") - - if "sweep_size" in kwargs: - raise ValueError("Sweep size must not be provided, use rate instead.") - - if isinstance(rate, Sequence): - if len(rate) != 1: - raise ValueError( - "Rate must be a single value for sweep profile, received " - f"{len(rate)} values." - ) - rate = rate[0] - - if not rate: - rate = settings.default_sweep_number - - if not rate: - raise ValueError( - "Rate (sweep_size) must be provided for concurrent profile." - ) - - if ( - not isinstance(rate, (int, float)) - or (isinstance(rate, float) and not rate.is_integer()) - or rate <= 1 - ): - raise ValueError( - f"Rate (sweep_size) must be a positive integer > 1, received {rate} " - f"with type {type(rate)}" - ) - - if not kwargs: - kwargs = {} - - if "strategy_type" not in kwargs: - kwargs["strategy_type"] = "constant" - - return SweepProfile(sweep_size=int(rate), random_seed=random_seed, **kwargs) - - -def create_profile( - rate_type: Union[StrategyType, ProfileType], - rate: Optional[Union[float, Sequence[float]]], - random_seed: int = 42, - **kwargs, -) -> "Profile": - if rate_type == "synchronous": - return SynchronousProfile.from_standard_args( - rate_type=rate_type, - rate=rate, - **kwargs, - ) - - if rate_type == "concurrent": - return ConcurrentProfile.from_standard_args( - rate_type=rate_type, - rate=rate, - **kwargs, - ) - - if rate_type == "throughput": - return ThroughputProfile.from_standard_args( - rate_type=rate_type, - rate=rate, - **kwargs, - ) - - if rate_type in ("async", "constant", "poisson"): - return AsyncProfile.from_standard_args( - rate_type=rate_type, - rate=rate, - random_seed=random_seed, - **kwargs, - ) - - if rate_type == "sweep": - return SweepProfile.from_standard_args( - rate_type=rate_type, - rate=rate, - random_seed=random_seed, - **kwargs, - ) - - raise ValueError(f"Invalid profile type: {rate_type}") + raise ValueError(f"Invalid strategy type: {self.strategy_type}") diff --git a/src/guidellm/benchmark/progress.py b/src/guidellm/benchmark/progress.py index 1232107b..d6b881bc 100644 --- a/src/guidellm/benchmark/progress.py +++ b/src/guidellm/benchmark/progress.py @@ -1,8 +1,27 @@ -import math -import time +""" +Benchmark progress tracking and console display abstractions. + +Provides progress tracking interfaces and implementations for monitoring benchmark +execution, displaying real-time statistics, and managing UI updates during +generative benchmarking operations. + +Classes: + BenchmarkerProgress: Abstract base for benchmark progress tracking. + BenchmarkerProgressGroup: Composite progress handler for multiple instances. + GenerativeConsoleBenchmarkerProgress: Console-based progress display. + +Type Variables: + BenchmarkT: Generic benchmark object type. 
+""" + +from __future__ import annotations + +import asyncio +from abc import ABC, abstractmethod +from collections.abc import AsyncIterable, AsyncIterator, Iterable from dataclasses import dataclass from datetime import datetime -from typing import Generic, Optional, TypeVar, Union +from typing import Any, Generic, Literal from rich.console import Group from rich.live import Live @@ -10,7 +29,6 @@ from rich.progress import ( BarColumn, Progress, - ProgressColumn, SpinnerColumn, TaskID, TaskProgressColumn, @@ -19,145 +37,631 @@ TimeRemainingColumn, ) -from guidellm.benchmark.aggregator import ( - BenchmarkAggregator, - GenerativeBenchmarkAggregator, -) -from guidellm.benchmark.benchmark import Benchmark, GenerativeBenchmark -from guidellm.benchmark.benchmarker import BenchmarkerResult +from guidellm.benchmark.aggregator import AggregatorState +from guidellm.benchmark.objects import BenchmarkT, GenerativeBenchmark +from guidellm.benchmark.profile import Profile from guidellm.scheduler import ( + SchedulerState, SchedulingStrategy, StrategyType, - strategy_display_str, ) -from guidellm.utils import Colors +from guidellm.utils import Colors, format_value_display __all__ = [ - "BenchmarkerProgressDisplay", - "BenchmarkerTaskProgressState", - "GenerativeTextBenchmarkerProgressDisplay", - "GenerativeTextBenchmarkerTaskProgressState", + "BenchmarkerProgress", + "BenchmarkerProgressGroup", + "GenerativeConsoleBenchmarkerProgress", ] -@dataclass -class BenchmarkerTaskProgressState: - display_scheduler_stats: bool - - task_id: TaskID - strategy: Union[StrategyType, SchedulingStrategy] - started: bool = False - compiling: bool = False - ended: bool = False - - start_time: Optional[float] = None - max_number: Optional[float] = None - max_duration: Optional[float] = None - in_warmup: bool = False - in_cooldown: bool = False - - requests_rate: float = 0 - request_latency: float = 0 - requests_processing: float = 0 - requests_successful: float = 0 - requests_incomplete: float = 0 - requests_errored: float = 0 +class BenchmarkerProgress(Generic[BenchmarkT], ABC): + """ + Abstract base class for tracking and displaying benchmark progress. + + Provides lifecycle hooks for monitoring benchmark execution stages including + initialization, start, updates, completion, and finalization. Supports + enable/disable functionality for conditional progress tracking. + """ + + def __init__(self, enabled: bool = True): + """ + Initialize progress tracker. - worker_overheads_time_ms: float = 0.0 - backend_overheads_time_ms: float = 0.0 - requests_sleep_time_ms: float = 0.0 - requests_targeted_start_time_delay_ms: float = 0.0 + :param enabled: Whether to enable progress tracking and display. + """ + self._enabled = enabled + self.profile: Profile = None + self.current_strategy: SchedulingStrategy = None @property - def description(self) -> str: - return strategy_display_str(self.strategy) + def enabled(self) -> bool: + """ + :return: Whether progress tracking is currently enabled. + """ + return self._enabled + + @enabled.setter + def enabled(self, value: bool) -> None: + """ + :param value: True to enable progress tracking, False to disable. + :raises RuntimeError: If called after progress run has started. 
+ """ + if self.profile is not None: + raise RuntimeError( + "Cannot change enabled state after __call__ for progress run" + ) + + self._enabled = value + + def __call__( + self, + profile: Profile, + agen: AsyncIterable[ + tuple[ + AggregatorState | None, + BenchmarkT | None, + SchedulingStrategy, + SchedulerState | None, + ] + ], + ) -> AsyncIterator[ + tuple[ + AggregatorState | None, + BenchmarkT | None, + SchedulingStrategy, + SchedulerState | None, + ] + ]: + """ + Track progress through benchmark execution pipeline. + + Wraps the provided async generator to monitor benchmark progress, + calling appropriate lifecycle hooks based on execution state. + + :param profile: Benchmark profile configuration. + :param agen: Async generator yielding benchmark execution updates. + :return: Async iterator forwarding original updates with progress tracking. + """ + + async def aiterator() -> AsyncIterator[ + tuple[ + AggregatorState | None, + BenchmarkT | None, + SchedulingStrategy, + SchedulerState | None, + ] + ]: + self.profile = profile + if self.enabled: + await self.on_initialize(profile) + + async for aggregator_update, benchmark, strategy, scheduler_state in agen: + if self.enabled: + await self.on_raw_update( + profile, + aggregator_update, + benchmark, + strategy, + scheduler_state, + ) + + if self.current_strategy != strategy: + self.current_strategy = strategy + await self.on_benchmark_start(strategy) + elif benchmark is not None: + await self.on_benchmark_complete(benchmark) + self.current_strategy = None + else: + await self.on_benchmark_update( + aggregator_update, scheduler_state + ) + + yield aggregator_update, benchmark, strategy, scheduler_state + + if self.enabled: + await self.on_finalize() + + return aiterator() + + @abstractmethod + async def on_initialize(self, profile: Profile): + """ + Initialize progress tracking for benchmark profile. + + :param profile: Benchmark profile configuration. + """ + + @abstractmethod + async def on_benchmark_start(self, strategy: SchedulingStrategy): + """ + Handle start of new benchmark strategy execution. + + :param strategy: Scheduling strategy being executed. + """ + + @abstractmethod + async def on_benchmark_update( + self, aggregator_update: AggregatorState, scheduler_state: SchedulerState + ): + """ + Handle benchmark execution progress update. + + :param aggregator_update: Current benchmark metrics and statistics. + :param scheduler_state: Current scheduler execution state. + """ + + @abstractmethod + async def on_benchmark_complete(self, benchmark: BenchmarkT): + """ + Handle completion of benchmark strategy execution. + + :param benchmark: Completed benchmark results. + """ + + @abstractmethod + async def on_finalize(self): + """Finalize progress tracking and cleanup resources.""" + + async def on_raw_update( + self, + profile: Profile, + aggregator_update: AggregatorState | None, + benchmark: BenchmarkT | None, + strategy: SchedulingStrategy, + scheduler_state: SchedulerState | None, + ): + """ + Handle raw benchmark execution update. + + Optional hook for accessing all execution state updates. Default + implementation does nothing. + + :param profile: Benchmark profile configuration. + :param aggregator_update: Current benchmark metrics and statistics. + :param benchmark: Completed benchmark if available. + :param strategy: Current scheduling strategy. + :param scheduler_state: Current scheduler execution state. 
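+
+        Example (illustrative override sketch for a concrete subclass)::
+
+            async def on_raw_update(
+                self, profile, aggregator_update, benchmark, strategy, scheduler_state
+            ):
+                if benchmark is not None:
+                    print(f"Benchmark finished for strategy {strategy.type_}")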
+ """ + + +class BenchmarkerProgressGroup(BenchmarkerProgress[BenchmarkT]): + """ + Composite progress handler that manages multiple progress instances. + + Distributes progress events to all contained progress instances, enabling + parallel progress tracking through multiple channels (e.g., console display + and file logging). + + :param instances: Collection of progress handlers to manage. + :param enabled: Whether the group is active. + """ + + def __init__( + self, + instances: ( + Iterable[BenchmarkerProgress[BenchmarkT]] + | list[BenchmarkerProgress[BenchmarkT]] + ), + enabled: bool = True, + ): + """ + Initialize progress group with handler instances. + + :param instances: Progress handler instances to coordinate. + :param enabled: Whether to enable the progress group. + """ + self.instances: list[BenchmarkerProgress[BenchmarkT]] = list(instances) + super().__init__(enabled=enabled) @property - def total(self) -> Optional[float]: - if self.max_number is None and self.max_duration is None: - return None + def enabled(self) -> bool: + """Whether the progress group is currently enabled.""" + return self._enabled + + @enabled.setter + def enabled(self, value: bool): + """ + Set enabled state for group and all contained instances. + + :param value: New enabled state. + """ + self._enabled = value + for instance in self.instances: + instance.enabled = value - return 1000 + async def on_initialize(self, profile: Profile): + """ + Initialize all progress handler instances. + + :param profile: Benchmark profile configuration. + """ + await asyncio.gather( + *[child.on_initialize(profile) for child in self.instances] + ) + + async def on_benchmark_start(self, strategy: SchedulingStrategy): + """ + Notify all handlers of benchmark strategy start. + + :param strategy: Scheduling strategy being executed. + """ + await asyncio.gather( + *[child.on_benchmark_start(strategy) for child in self.instances] + ) + + async def on_benchmark_update( + self, aggregator_update: AggregatorState, scheduler_state: SchedulerState + ): + """ + Distribute benchmark updates to all handlers. + + :param aggregator_update: Current benchmark metrics and statistics. + :param scheduler_state: Current scheduler execution state. + """ + await asyncio.gather( + *[ + child.on_benchmark_update(aggregator_update, scheduler_state) + for child in self.instances + ] + ) + + async def on_benchmark_complete(self, benchmark: BenchmarkT): + """ + Notify all handlers of benchmark completion. + + :param benchmark: Completed benchmark results. + """ + await asyncio.gather( + *[child.on_benchmark_complete(benchmark) for child in self.instances] + ) + + async def on_finalize(self): + """Finalize all progress handler instances.""" + await asyncio.gather(*[child.on_finalize() for child in self.instances]) + + async def on_raw_update( + self, + profile: Profile, + aggregator_update: AggregatorState | None, + benchmark: BenchmarkT | None, + strategy: SchedulingStrategy, + scheduler_state: SchedulerState | None, + ): + """ + Distribute raw updates to all handlers. + + :param profile: Benchmark profile configuration. + :param aggregator_update: Current benchmark metrics and statistics. + :param benchmark: Completed benchmark if available. + :param strategy: Current scheduling strategy. + :param scheduler_state: Current scheduler execution state. 
+ """ + await asyncio.gather( + *[ + child.on_raw_update( + profile, + aggregator_update, + benchmark, + strategy, + scheduler_state, + ) + for child in self.instances + ] + ) + + +class GenerativeConsoleBenchmarkerProgress( + BenchmarkerProgress[GenerativeBenchmark], Live +): + """ + Console-based progress display for generative benchmarks. + + Provides real-time visual progress tracking using Rich library components, + displaying benchmark execution statistics, timing information, and progress + bars in a structured console interface. + """ + + def __init__(self, enabled: bool = True, display_scheduler_stats: bool = False): + """ + Initialize console progress display. + + :param enabled: Whether to enable progress tracking and display. + :param display_scheduler_stats: Whether to display scheduler statistics. + """ + BenchmarkerProgress.__init__(self, enabled=enabled) + Live.__init__( + self, + refresh_per_second=4, + auto_refresh=True, + redirect_stdout=True, + redirect_stderr=True, + ) + self.display_scheduler_stats: bool = display_scheduler_stats + self.run_progress: Progress = None + self.run_progress_task: TaskID = None + self.tasks_progress: _GenerativeProgressTasks = None + + async def on_initialize(self, profile: Profile): + """ + Initialize console display components and start rendering. + + :param profile: Benchmark profile configuration. + """ + self.tasks_progress = _GenerativeProgressTasks( + profile=profile, display_scheduler_stats=self.display_scheduler_stats + ) + self.run_progress = Progress( + TextColumn("Generating...", style=f"italic {Colors.progress}"), + BarColumn( + bar_width=None, + complete_style=Colors.progress, + finished_style=Colors.success, + ), + TextColumn( + "({task.fields[completed_benchmarks]}/{task.fields[total_benchmarks]})", + style=Colors.progress, + ), + TextColumn("["), + TimeElapsedColumn(), + TextColumn("<"), + TimeRemainingColumn(), + TextColumn("]"), + ) + self.run_progress_task = self.run_progress.add_task("") + self._sync_run_progress() + self.update( + Group( + Panel( + self.tasks_progress, + title="Benchmarks", + title_align="left", + expand=True, + ), + self.run_progress, + ) + ) + self.start() + + async def on_benchmark_start(self, strategy: SchedulingStrategy): + """ + Update display for new benchmark strategy start. + + :param strategy: Scheduling strategy being executed. + """ + self.tasks_progress.start_benchmark(strategy) + self._sync_run_progress() + + async def on_benchmark_update( + self, aggregator_update: AggregatorState | None, scheduler_state: SchedulerState + ): + """ + Update display with current benchmark progress. + + :param aggregator_update: Current benchmark metrics and statistics. + :param scheduler_state: Current scheduler execution state. + """ + self.tasks_progress.update_benchmark(aggregator_update, scheduler_state) + self._sync_run_progress() + + async def on_benchmark_complete(self, benchmark: GenerativeBenchmark): + """ + Update display for completed benchmark. + + :param benchmark: Completed benchmark results. 
+ """ + self.tasks_progress.complete_benchmark(benchmark) + self._sync_run_progress() + + async def on_finalize(self): + """Stop display rendering and cleanup resources.""" + self.tasks_progress.finalize() + self._sync_run_progress() + self.run_progress.stop_task(self.run_progress_task) + self.stop() + self.run_progress = None + self.run_progress_task = None + self.tasks_progress = None + + def _sync_run_progress(self): + """Synchronize overall progress display with task progress.""" + self.run_progress.update( + self.run_progress_task, + total=self.tasks_progress.steps_total, + completed=self.tasks_progress.steps_progress, + completed_benchmarks=self.tasks_progress.tasks_progress, + total_benchmarks=self.tasks_progress.tasks_total, + ) + + +# Scaling factor for progress calculations to provide granular progress updates +_PROGRESS_SCALE = 1000 + + +class _GenerativeProgressTasks(Progress): + def __init__(self, profile: Profile, display_scheduler_stats: bool): + self.profile: Profile = profile + self.display_scheduler_stats: bool = display_scheduler_stats + self.benchmark_task_states: list[_GenerativeProgressTaskState] = [] + self.current_index: int = -1 + + summary_text = "{task.fields[requests_summary]}\n{task.fields[tokens_summary]}" + if self.display_scheduler_stats: + summary_text += "\n{task.fields[scheduler_stats]}" + super().__init__( + TextColumn("[{task.fields[start_time]}]"), + SpinnerColumn(style=Colors.progress), + TaskProgressColumn(style=Colors.progress), + TextColumn("{task.description}"), + TextColumn("({task.fields[progress_status]})"), + TextColumn(" "), + TextColumn(summary_text), + ) + + for strategy_type in profile.strategy_types: + task_state = _GenerativeProgressTaskState( + strategy_type=strategy_type, + ) + task_id = self.add_task(**task_state.current) + task_state.task_id = task_id + self.benchmark_task_states.append(task_state) @property - def completed(self) -> int: - if self.ended: - return 1000 + def tasks_total(self) -> int: + return len(self.benchmark_task_states) - if self.max_number is None and self.max_duration is None: - return 0 + @property + def tasks_progress(self) -> int: + return self.current_index + 1 - number = self.requests_successful + self.requests_errored - number_percent = ( - number / float(self.max_number) * 1000 if self.max_number else -math.inf + @property + def steps_total(self) -> int: + return _PROGRESS_SCALE * len(self.benchmark_task_states) + + @property + def steps_progress(self) -> int: + progress_current_task = ( + self.benchmark_task_states[self.current_index].progress + if self.current_index < len(self.benchmark_task_states) + else 0 + ) + progress_total = self.current_index + (progress_current_task or 0) + + return progress_total * _PROGRESS_SCALE + + def start_benchmark(self, strategy: SchedulingStrategy): + self.current_index += 1 + if self.current_index >= len(self.benchmark_task_states): + # New task past initially estimated, append it to the end + task_state = _GenerativeProgressTaskState(strategy_type=strategy.type_) + task_id = self.add_task(**task_state.current) + task_state.task_id = task_id + self.benchmark_task_states.append(task_state) + + self.benchmark_task_states[self.current_index].start(strategy) + self.update( + self.benchmark_task_states[self.current_index].task_id, + start=True, + **self.benchmark_task_states[self.current_index].current, + ) + + def update_benchmark( + self, aggregator_update: AggregatorState, scheduler_state: SchedulerState + ): + self.benchmark_task_states[self.current_index].update( + 
aggregator_update, scheduler_state + ) + self.update( + self.benchmark_task_states[self.current_index].task_id, + **self.benchmark_task_states[self.current_index].current, ) - duration_percent = ( - (time.time() - self.start_time) / self.max_duration * 1000 - if self.max_duration and self.start_time - else -math.inf + + def complete_benchmark(self, benchmark: GenerativeBenchmark): + self.benchmark_task_states[self.current_index].complete(benchmark) + self.update( + self.benchmark_task_states[self.current_index].task_id, + **self.benchmark_task_states[self.current_index].current, ) - return min(int(max(number_percent, duration_percent)), 1000) + def finalize(self): + self.stop() + + +@dataclass +class _GenerativeProgressTaskState: + strategy_type: StrategyType + task_id: TaskID = None + strategy: SchedulingStrategy | None = None + benchmark_status: Literal[ + "pending", "in_warmup", "in_progress", "in_cooldown", "completed" + ] = "pending" + progress: float | None = None + start_time: float = -1.0 + successful_requests: int = 0 + cancelled_requests: int = 0 + errored_requests: int = 0 + request_concurrency: int = 0 + requests_per_second: float = 0 + request_latency: float = 0 + output_tokens: int = 0 + output_tokens_rate: float = 0 + prompt_tokens: int = 0 + total_tokens_rate: float = 0 + time_to_first_token: float = 0 + inter_token_latency: float = 0 + queued_time: float = 0 + request_targeted_start_delay: float = 0 + scheduler_overheads_time: float = 0 @property - def fields(self) -> dict[str, str]: - fields = { + def current(self) -> dict[str, Any]: + return { "start_time": self.formatted_start_time, + "description": str(self.strategy or self.strategy_type), "progress_status": self.formatted_progress_status, "requests_summary": self.formatted_requests_summary, + "tokens_summary": self.formatted_tokens_summary, + "scheduler_stats": self.formatted_scheduler_stats, + "completed": self.completed, + "total": self.total, } - if self.display_scheduler_stats: - fields["scheduler_stats"] = self.formatted_scheduler_stats + @property + def completed(self) -> float: + if self.benchmark_status == "pending": + return 0 + + if self.benchmark_status == "completed": + return _PROGRESS_SCALE - return fields + return self.progress * _PROGRESS_SCALE if self.progress is not None else None + + @property + def total(self) -> float: + return _PROGRESS_SCALE @property def formatted_start_time(self) -> str: - if self.start_time is None: + if self.start_time < 0.0: return "--:--:--" return datetime.fromtimestamp(self.start_time).strftime("%H:%M:%S") @property def formatted_progress_status(self) -> str: - if self.ended: - status = "complete" - color = Colors.SUCCESS - elif self.compiling: - status = "compiling" - color = Colors.PROGRESS - elif self.started and self.in_warmup: + if self.benchmark_status == "in_warmup": status = "warmup" - color = Colors.PROGRESS - elif self.started and self.in_cooldown: - status = "cooldown" - color = Colors.PROGRESS - elif self.started: + color = Colors.progress + elif self.benchmark_status == "in_progress": status = "running" - color = Colors.PROGRESS + color = Colors.progress + elif self.benchmark_status == "in_cooldown": + status = "cooldown" + color = Colors.progress + elif self.benchmark_status == "completed": + status = "complete" + color = Colors.success else: status = "pending" - color = Colors.INFO + color = Colors.info return f"[{color}]{status.ljust(8)}[/{color}]" @property def formatted_requests_summary(self) -> str: - if not self.started: + if self.benchmark_status == 
"pending": return " " return ( - f"[{Colors.INFO}]Req:[/{Colors.INFO}] " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.requests_rate, + f"[{Colors.info}]Req:[/{Colors.info}] " + + format_value_display( + value=self.requests_per_second, label="req/s", total_characters=12, digits_places=4, decimal_places=1, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( + + format_value_display( value=self.request_latency, label="Lat", units="s", @@ -166,32 +670,32 @@ def formatted_requests_summary(self) -> str: decimal_places=2, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.requests_processing, + + format_value_display( + value=self.request_concurrency, label="Conc", total_characters=12, digits_places=4, decimal_places=1, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.requests_successful, + + format_value_display( + value=self.successful_requests, label="Comp", total_characters=12, digits_places=5, decimal_places=0, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.requests_incomplete, + + format_value_display( + value=self.cancelled_requests, label="Inc", total_characters=12, digits_places=5, decimal_places=0, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.requests_errored, + + format_value_display( + value=self.errored_requests, label="Err", total_characters=12, digits_places=5, @@ -199,101 +703,14 @@ def formatted_requests_summary(self) -> str: ) ) - @property - def formatted_scheduler_stats(self) -> str: - if not self.started: - return " " - - return ( - f"[{Colors.INFO}]Sys:[/{Colors.INFO}] " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.worker_overheads_time_ms, - label="Work OH", - units="ms", - total_characters=18, - digits_places=3, - decimal_places=1, - ) - + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.backend_overheads_time_ms, - label="Back OH", - units="ms", - total_characters=18, - digits_places=3, - decimal_places=1, - ) - + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.requests_sleep_time_ms, - label="Req Sleep", - units="ms", - total_characters=18, - digits_places=5, - decimal_places=0, - ) - + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.requests_targeted_start_time_delay_ms, - label="Start Del", - units="ms", - total_characters=18, - digits_places=5, - decimal_places=0, - ) - ) - - @staticmethod - def format_progress_display( - value: float, - label: str, - units: str = "", - total_characters: Optional[int] = None, - digits_places: Optional[int] = None, - decimal_places: Optional[int] = None, - ) -> str: - if decimal_places is None and digits_places is None: - formatted_number = f"{value:.0f}" - elif digits_places is None: - formatted_number = f"{value:.{decimal_places}f}" - elif decimal_places is None: - formatted_number = f"{value:>{digits_places}f}" - else: - formatted_number = f"{value:>{digits_places}.{decimal_places}f}" - - result = f"{formatted_number}{units} [{Colors.INFO}]{label}[/{Colors.INFO}]" - - if total_characters is not None: - total_characters += len(Colors.INFO) * 2 + 5 - - if len(result) < total_characters: - result = result.rjust(total_characters) - - return result - - -class GenerativeTextBenchmarkerTaskProgressState(BenchmarkerTaskProgressState): - output_tokens: float = 0 - prompt_tokens: float = 0 - output_tokens_rate: float = 0 - total_tokens_rate: float = 0 - tokens_ttft: 
float = 0 - tokens_itl: float = 0 - - @property - def fields(self) -> dict[str, str]: - fields = super().fields - fields["tokens_summary"] = self.formatted_tokens_summary - return fields - @property def formatted_tokens_summary(self) -> str: - if not self.started: + if self.benchmark_status == "pending": return " " return ( - f"[{Colors.INFO}]Tok:[/{Colors.INFO}] " - + BenchmarkerTaskProgressState.format_progress_display( + f"[{Colors.info}]Tok:[/{Colors.info}] " + + format_value_display( value=self.output_tokens_rate, label="gen/s", total_characters=12, @@ -301,7 +718,7 @@ def formatted_tokens_summary(self) -> str: decimal_places=1, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( + + format_value_display( value=self.total_tokens_rate, label="tot/s", total_characters=12, @@ -309,8 +726,8 @@ def formatted_tokens_summary(self) -> str: decimal_places=1, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.tokens_ttft, + + format_value_display( + value=self.time_to_first_token, label="TTFT", units="ms", total_characters=12, @@ -318,8 +735,8 @@ def formatted_tokens_summary(self) -> str: decimal_places=1, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( - value=self.tokens_itl, + + format_value_display( + value=self.inter_token_latency, label="ITL", units="ms", total_characters=12, @@ -327,7 +744,7 @@ def formatted_tokens_summary(self) -> str: decimal_places=1, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( + + format_value_display( value=self.prompt_tokens, label="Prompt", total_characters=12, @@ -335,7 +752,7 @@ def formatted_tokens_summary(self) -> str: decimal_places=0, ) + ", " - + BenchmarkerTaskProgressState.format_progress_display( + + format_value_display( value=self.output_tokens, label="Gen", total_characters=12, @@ -344,377 +761,216 @@ def formatted_tokens_summary(self) -> str: ) ) + @property + def formatted_scheduler_stats(self) -> str: + if self.benchmark_status == "pending": + return " " -BTPS = TypeVar("BTPS", bound=BenchmarkerTaskProgressState) - - -class BenchmarkerProgressDisplay(Generic[BTPS]): - def __init__(self, display_scheduler_stats: bool): - self.display_scheduler_stats = display_scheduler_stats - self.started = False - self.benchmarker_tasks_progress = Progress(*self.create_task_progress_columns()) - self.benchmarker_tasks_panel = Panel( - self.benchmarker_tasks_progress, - title="Benchmarks", - title_align="left", - expand=True, - ) - self.benchmarker_progress = Progress( - TextColumn("Generating...", style=f"italic {Colors.PROGRESS}"), - BarColumn( - bar_width=None, - complete_style=Colors.PROGRESS, - finished_style=Colors.SUCCESS, - ), - TextColumn( - "({task.fields[completed_benchmarks]}/{task.fields[total_benchmarks]})", - style=Colors.PROGRESS, - ), - TextColumn("["), - TimeElapsedColumn(), - TextColumn("<"), - TimeRemainingColumn(), - TextColumn("]"), - ) - self.benchmarker_live = Live( - Group( - self.benchmarker_tasks_panel, - self.benchmarker_progress, - ), - redirect_stdout=True, - redirect_stderr=True, - ) - self.active_task: Optional[TaskID] = None - self.benchmarker_tasks: list[BTPS] = [] - self.progress_task: Optional[TaskID] = None - - def update(self, result: BenchmarkerResult): - if result.type_ == "run_start": - if self.started: - raise RuntimeError("Progress display already started.") - - self.handle_start(result) - self.started = True - elif result.type_ == "run_complete": - if not self.started: - raise RuntimeError("Progress display not started.") - - 
self.handle_end(result) - self.started = False - else: - if not self.started: - raise RuntimeError("Progress display not started.") - - self.handle_update(result) - - def handle_start(self, result: BenchmarkerResult): - self.benchmarker_live.start() - - for index, strategy_type in enumerate(result.profile.strategy_types): - task_id = self.benchmarker_tasks_progress.add_task( - description=strategy_type, - start=False, - total=None, - completed=0, - visible=False, + return ( + f"[{Colors.info}]Sys:[/{Colors.info}] , " + + format_value_display( + value=self.request_targeted_start_delay, + label="Start Del", + units="ms", + total_characters=18, + digits_places=5, + decimal_places=0, ) - task_progress_state = self.create_task_progress_state( - task_id=task_id, - index=index, - strategy_type=strategy_type, - result=result, + + format_value_display( + value=self.scheduler_overheads_time, + label="Sched OH", + units="ms", + total_characters=18, + digits_places=3, + decimal_places=1, ) - self.benchmarker_tasks.append(task_progress_state) - self.benchmarker_tasks_progress.update( - task_id, - description=task_progress_state.description, - visible=True, - **task_progress_state.fields, # type: ignore[arg-type] + + ", " + + format_value_display( + value=self.queued_time, + label="Queued", + units="ms", + total_characters=18, + digits_places=5, + decimal_places=0, ) - - self.progress_task = self.benchmarker_progress.add_task( - "", - total=len(self.benchmarker_tasks) * 1000, - completed_benchmarks=0, - total_benchmarks=len(self.benchmarker_tasks), ) - def handle_update(self, result: BenchmarkerResult): - current_state: BTPS = self.benchmarker_tasks[result.current_index] - - if result.type_ == "scheduler_start": - self.handle_update_scheduler_start(current_state, result) - self.active_task = current_state.task_id - elif result.type_ == "scheduler_update": - self.handle_update_scheduler_update(current_state, result) - elif result.type_ == "scheduler_complete": - self.handle_update_scheduler_complete(current_state, result) - elif result.type_ == "benchmark_compiled": - self.handle_update_benchmark_compiled(current_state, result) - else: - raise ValueError(f"Unknown result type: {result.type_}") + def start(self, strategy: SchedulingStrategy): + self.strategy = strategy + self.strategy_type = strategy.type_ - if self.progress_task is None: - raise RuntimeError("Progress task not set.") - - self.benchmarker_tasks_progress.update( - current_state.task_id, - description=current_state.description, - completed=current_state.completed, - total=current_state.total, - **current_state.fields, # type: ignore[arg-type] - ) - self.benchmarker_progress.update( - self.progress_task, - completed=(result.current_index * 1000) + current_state.completed, - total=1000 * len(self.benchmarker_tasks), - completed_benchmarks=( - result.current_index + (1 if current_state.ended else 0) + def update( + self, aggregator_update: AggregatorState, scheduler_state: SchedulerState + ): + self.progress = ( + (1.0 - scheduler_state.remaining_fraction) + if scheduler_state.remaining_fraction is not None + else 0.0 + ) + status: Literal["in_warmup", "in_progress", "in_cooldown"] | None = ( + "in_progress" # Need to handle requests_in_* isn't in aggregator_update + ) + if aggregator_update.get("requests_in_warmup"): + status = "in_warmup" + elif aggregator_update.get("requests_in_cooldown"): + status = "in_cooldown" + self._update_processing_states( + benchmark_status=status, + start_time=scheduler_state.start_time, + 
successful_requests=scheduler_state.successful_requests, + cancelled_requests=scheduler_state.cancelled_requests, + errored_requests=scheduler_state.errored_requests, + ) + self._update_request_stats( + request_concurrency=aggregator_update.get_metric( + key="requests", type_="avg", prefix="completed" + ), + requests_per_second=aggregator_update.get_metric( + key="requests", + type_="rate", + prefix="completed", + ), + request_latency=aggregator_update.get_metric( + key="request_latency", type_="avg", prefix="completed" ), - total_benchmarks=len(self.benchmarker_tasks), ) - - if current_state.ended: - self.benchmarker_tasks_progress.stop_task(current_state.task_id) - self.active_task = None - - def handle_update_scheduler_start( - self, progress_state: BTPS, result: BenchmarkerResult - ): - if self.active_task is not None: - raise RuntimeError("Active task already set.") - - progress_state.strategy = result.current_strategy # type: ignore[assignment] - progress_state.started = True - current_aggregator: BenchmarkAggregator = result.current_aggregator # type: ignore[assignment] - progress_state.start_time = ( - current_aggregator.requests_stats.totals.total.start_time + self._update_token_stats( + output_tokens=aggregator_update.get_metric( + key="output_tokens", type_="avg", prefix="completed" + ), + output_tokens_rate=aggregator_update.get_metric( + key="output_tokens", type_="rate" + ), + prompt_tokens=aggregator_update.get_metric( + key="prompt_tokens", type_="avg", prefix="completed" + ), + total_tokens_rate=aggregator_update.get_metric( + key="total_tokens", type_="rate" + ), + time_to_first_token=( + aggregator_update.get_metric(key="time_to_first_token", type_="avg") + ), + inter_token_latency=( + aggregator_update.get_metric(key="inter_token_latency", type_="avg") + ), ) - progress_state.max_number = current_aggregator.args.max_number - progress_state.max_duration = current_aggregator.args.max_duration - - def handle_update_scheduler_update( - self, progress_state: BTPS, result: BenchmarkerResult - ): - if self.active_task is None: - raise RuntimeError("Active task not set.") - - if self.active_task != progress_state.task_id: - raise RuntimeError("Active task does not match current task.") + if aggregator_update.get("updated_scheduler_stats"): + self._update_system_stats( + request_targeted_start_delay=( + aggregator_update.get_metric( + key="request_targeted_start_delay", type_="avg", default=0.0 + ) + ), + queued_time=( + aggregator_update.get_metric( + key="queued_time", type_="avg", default=0.0 + ) + ), + scheduler_overheads_time=0.0, # Need to add up metrics here + ) - current_aggregator: BenchmarkAggregator = result.current_aggregator # type: ignore[assignment] - progress_state.in_warmup = current_aggregator.in_warmup - progress_state.in_cooldown = current_aggregator.in_cooldown - progress_state.requests_rate = ( - current_aggregator.requests_stats.totals.successful.rate - ) - progress_state.request_latency = ( - current_aggregator.requests_stats.request_time.mean - ) - progress_state.requests_processing = ( - current_aggregator.scheduler_stats.processing_requests.last - ) - progress_state.requests_successful = ( - current_aggregator.requests_stats.totals.successful.total - ) - progress_state.requests_incomplete = ( - current_aggregator.requests_stats.totals.incomplete.total - ) - progress_state.requests_errored = ( - current_aggregator.requests_stats.totals.errored.total - ) - progress_state.worker_overheads_time_ms = ( - 
current_aggregator.requests_stats.scheduled_time_delay.mean_ms - + current_aggregator.requests_stats.worker_start_delay.mean_ms - ) - progress_state.backend_overheads_time_ms = ( - current_aggregator.requests_stats.request_time_delay.mean_ms - ) - progress_state.requests_sleep_time_ms = ( - current_aggregator.requests_stats.scheduled_time_sleep.mean_ms - ) - progress_state.requests_targeted_start_time_delay_ms = ( - current_aggregator.requests_stats.request_start_time_targeted_delay.mean_ms + def complete(self, benchmark: GenerativeBenchmark): + self._update_processing_states( + benchmark_status="completed", + start_time=benchmark.start_time, + successful_requests=benchmark.request_totals.successful, + cancelled_requests=benchmark.request_totals.incomplete, + errored_requests=benchmark.request_totals.errored, + ) + self._update_request_stats( + request_concurrency=benchmark.metrics.request_concurrency.successful.mean, + requests_per_second=benchmark.metrics.requests_per_second.successful.mean, + request_latency=benchmark.metrics.request_latency.successful.mean, + ) + self._update_token_stats( + output_tokens=benchmark.metrics.output_token_count.successful.mean, + output_tokens_rate=benchmark.metrics.output_tokens_per_second.successful.mean, + prompt_tokens=benchmark.metrics.prompt_token_count.successful.mean, + total_tokens_rate=benchmark.metrics.tokens_per_second.successful.mean, + time_to_first_token=( + benchmark.metrics.time_to_first_token_ms.successful.mean + ), + inter_token_latency=( + benchmark.metrics.inter_token_latency_ms.successful.mean + ), + converted=True, ) - def handle_update_scheduler_complete( + def _update_processing_states( self, - progress_state: BTPS, - result: BenchmarkerResult, # noqa: ARG002 + benchmark_status: Literal[ + "pending", "in_warmup", "in_progress", "in_cooldown", "completed" + ], + start_time: float | None = None, + successful_requests: int | None = None, + cancelled_requests: int | None = None, + errored_requests: int | None = None, ): - if self.active_task is None: - raise RuntimeError("Active task not set.") - - if self.active_task != progress_state.task_id: - raise RuntimeError("Active task does not match current task.") - - progress_state.in_warmup = False - progress_state.in_cooldown = False - progress_state.compiling = True - - def handle_update_benchmark_compiled( - self, progress_state: BTPS, result: BenchmarkerResult - ): - if self.active_task is None: - raise RuntimeError("Active task not set.") - - if self.active_task != progress_state.task_id: - raise RuntimeError("Active task does not match current task.") - - current_benchmark: Benchmark = result.current_benchmark # type: ignore[assignment] - progress_state.compiling = False - progress_state.ended = True - progress_state.requests_rate = ( - current_benchmark.metrics.requests_per_second.successful.mean - ) - progress_state.requests_processing = ( - current_benchmark.metrics.request_concurrency.successful.mean - ) - - def handle_end(self, result: BenchmarkerResult): # noqa: ARG002 - if self.progress_task is None: - raise RuntimeError("Progress task not set.") - - self.benchmarker_progress.update( - self.progress_task, - completed=len(self.benchmarker_tasks) * 1000, - total=len(self.benchmarker_tasks) * 1000, - completed_benchmarks=len(self.benchmarker_tasks), - total_benchmarks=len(self.benchmarker_tasks), - ) - self.benchmarker_progress.stop_task(self.progress_task) - self.benchmarker_live.stop() - self.active_task = None - self.benchmarker_tasks = [] - self.progress_task = None - - def 
create_task_progress_columns(self) -> list[ProgressColumn]: - columns = [ - TextColumn("[{task.fields[start_time]}]"), - SpinnerColumn(style=Colors.PROGRESS), - TaskProgressColumn(style=Colors.PROGRESS), - TextColumn("{task.description}"), - TextColumn("({task.fields[progress_status]})"), - TextColumn(" "), - ] - - if not self.display_scheduler_stats: - columns += [ - TextColumn("{task.fields[requests_summary]}\n"), - ] - else: - columns += [ - TextColumn( - "{task.fields[requests_summary]}\n{task.fields[scheduler_stats]}\n" - ), - ] - - return columns - - def create_task_progress_state( + if self.benchmark_status is not None: + self.benchmark_status = benchmark_status + if start_time is not None: + self.start_time = start_time + if successful_requests is not None: + self.successful_requests = successful_requests + if cancelled_requests is not None: + self.cancelled_requests = cancelled_requests + if errored_requests is not None: + self.errored_requests = errored_requests + + def _update_request_stats( self, - task_id: TaskID, - index: int, # noqa: ARG002 - strategy_type: StrategyType, - result: BenchmarkerResult, # noqa: ARG002 - ) -> BTPS: - return BenchmarkerTaskProgressState( # type: ignore[return-value] - display_scheduler_stats=self.display_scheduler_stats, - task_id=task_id, - strategy=strategy_type, - ) - - -class GenerativeTextBenchmarkerProgressDisplay( - BenchmarkerProgressDisplay[GenerativeTextBenchmarkerTaskProgressState] -): - def handle_update_scheduler_update( - self, - progress_state: GenerativeTextBenchmarkerTaskProgressState, - result: BenchmarkerResult, + request_concurrency: int | None = None, + requests_per_second: float | None = None, + request_latency: float | None = None, ): - super().handle_update_scheduler_update(progress_state, result) - current_aggregator: GenerativeBenchmarkAggregator = result.current_aggregator # type: ignore[assignment] - progress_state.output_tokens = ( - current_aggregator.requests_stats.output_tokens.mean - ) - progress_state.prompt_tokens = ( - current_aggregator.requests_stats.prompt_tokens.mean - ) - progress_state.output_tokens_rate = ( - current_aggregator.requests_stats.output_tokens.rate - ) - progress_state.total_tokens_rate = ( - current_aggregator.requests_stats.total_tokens.rate - ) - progress_state.tokens_ttft = ( - current_aggregator.requests_stats.time_to_first_token.mean_ms - ) - progress_state.tokens_itl = ( - current_aggregator.requests_stats.inter_token_latency.mean_ms - ) - - def handle_update_benchmark_compiled( + if request_concurrency is not None: + self.request_concurrency = request_concurrency + if requests_per_second is not None: + self.requests_per_second = requests_per_second + if request_latency is not None: + self.request_latency = request_latency + + def _update_token_stats( self, - progress_state: GenerativeTextBenchmarkerTaskProgressState, - result: BenchmarkerResult, + output_tokens: int | None = None, + output_tokens_rate: float | None = None, + prompt_tokens: int | None = None, + total_tokens_rate: float | None = None, + time_to_first_token: float | None = None, + inter_token_latency: float | None = None, + converted: bool = False, ): - super().handle_update_benchmark_compiled(progress_state, result) - - current_benchmark: GenerativeBenchmark = result.current_benchmark # type: ignore[assignment] - progress_state.request_latency = ( - current_benchmark.metrics.request_latency.successful.mean - ) - progress_state.requests_successful = current_benchmark.request_totals.successful - 
progress_state.requests_errored = current_benchmark.request_totals.errored - progress_state.requests_incomplete = current_benchmark.request_totals.incomplete - progress_state.output_tokens = ( - current_benchmark.metrics.output_token_count.successful.mean - ) - progress_state.prompt_tokens = ( - current_benchmark.metrics.prompt_token_count.successful.mean - ) - progress_state.output_tokens_rate = ( - current_benchmark.metrics.output_tokens_per_second.successful.mean - ) - progress_state.total_tokens_rate = ( - current_benchmark.metrics.tokens_per_second.successful.mean - ) - progress_state.tokens_ttft = ( - current_benchmark.metrics.time_to_first_token_ms.successful.mean - ) - progress_state.tokens_itl = ( - current_benchmark.metrics.inter_token_latency_ms.successful.mean - ) + if output_tokens is not None: + self.output_tokens = output_tokens + if output_tokens_rate is not None: + self.output_tokens_rate = output_tokens_rate + if prompt_tokens is not None: + self.prompt_tokens = prompt_tokens + if total_tokens_rate is not None: + self.total_tokens_rate = total_tokens_rate + if time_to_first_token is not None: + self.time_to_first_token = time_to_first_token * ( + 1000 if not converted else 1 + ) + if inter_token_latency is not None: + self.inter_token_latency = inter_token_latency * ( + 1000 if not converted else 1 + ) - def create_task_progress_state( + def _update_system_stats( self, - task_id: TaskID, - index: int, # noqa: ARG002 - strategy_type: StrategyType, - result: BenchmarkerResult, # noqa: ARG002 - ) -> GenerativeTextBenchmarkerTaskProgressState: - return GenerativeTextBenchmarkerTaskProgressState( - display_scheduler_stats=self.display_scheduler_stats, - task_id=task_id, - strategy=strategy_type, - ) - - def create_task_progress_columns(self) -> list[ProgressColumn]: - columns = super().create_task_progress_columns() - columns = columns[:-1] # remove the last display info column - - if not self.display_scheduler_stats: - columns += [ - TextColumn( - "{task.fields[requests_summary]}\n{task.fields[tokens_summary]}", - ), - ] - else: - columns += [ - TextColumn( - "{task.fields[requests_summary]}\n{task.fields[tokens_summary]}\n{task.fields[scheduler_stats]}", - ), - ] - - return columns + request_targeted_start_delay: float | None = None, + queued_time: float | None = None, + scheduler_overheads_time: float | None = None, + converted: bool = False, + ): + if request_targeted_start_delay is not None: + self.request_targeted_start_delay = request_targeted_start_delay * ( + 1000 if not converted else 1 + ) + if queued_time is not None: + self.queued_time = queued_time * (1000 if not converted else 1) + if scheduler_overheads_time is not None: + self.scheduler_overheads_time = scheduler_overheads_time * ( + 1000 if not converted else 1 + ) diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 042b25b1..15e3cd81 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from collections.abc import Iterable from functools import cache from pathlib import Path -from typing import Annotated, Any, Literal, Optional, TypeVar, Union +from typing import Annotated, Any, Literal, TypeVar from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from pydantic import BeforeValidator, Field, NonNegativeInt, PositiveFloat, PositiveInt @@ -11,8 +13,8 @@ from guidellm.backend.backend import BackendType from guidellm.benchmark.profile import ProfileType -from 
guidellm.objects.pydantic import StandardBaseModel -from guidellm.scheduler.strategies import StrategyType +from guidellm.scheduler.strategy import StrategyType +from guidellm.utils import StandardBaseModel __ALL__ = ["Scenario", "GenerativeTextScenario", "get_builtin_scenarios"] @@ -25,7 +27,7 @@ def get_builtin_scenarios() -> list[str]: return [p.stem for p in SCENARIO_DIR.glob("*.json")] -def parse_float_list(value: Union[str, float, list[float]]) -> list[float]: +def parse_float_list(value: str | float | list[float]) -> list[float]: """ Parse a comma separated string to a list of float or convert single float list of one or pass float @@ -57,7 +59,7 @@ class Scenario(StandardBaseModel): target: str @classmethod - def from_builtin(cls: type[T], name: str, overrides: Optional[dict] = None) -> T: + def from_builtin(cls: type[T], name: str, overrides: dict | None = None) -> T: filename = SCENARIO_DIR / f"{name}.json" if not filename.is_file(): @@ -77,28 +79,28 @@ class Config: arbitrary_types_allowed = True backend_type: BackendType = "openai_http" - backend_args: Optional[dict[str, Any]] = None - model: Optional[str] = None - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]] = None - processor_args: Optional[dict[str, Any]] = None - data: Union[ - str, - Path, - Iterable[Union[str, dict[str, Any]]], - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, - ] - data_args: Optional[dict[str, Any]] = None - data_sampler: Optional[Literal["random"]] = None - rate_type: Union[StrategyType, ProfileType] - rate: Annotated[ - Optional[list[PositiveFloat]], BeforeValidator(parse_float_list) - ] = None - max_seconds: Optional[PositiveFloat] = None - max_requests: Optional[PositiveInt] = None - warmup_percent: Annotated[Optional[float], Field(gt=0, le=1)] = None - cooldown_percent: Annotated[Optional[float], Field(gt=0, le=1)] = None - output_sampling: Optional[NonNegativeInt] = None + backend_args: dict[str, Any] | None = None + model: str | None = None + processor: str | Path | PreTrainedTokenizerBase | None = None + processor_args: dict[str, Any] | None = None + data: ( + str + | Path + | Iterable[str | dict[str, Any]] + | Dataset + | DatasetDict + | IterableDataset + | IterableDatasetDict + ) + data_args: dict[str, Any] | None = None + data_sampler: Literal["random"] | None = None + rate_type: StrategyType | ProfileType + rate: Annotated[list[PositiveFloat] | None, BeforeValidator(parse_float_list)] = ( + None + ) + max_seconds: PositiveFloat | None = None + max_requests: PositiveInt | None = None + warmup_percent: Annotated[float | None, Field(gt=0, le=1)] = None + cooldown_percent: Annotated[float | None, Field(gt=0, le=1)] = None + output_sampling: NonNegativeInt | None = None random_seed: int = 42 From 48347679e4294a2443bb577eb131c904fd425fc8 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 12:20:33 +0000 Subject: [PATCH 14/90] fixes and rebase Signed-off-by: Mark Kurtz --- src/guidellm/benchmark/aggregator.py | 2 +- src/guidellm/benchmark/entrypoints.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index e0f34218..ed4bb34b 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -36,7 +36,7 @@ from pydantic import Field, PrivateAttr -from guidellm.backend import ( +from guidellm.backends import ( GenerationRequest, GenerationResponse, ) diff --git a/src/guidellm/benchmark/entrypoints.py 
b/src/guidellm/benchmark/entrypoints.py index 82f92ceb..60077ee8 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -9,7 +9,7 @@ PreTrainedTokenizerBase, ) -from guidellm.backend import ( +from guidellm.backends import ( Backend, BackendType, GenerationRequest, From 61736f5af150cecce3563580f9000d6f26fdd2e1 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 12:31:00 +0000 Subject: [PATCH 15/90] fixes from copilot review Signed-off-by: Mark Kurtz --- src/guidellm/benchmark/aggregator.py | 2 +- src/guidellm/benchmark/progress.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index ed4bb34b..9db93a12 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -911,7 +911,7 @@ def _is_in_cooldown( ) if self.cooldown >= 1: # Count/time-based cooldown - if scheduler_state.remaining_requests < self.cooldown: + if scheduler_state.remaining_requests <= self.cooldown: return True current_time = ( diff --git a/src/guidellm/benchmark/progress.py b/src/guidellm/benchmark/progress.py index d6b881bc..f93b3a83 100644 --- a/src/guidellm/benchmark/progress.py +++ b/src/guidellm/benchmark/progress.py @@ -906,7 +906,7 @@ def _update_processing_states( cancelled_requests: int | None = None, errored_requests: int | None = None, ): - if self.benchmark_status is not None: + if benchmark_status is not None: self.benchmark_status = benchmark_status if start_time is not None: self.start_time = start_time From a28bbe36e64171c8f41414e9cd57cb06bc4ed69b Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 11:48:24 +0000 Subject: [PATCH 16/90] Mock server implementation for guidellm --- src/guidellm/mock_server/__init__.py | 8 + src/guidellm/mock_server/config.py | 84 +++ src/guidellm/mock_server/handlers/__init__.py | 17 + .../mock_server/handlers/chat_completions.py | 280 ++++++++++ .../mock_server/handlers/completions.py | 280 ++++++++++ .../mock_server/handlers/tokenizer.py | 142 +++++ src/guidellm/mock_server/models.py | 510 +++++++++++++++++ src/guidellm/mock_server/server.py | 168 ++++++ src/guidellm/mock_server/utils.py | 307 +++++++++++ tests/unit/mock_server/__init__.py | 1 + tests/unit/mock_server/test_server.py | 518 ++++++++++++++++++ 11 files changed, 2315 insertions(+) create mode 100644 src/guidellm/mock_server/__init__.py create mode 100644 src/guidellm/mock_server/config.py create mode 100644 src/guidellm/mock_server/handlers/__init__.py create mode 100644 src/guidellm/mock_server/handlers/chat_completions.py create mode 100644 src/guidellm/mock_server/handlers/completions.py create mode 100644 src/guidellm/mock_server/handlers/tokenizer.py create mode 100644 src/guidellm/mock_server/models.py create mode 100644 src/guidellm/mock_server/server.py create mode 100644 src/guidellm/mock_server/utils.py create mode 100644 tests/unit/mock_server/__init__.py create mode 100644 tests/unit/mock_server/test_server.py diff --git a/src/guidellm/mock_server/__init__.py b/src/guidellm/mock_server/__init__.py new file mode 100644 index 00000000..f76e98fb --- /dev/null +++ b/src/guidellm/mock_server/__init__.py @@ -0,0 +1,8 @@ +""" +GuideLLM Mock Server for OpenAI and vLLM API compatibility. 
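+
+Example (hypothetical usage; the MockServer constructor shown here is assumed,
+see server.py for the actual API)::
+
+    config = MockServerConfig(host="127.0.0.1", port=8000)
+    server = MockServer(config)  # assumed signature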
+""" + +from .config import MockServerConfig +from .server import MockServer + +__all__ = ["MockServer", "MockServerConfig"] diff --git a/src/guidellm/mock_server/config.py b/src/guidellm/mock_server/config.py new file mode 100644 index 00000000..27d1d742 --- /dev/null +++ b/src/guidellm/mock_server/config.py @@ -0,0 +1,84 @@ +""" +Configuration settings for the mock server component. + +Provides centralized configuration management for mock server behavior including +network binding, model identification, response timing characteristics, and token +generation parameters. Supports environment variable configuration for deployment +flexibility with automatic validation through Pydantic settings. +""" + +from __future__ import annotations + +from pydantic import Field +from pydantic_settings import BaseSettings + +__all__ = ["MockServerConfig"] + + +class MockServerConfig(BaseSettings): + """ + Configuration settings for mock server behavior and deployment. + + Centralizes all configurable parameters for mock server operation including + network settings, model identification, response timing characteristics, and + token generation behavior. Environment variables with GUIDELLM_MOCK_SERVER_ + prefix override default values for deployment flexibility. + + Example: + :: + config = MockServerConfig(host="0.0.0.0", port=8080, model="custom-model") + # Use with environment variables: + # GUIDELLM_MOCK_SERVER_HOST=127.0.0.1 GUIDELLM_MOCK_SERVER_PORT=9000 + """ + + host: str = Field( + default="127.0.0.1", description="Host address to bind the server to" + ) + port: int = Field(default=8000, description="Port number to bind the server to") + workers: int = Field(default=1, description="Number of worker processes to spawn") + model: str = Field( + default="llama-3.1-8b-instruct", + description="Model name to present in API responses", + ) + processor: str | None = Field( + default=None, + description=( + "Processor type to use for token stats, tokenize, and detokenize. " + "If None, a mock one is created." + ), + ) + request_latency: float = Field( + default=3.0, + description="Base request latency in seconds for non-streaming responses", + ) + request_latency_std: float = Field( + default=0.0, + description="Standard deviation for request latency variation", + ) + ttft_ms: float = Field( + default=150.0, + description="Time to first token in milliseconds for streaming responses", + ) + ttft_ms_std: float = Field( + default=0.0, + description="Standard deviation for time to first token variation", + ) + itl_ms: float = Field( + default=10.0, + description="Inter-token latency in milliseconds for streaming responses", + ) + itl_ms_std: float = Field( + default=0.0, + description="Standard deviation for inter-token latency variation", + ) + output_tokens: int = Field( + default=128, description="Number of output tokens to generate in responses" + ) + output_tokens_std: float = Field( + default=0.0, + description="Standard deviation for output token count variation", + ) + + class Config: + env_prefix = "GUIDELLM_MOCK_SERVER_" + case_sensitive = False diff --git a/src/guidellm/mock_server/handlers/__init__.py b/src/guidellm/mock_server/handlers/__init__.py new file mode 100644 index 00000000..7dbc209f --- /dev/null +++ b/src/guidellm/mock_server/handlers/__init__.py @@ -0,0 +1,17 @@ +""" +HTTP request handlers for the GuideLLM mock server. + +This module exposes request handlers that implement OpenAI-compatible API endpoints +for the mock server. 
The handlers provide realistic LLM simulation capabilities +including chat completions, legacy completions, and tokenization services with +configurable timing characteristics, token counting, and proper error handling to +support comprehensive benchmarking and testing scenarios. +""" + +from __future__ import annotations + +from .chat_completions import ChatCompletionsHandler +from .completions import CompletionsHandler +from .tokenizer import TokenizerHandler + +__all__ = ["ChatCompletionsHandler", "CompletionsHandler", "TokenizerHandler"] diff --git a/src/guidellm/mock_server/handlers/chat_completions.py b/src/guidellm/mock_server/handlers/chat_completions.py new file mode 100644 index 00000000..976901f9 --- /dev/null +++ b/src/guidellm/mock_server/handlers/chat_completions.py @@ -0,0 +1,280 @@ +""" +OpenAI Chat Completions API endpoint handler for the mock server. + +Provides a complete implementation of the /v1/chat/completions endpoint that simulates +realistic LLM behavior with configurable timing characteristics. Supports both streaming +and non-streaming responses with proper token counting, latency simulation including +TTFT (Time To First Token) and ITL (Inter-Token Latency), and OpenAI-compatible error +handling for comprehensive benchmarking scenarios. +""" + +from __future__ import annotations + +import asyncio +import json +import math +import time +import uuid + +from pydantic import ValidationError +from sanic import response +from sanic.request import Request +from sanic.response import HTTPResponse, ResponseStream +from transformers import PreTrainedTokenizer + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.models import ( + ChatCompletionChoice, + ChatCompletionsRequest, + ChatCompletionsResponse, + ChatMessage, + ErrorDetail, + ErrorResponse, + Usage, +) +from guidellm.mock_server.utils import ( + MockTokenizer, + create_fake_text, + create_fake_tokens_str, + sample_number, + times_generator, +) + +__all__ = ["ChatCompletionsHandler"] + + +class ChatCompletionsHandler: + """ + Handles OpenAI Chat Completions API requests with realistic LLM simulation. + + Implements the /v1/chat/completions endpoint behavior including request validation, + response generation, and timing simulation. Supports both streaming and + non-streaming modes with configurable latency characteristics for comprehensive + benchmarking. Uses either a mock tokenizer or a real tokenizer for accurate token + counting and realistic text generation. + + Example: + :: + config = MockServerConfig(ttft_ms=100, itl_ms=50) + handler = ChatCompletionsHandler(config) + response = await handler.handle(request) + """ + + def __init__(self, config: MockServerConfig) -> None: + """ + Initialize the Chat Completions handler with server configuration. + + :param config: Mock server configuration containing timing and behavior settings + """ + self.config = config + self.tokenizer = ( + MockTokenizer() + if config.processor is None + else PreTrainedTokenizer.from_pretrained(config.processor) + ) + + async def handle(self, request: Request) -> HTTPResponse: + """ + Process incoming chat completion requests with validation and routing. + + Validates the request payload, handles errors gracefully, and routes to + appropriate streaming or non-streaming response handlers based on the + request configuration. 
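+
+        Example (illustrative request body; field values are arbitrary):
+            ::
+                {"model": "llama-3.1-8b-instruct",
+                 "messages": [{"role": "user", "content": "Hello"}],
+                 "stream": false}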
+ + :param request: Sanic HTTP request containing chat completion parameters + :return: HTTP response with completion data or error information + :raises ValidationError: When request payload fails validation + :raises JSONDecodeError: When request contains invalid JSON + """ + try: + # Parse and validate request + req_data = ChatCompletionsRequest(**request.json) + except ValidationError as exc: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(exc)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (json.JSONDecodeError, TypeError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + # Handle streaming vs non-streaming + if req_data.stream: + return await self._handle_stream(req_data) + else: + return await self._handle_non_stream(req_data) + + async def _handle_non_stream(self, req: ChatCompletionsRequest) -> HTTPResponse: + """ + Generate complete non-streaming chat completion response. + + Simulates realistic LLM behavior with TTFT and ITL delays, generates + appropriate token counts, and returns a complete response with usage + statistics and generated content. + + :param req: Validated chat completion request parameters + :return: Complete HTTP response with generated completion data + """ + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_text = self.tokenizer.apply_chat_template(req.messages) + prompt_tokens = len(self.tokenizer(prompt_text)) + max_tokens = req.max_completion_tokens or req.max_tokens or math.inf + completion_tokens_count = min( + sample_number(self.config.output_tokens, self.config.output_tokens_std), + max_tokens, + ) + + # ITL delay + itl_delay = 0.0 + delays_iter = iter(times_generator(self.config.itl_ms, self.config.itl_ms_std)) + for _ in range(int(completion_tokens_count) - 1): + itl_delay += next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + # Response + chat_response = ChatCompletionsResponse( + id=f"chatcmpl-{uuid.uuid4().hex[:29]}", + model=req.model, + choices=[ + ChatCompletionChoice( + index=0, + message=ChatMessage( + role="assistant", + content=create_fake_text( + int(completion_tokens_count), self.tokenizer + ), + ), + finish_reason="stop", + ) + ], + usage=Usage( + prompt_tokens=prompt_tokens, + completion_tokens=int(completion_tokens_count), + ), + system_fingerprint=f"fp_{uuid.uuid4().hex[:10]}", + ) + + return response.json(chat_response.model_dump()) + + async def _handle_stream(self, req: ChatCompletionsRequest) -> HTTPResponse: + """ + Generate streaming chat completion response with real-time token delivery. + + Creates a streaming response that delivers tokens incrementally with + realistic timing delays. Supports optional usage statistics in the final + stream chunk when requested via stream_options. 
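+
+        Example (illustrative shape of one emitted SSE event; other fields elided):
+            ::
+                data: {"object": "chat.completion.chunk",
+                       "choices": [{"index": 0, "delta": {"content": "..."},
+                                    "finish_reason": null}], ...}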
+ + :param req: Validated chat completion request with streaming enabled + :return: Streaming HTTP response delivering tokens with proper timing + """ + + async def generate_stream(stream_response): + completion_id = f"chatcmpl-{uuid.uuid4().hex[:29]}" + + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_text = self.tokenizer.apply_chat_template(req.messages) + prompt_tokens = len(self.tokenizer(prompt_text)) + max_tokens = req.max_completion_tokens or req.max_tokens or math.inf + completion_tokens_count = int( + min( + sample_number( + self.config.output_tokens, self.config.output_tokens_std + ), + max_tokens, + ) + ) + + # Send tokens + tokens = create_fake_tokens_str(completion_tokens_count, self.tokenizer) + delays_iter = iter( + times_generator(self.config.itl_ms, self.config.itl_ms_std) + ) + + for index, token in enumerate(tokens): + if index > 0: + itl_delay = next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + chunk_data = { + "id": completion_id, + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "index": 0, + "delta": {"content": token}, + "finish_reason": None, + } + ], + } + await stream_response.write(f"data: {json.dumps(chunk_data)}\n\n") + + # Send final chunk with finish reason + final_chunk = { + "id": completion_id, + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "index": 0, + "delta": {}, + "finish_reason": "stop", + } + ], + } + await stream_response.write(f"data: {json.dumps(final_chunk)}\n\n") + + # Send usage if requested + if req.stream_options and req.stream_options.get("include_usage"): + usage_chunk = { + "id": completion_id, + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": req.model, + "choices": [], + "usage": { + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens_count, + "total_tokens": prompt_tokens + completion_tokens_count, + }, + } + await stream_response.write(f"data: {json.dumps(usage_chunk)}\n\n") + + # End stream + await stream_response.write("data: [DONE]\n\n") + + return ResponseStream( # type: ignore[return-value] + generate_stream, + content_type="text/event-stream", + headers={ + "Cache-Control": "no-cache", + "Connection": "keep-alive", + "X-Accel-Buffering": "no", + }, + ) diff --git a/src/guidellm/mock_server/handlers/completions.py b/src/guidellm/mock_server/handlers/completions.py new file mode 100644 index 00000000..418d2b3c --- /dev/null +++ b/src/guidellm/mock_server/handlers/completions.py @@ -0,0 +1,280 @@ +""" +Legacy OpenAI Completions API handler for the mock server. + +This module provides the CompletionsHandler class that implements the /v1/completions +endpoint for the guidellm mock server. It supports both streaming and non-streaming +completions with configurable timing parameters (TTFT, ITL) and token generation to +simulate realistic LLM behavior for benchmarking and testing purposes. 
+""" + +from __future__ import annotations + +import asyncio +import json +import math +import time +import uuid + +from pydantic import ValidationError +from sanic import response +from sanic.request import Request +from sanic.response import HTTPResponse, ResponseStream +from transformers import PreTrainedTokenizer + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.models import ( + CompletionChoice, + CompletionsRequest, + CompletionsResponse, + ErrorDetail, + ErrorResponse, + Usage, +) +from guidellm.mock_server.utils import ( + MockTokenizer, + create_fake_text, + create_fake_tokens_str, + sample_number, + times_generator, +) + +__all__ = ["CompletionsHandler"] + + +class CompletionsHandler: + """ + Handler for the OpenAI /v1/completions endpoint in the mock server. + + This handler simulates the legacy OpenAI completions API by processing incoming + requests and generating responses with configurable timing and token generation + patterns. It supports both streaming and non-streaming modes, applying realistic + timing delays (TTFT and ITL) to mimic actual LLM behavior for benchmarking. + + Example: + :: + config = MockServerConfig(ttft_ms=100, itl_ms=50) + handler = CompletionsHandler(config) + response = await handler.handle(sanic_request) + """ + + def __init__(self, config: MockServerConfig) -> None: + """ + Initialize the completions handler with configuration settings. + + :param config: Mock server configuration containing timing parameters + and tokenizer settings + """ + self.config = config + self.tokenizer = ( + MockTokenizer() + if config.processor is None + else PreTrainedTokenizer.from_pretrained(config.processor) + ) + + async def handle(self, request: Request) -> HTTPResponse: + """ + Process a completions request and return the appropriate response. + + Validates the incoming request, determines whether to use streaming or + non-streaming mode, and delegates to the appropriate handler method. + + :param request: Sanic request object containing the completions request data + :return: HTTP response with completion data or error information + :raises ValidationError: When request validation fails + :raises json.JSONDecodeError: When request JSON is malformed + """ + try: + # Parse and validate request + req_data = CompletionsRequest(**request.json) + except ValidationError as e: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(e)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (json.JSONDecodeError, TypeError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + # Handle streaming vs non-streaming + if req_data.stream: + return await self._handle_stream(req_data) + else: + return await self._handle_non_stream(req_data) + + async def _handle_non_stream(self, req: CompletionsRequest) -> HTTPResponse: + """ + Generate a non-streaming completion response. + + Simulates TTFT and ITL delays, generates appropriate token counts, and returns + a complete response with the generated text and usage statistics. 
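+
+        The total simulated latency is approximately
+        TTFT + (completion_tokens - 1) * ITL, matching the streaming path
+        without emitting intermediate chunks.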
+ + :param req: Validated completions request containing prompt and parameters + :return: JSON HTTP response with completion text and usage data + :raises NotImplementedError: When batch processing is requested + """ + if isinstance(req.prompt, list): + raise NotImplementedError("Batch processing is not supported.") + + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_tokens = len(self.tokenizer(req.prompt)) + max_tokens = req.max_tokens or math.inf + completion_tokens_count = int( + min( + sample_number(self.config.output_tokens, self.config.output_tokens_std), + max_tokens, + ) + if req.stop + else max_tokens + ) + + # ITL delay + itl_delay = 0.0 + delays_iter = iter(times_generator(self.config.itl_ms, self.config.itl_ms_std)) + for _ in range(int(completion_tokens_count) - 1): + itl_delay += next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + # Response + completion_response = CompletionsResponse( + id=f"cmpl-{uuid.uuid4().hex[:29]}", + model=req.model, + choices=[ + CompletionChoice( + text=create_fake_text(completion_tokens_count, self.tokenizer), + index=0, + finish_reason="stop", + ) + ], + usage=Usage( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens_count, + ), + system_fingerprint=f"fp_{uuid.uuid4().hex[:10]}", + ) + + return response.json(completion_response.model_dump()) + + async def _handle_stream(self, req: CompletionsRequest) -> HTTPResponse: + """ + Generate a streaming completion response. + + Creates a server-sent events stream that delivers tokens incrementally with + realistic timing delays between each token. Includes usage statistics if + requested and properly terminates the stream. + + :param req: Validated completions request containing prompt and streaming + options + :return: ResponseStream object that generates server-sent events + """ + + async def generate_stream(stream_response): + completion_id = f"cmpl-{uuid.uuid4().hex[:29]}" + + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_tokens = len(self.tokenizer(req.prompt)) + max_tokens = req.max_tokens or math.inf + completion_tokens_count = int( + min( + sample_number( + self.config.output_tokens, self.config.output_tokens_std + ), + max_tokens, + ) + if req.stop + else max_tokens + ) + + # Send tokens + tokens = create_fake_tokens_str(completion_tokens_count, self.tokenizer) + delays_iter = iter( + times_generator(self.config.itl_ms, self.config.itl_ms_std) + ) + + for index, token in enumerate(tokens): + if index > 0: + itl_delay = next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + chunk_data = { + "id": completion_id, + "object": "text_completion", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "text": token, + "index": index, + "finish_reason": None, + } + ], + } + await stream_response.write(f"data: {json.dumps(chunk_data)}\n\n") + + # Send final chunk with finish reason + final_chunk = { + "id": completion_id, + "object": "text_completion", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "text": "", + "index": index, + "finish_reason": "stop", + } + ], + } + await stream_response.write(f"data: {json.dumps(final_chunk)}\n\n") + + # Send usage if requested + if req.stream_options and req.stream_options.get("include_usage"): + usage_chunk = { + "id": completion_id, + "object": "text_completion", + "created": int(time.time()), + 
"model": req.model, + "choices": [], + "usage": { + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens_count, + "total_tokens": prompt_tokens + completion_tokens_count, + }, + } + await stream_response.write(f"data: {json.dumps(usage_chunk)}\n\n") + + # End stream + await stream_response.write("data: [DONE]\n\n") + + return ResponseStream( # type: ignore[return-value] + generate_stream, + content_type="text/event-stream", + headers={ + "Cache-Control": "no-cache", + "Connection": "keep-alive", + "X-Accel-Buffering": "no", + }, + ) diff --git a/src/guidellm/mock_server/handlers/tokenizer.py b/src/guidellm/mock_server/handlers/tokenizer.py new file mode 100644 index 00000000..430ac0ef --- /dev/null +++ b/src/guidellm/mock_server/handlers/tokenizer.py @@ -0,0 +1,142 @@ +""" +HTTP request handler for vLLM tokenization API endpoints in the mock server. + +This module provides the TokenizerHandler class that implements vLLM-compatible +tokenization and detokenization endpoints for testing and development purposes. +It handles text-to-token conversion, token-to-text reconstruction, request +validation, and error responses with proper HTTP status codes and JSON formatting. +""" + +from __future__ import annotations + +from pydantic import ValidationError +from sanic import response +from sanic.request import Request +from sanic.response import HTTPResponse +from transformers.tokenization_utils import PreTrainedTokenizer + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.models import ( + DetokenizeRequest, + DetokenizeResponse, + ErrorDetail, + ErrorResponse, + TokenizeRequest, + TokenizeResponse, +) +from guidellm.mock_server.utils import MockTokenizer + +__all__ = ["TokenizerHandler"] + + +class TokenizerHandler: + """ + HTTP request handler for vLLM tokenization and detokenization endpoints. + + Provides mock implementations of vLLM's tokenization API endpoints including + /tokenize for converting text to tokens and /detokenize for reconstructing + text from token sequences. Handles request validation, error responses, and + JSON serialization with proper HTTP status codes. + + Example: + :: + handler = TokenizerHandler(config) + response = await handler.tokenize(request) + response = await handler.detokenize(request) + """ + + def __init__(self, config: MockServerConfig) -> None: + """ + Initialize the tokenizer handler with configuration. + + :param config: Server configuration object containing tokenizer settings + """ + self.config = config + self.tokenizer = ( + MockTokenizer() + if config.processor is None + else PreTrainedTokenizer.from_pretrained(config.processor) + ) + + async def tokenize(self, request: Request) -> HTTPResponse: + """ + Convert input text to token IDs via the /tokenize endpoint. + + Validates the request payload, extracts text content, and returns a JSON + response containing the token sequence and count. Handles validation errors + and malformed JSON with appropriate HTTP error responses. 
+ + :param request: Sanic HTTP request containing JSON payload with text field + :return: JSON response with tokens list and count, or error response + """ + try: + req_data = TokenizeRequest(**request.json) + except ValidationError as exc: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(exc)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (ValueError, TypeError, KeyError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + tokens = self.tokenizer.tokenize(req_data.text) + token_ids = self.tokenizer.convert_tokens_to_ids(tokens) + + return response.json( + TokenizeResponse(tokens=token_ids, count=len(token_ids)).model_dump() + ) + + async def detokenize(self, request: Request) -> HTTPResponse: + """ + Convert token IDs back to text via the /detokenize endpoint. + + Validates the request payload, extracts token sequences, and returns a JSON + response containing the reconstructed text. Handles validation errors and + malformed JSON with appropriate HTTP error responses. + + :param request: Sanic HTTP request containing JSON payload with tokens field + :return: JSON response with reconstructed text, or error response + """ + try: + req_data = DetokenizeRequest(**request.json) + except ValidationError as exc: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(exc)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (ValueError, TypeError, KeyError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + text = self.tokenizer.decode(req_data.tokens, skip_special_tokens=False) + + return response.json(DetokenizeResponse(text=text).model_dump()) diff --git a/src/guidellm/mock_server/models.py b/src/guidellm/mock_server/models.py new file mode 100644 index 00000000..cd342f7a --- /dev/null +++ b/src/guidellm/mock_server/models.py @@ -0,0 +1,510 @@ +""" +Pydantic models for OpenAI API and vLLM API request/response validation. + +This module defines comprehensive data models for validating and serializing API +requests and responses compatible with both OpenAI's API specification and vLLM's +extended parameters. It includes models for chat completions, legacy text completions, +tokenization operations, and error handling, supporting both streaming and non-streaming +responses with full type safety and validation. +""" + +from __future__ import annotations + +import time +from typing import Any, Literal + +from pydantic import BaseModel, Field + +__all__ = [ + "ChatCompletionChoice", + "ChatCompletionChunk", + "ChatCompletionsRequest", + "ChatCompletionsResponse", + "ChatMessage", + "CompletionChoice", + "CompletionsRequest", + "CompletionsResponse", + "DetokenizeRequest", + "DetokenizeResponse", + "ErrorDetail", + "ErrorResponse", + "StreamOptions", + "TokenizeRequest", + "TokenizeResponse", + "Usage", +] + + +class Usage(BaseModel): + """Token usage statistics for API requests and responses. + + Tracks the number of tokens consumed in prompts, completions, and total + usage for billing and monitoring purposes. 
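+
+    Example (illustrative; the total is derived automatically):
+        ::
+            usage = Usage(prompt_tokens=12, completion_tokens=4)
+            assert usage.total_tokens == 16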
+ """ + + prompt_tokens: int = Field(description="Number of tokens in the input prompt") + completion_tokens: int = Field( + description="Number of tokens in the generated completion" + ) + total_tokens: int = Field(description="Total tokens used (prompt + completion)") + + def __init__(self, prompt_tokens: int = 0, completion_tokens: int = 0, **kwargs): + """Initialize usage statistics. + + :param prompt_tokens: Number of tokens in the input prompt + :param completion_tokens: Number of tokens in the generated completion + :param kwargs: Additional keyword arguments passed to BaseModel + """ + super().__init__( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + **kwargs, + ) + + +class StreamOptions(BaseModel): + """Configuration options for streaming API responses. + + Controls the behavior and content of streamed responses including + whether to include usage statistics in the final chunk. + """ + + include_usage: bool | None = Field( + default=None, + description="Whether to include usage statistics in streaming responses", + ) + + +class ChatMessage(BaseModel): + """A single message in a chat conversation. + + Represents one exchange in a conversational interface with role-based + content and optional metadata for advanced features. + """ + + role: Literal["system", "user", "assistant", "tool"] = Field( + description="Role of the message sender in the conversation" + ) + content: str = Field(description="Text content of the message") + name: str | None = Field( + default=None, description="Optional name identifier for the message sender" + ) + + +class ChatCompletionsRequest(BaseModel): + """Request parameters for chat completion API endpoints. + + Comprehensive model supporting both OpenAI standard parameters and vLLM + extensions for advanced generation control, guided decoding, and performance + optimization. 
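+
+    Example (illustrative; only a few of the supported fields are shown):
+        ::
+            ChatCompletionsRequest(
+                model="llama-3.1-8b-instruct",
+                messages=[ChatMessage(role="user", content="Hello")],
+                max_tokens=32,
+            )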
+ """ + + model: str = Field(description="Model identifier to use for generation") + messages: list[ChatMessage] = Field( + description="List of messages in the conversation" + ) + max_tokens: int | None = Field( + default=None, description="Maximum number of tokens to generate" + ) + max_completion_tokens: int | None = Field( + default=None, description="Maximum tokens in completion (OpenAI naming)" + ) + temperature: float | None = Field( + default=1.0, description="Sampling temperature for randomness control" + ) + top_p: float | None = Field(default=1.0, description="Nucleus sampling parameter") + n: int | None = Field( + default=1, description="Number of completion choices to generate" + ) + stream: bool | None = Field( + default=False, description="Whether to stream response chunks" + ) + stream_options: StreamOptions | None = Field( + default=None, description="Configuration for streaming responses" + ) + stop: str | list[str] | None = Field( + default=None, description="Stop sequences to end generation" + ) + presence_penalty: float | None = Field( + default=0.0, description="Penalty for token presence to encourage diversity" + ) + frequency_penalty: float | None = Field( + default=0.0, description="Penalty for token frequency to reduce repetition" + ) + logit_bias: dict[str, float] | None = Field( + default=None, description="Bias values for specific tokens" + ) + seed: int | None = Field( + default=None, description="Random seed for reproducible outputs" + ) + user: str | None = Field( + default=None, description="User identifier for tracking and abuse monitoring" + ) + + # vLLM extensions + use_beam_search: bool | None = Field( + default=False, description="Enable beam search for better quality" + ) + top_k: int | None = Field(default=None, description="Top-k sampling parameter") + min_p: float | None = Field( + default=None, description="Minimum probability threshold for sampling" + ) + repetition_penalty: float | None = Field( + default=None, description="Penalty for repeated tokens" + ) + length_penalty: float | None = Field( + default=1.0, description="Length penalty for sequence scoring" + ) + stop_token_ids: list[int] | None = Field( + default=None, description="Token IDs that trigger generation stop" + ) + include_stop_str_in_output: bool | None = Field( + default=False, description="Include stop sequence in output" + ) + ignore_eos: bool | None = Field( + default=False, description="Ignore end-of-sequence tokens" + ) + min_tokens: int | None = Field( + default=0, description="Minimum number of tokens to generate" + ) + skip_special_tokens: bool | None = Field( + default=True, description="Skip special tokens in output" + ) + spaces_between_special_tokens: bool | None = Field( + default=True, description="Add spaces between special tokens" + ) + truncate_prompt_tokens: int | None = Field( + default=None, description="Maximum prompt tokens before truncation" + ) + allowed_token_ids: list[int] | None = Field( + default=None, description="Restrict generation to specific token IDs" + ) + prompt_logprobs: int | None = Field( + default=None, description="Number of logprobs to return for prompt tokens" + ) + add_special_tokens: bool | None = Field( + default=True, description="Add special tokens during processing" + ) + guided_json: str | dict[str, Any] | None = Field( + default=None, description="JSON schema for guided generation" + ) + guided_regex: str | None = Field( + default=None, description="Regex pattern for guided generation" + ) + guided_choice: list[str] | None = 
Field( + default=None, description="List of choices for guided generation" + ) + guided_grammar: str | None = Field( + default=None, description="Grammar specification for guided generation" + ) + guided_decoding_backend: str | None = Field( + default=None, description="Backend to use for guided decoding" + ) + guided_whitespace_pattern: str | None = Field( + default=None, description="Whitespace pattern for guided generation" + ) + priority: int | None = Field( + default=0, description="Request priority for scheduling" + ) + + +class ChatCompletionChoice(BaseModel): + """A single completion choice from a chat completion response. + + Contains the generated message and metadata about why generation + stopped and the choice's position in the response. + """ + + index: int = Field(description="Index of this choice in the response") + message: ChatMessage = Field(description="Generated message content") + finish_reason: Literal["stop", "length", "content_filter", "tool_calls"] | None = ( + Field(description="Reason why generation finished") + ) + + +class ChatCompletionsResponse(BaseModel): + """Response from chat completion API endpoints. + + Contains generated choices, usage statistics, and metadata for + non-streaming chat completion requests. + """ + + id: str = Field(description="Unique identifier for this completion") + object: Literal["chat.completion"] = Field( + default="chat.completion", description="Object type identifier" + ) + created: int = Field( + default_factory=lambda: int(time.time()), + description="Unix timestamp of creation", + ) + model: str = Field(description="Model used for generation") + choices: list[ChatCompletionChoice] = Field( + description="Generated completion choices" + ) + usage: Usage | None = Field(default=None, description="Token usage statistics") + system_fingerprint: str | None = Field( + default=None, description="System configuration fingerprint" + ) + + +class ChatCompletionChunk(BaseModel): + """A single chunk in a streamed chat completion response. + + Represents one piece of a streaming response with delta content + and optional usage statistics in the final chunk. + """ + + id: str = Field(description="Unique identifier for this completion") + object: Literal["chat.completion.chunk"] = Field( + default="chat.completion.chunk", + description="Object type identifier for streaming chunks", + ) + created: int = Field( + default_factory=lambda: int(time.time()), + description="Unix timestamp of creation", + ) + model: str = Field(description="Model used for generation") + choices: list[dict[str, Any]] = Field(description="Delta choices for streaming") + usage: Usage | None = Field( + default=None, description="Token usage statistics (typically in final chunk)" + ) + + +class CompletionsRequest(BaseModel): + """Request parameters for legacy text completion API endpoints. + + Supports the older text completion format with prompt-based input + and the same extensive parameter set as chat completions for + backward compatibility. 
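+
+    Example (illustrative):
+        ::
+            CompletionsRequest(
+                model="llama-3.1-8b-instruct",
+                prompt="Once upon a time",
+                max_tokens=16,
+            )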
+ """ + + model: str = Field(description="Model identifier to use for generation") + prompt: str | list[str] = Field(description="Input prompt(s) for completion") + max_tokens: int | None = Field( + default=16, description="Maximum number of tokens to generate" + ) + temperature: float | None = Field( + default=1.0, description="Sampling temperature for randomness control" + ) + top_p: float | None = Field(default=1.0, description="Nucleus sampling parameter") + n: int | None = Field( + default=1, description="Number of completion choices to generate" + ) + stream: bool | None = Field( + default=False, description="Whether to stream response chunks" + ) + stream_options: StreamOptions | None = Field( + default=None, description="Configuration for streaming responses" + ) + logprobs: int | None = Field( + default=None, description="Number of logprobs to return" + ) + echo: bool | None = Field( + default=False, description="Whether to echo the prompt in output" + ) + stop: str | list[str] | None = Field( + default_factory=lambda: ["<|endoftext|>"], + description="Stop sequences to end generation", + ) + presence_penalty: float | None = Field( + default=0.0, description="Penalty for token presence to encourage diversity" + ) + frequency_penalty: float | None = Field( + default=0.0, description="Penalty for token frequency to reduce repetition" + ) + best_of: int | None = Field( + default=1, description="Number of candidates to generate and return the best" + ) + logit_bias: dict[str, float] | None = Field( + default=None, description="Bias values for specific tokens" + ) + seed: int | None = Field( + default=None, description="Random seed for reproducible outputs" + ) + suffix: str | None = Field( + default=None, description="Suffix to append after completion" + ) + user: str | None = Field( + default=None, description="User identifier for tracking and abuse monitoring" + ) + + # vLLM extensions (same as chat completions) + use_beam_search: bool | None = Field( + default=False, description="Enable beam search for better quality" + ) + top_k: int | None = Field(default=None, description="Top-k sampling parameter") + min_p: float | None = Field( + default=None, description="Minimum probability threshold for sampling" + ) + repetition_penalty: float | None = Field( + default=None, description="Penalty for repeated tokens" + ) + length_penalty: float | None = Field( + default=1.0, description="Length penalty for sequence scoring" + ) + stop_token_ids: list[int] | None = Field( + default=None, description="Token IDs that trigger generation stop" + ) + include_stop_str_in_output: bool | None = Field( + default=False, description="Include stop sequence in output" + ) + ignore_eos: bool | None = Field( + default=False, description="Ignore end-of-sequence tokens" + ) + min_tokens: int | None = Field( + default=0, description="Minimum number of tokens to generate" + ) + skip_special_tokens: bool | None = Field( + default=True, description="Skip special tokens in output" + ) + spaces_between_special_tokens: bool | None = Field( + default=True, description="Add spaces between special tokens" + ) + truncate_prompt_tokens: int | None = Field( + default=None, description="Maximum prompt tokens before truncation" + ) + allowed_token_ids: list[int] | None = Field( + default=None, description="Restrict generation to specific token IDs" + ) + prompt_logprobs: int | None = Field( + default=None, description="Number of logprobs to return for prompt tokens" + ) + add_special_tokens: bool | None = Field( + 
default=True, description="Add special tokens during processing" + ) + guided_json: str | dict[str, Any] | None = Field( + default=None, description="JSON schema for guided generation" + ) + guided_regex: str | None = Field( + default=None, description="Regex pattern for guided generation" + ) + guided_choice: list[str] | None = Field( + default=None, description="List of choices for guided generation" + ) + guided_grammar: str | None = Field( + default=None, description="Grammar specification for guided generation" + ) + guided_decoding_backend: str | None = Field( + default=None, description="Backend to use for guided decoding" + ) + guided_whitespace_pattern: str | None = Field( + default=None, description="Whitespace pattern for guided generation" + ) + priority: int | None = Field( + default=0, description="Request priority for scheduling" + ) + + +class CompletionChoice(BaseModel): + """A single completion choice from a text completion response. + + Contains the generated text and metadata about completion + quality and stopping conditions. + """ + + text: str = Field(description="Generated text content") + index: int = Field(description="Index of this choice in the response") + logprobs: dict[str, Any] | None = Field( + default=None, description="Log probabilities for generated tokens" + ) + finish_reason: Literal["stop", "length", "content_filter"] | None = Field( + description="Reason why generation finished" + ) + + +class CompletionsResponse(BaseModel): + """Response from legacy text completion API endpoints. + + Contains generated text choices, usage statistics, and metadata + for non-streaming text completion requests. + """ + + id: str = Field(description="Unique identifier for this completion") + object: Literal["text_completion"] = Field( + default="text_completion", description="Object type identifier" + ) + created: int = Field( + default_factory=lambda: int(time.time()), + description="Unix timestamp of creation", + ) + model: str = Field(description="Model used for generation") + choices: list[CompletionChoice] = Field(description="Generated completion choices") + usage: Usage | None = Field(default=None, description="Token usage statistics") + system_fingerprint: str | None = Field( + default=None, description="System configuration fingerprint" + ) + + +class TokenizeRequest(BaseModel): + """Request for tokenizing text into token sequences. + + Converts input text into model-specific token representations + with optional special token handling. + """ + + text: str = Field(description="Text to tokenize") + add_special_tokens: bool | None = Field( + default=True, description="Whether to add model-specific special tokens" + ) + + +class TokenizeResponse(BaseModel): + """Response containing tokenized representation of input text. + + Provides both the token sequence and count for analysis + and token budget planning. + """ + + tokens: list[int] = Field(description="List of token IDs") + count: int = Field(description="Total number of tokens") + + +class DetokenizeRequest(BaseModel): + """Request for converting token sequences back to text. + + Reconstructs human-readable text from model token representations + with configurable special token handling. 
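+
+    Example (illustrative; token IDs are arbitrary):
+        ::
+            DetokenizeRequest(tokens=[101, 2023, 102], skip_special_tokens=True)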
+    """
+
+    tokens: list[int] = Field(description="List of token IDs to convert")
+    skip_special_tokens: bool | None = Field(
+        default=True, description="Whether to skip special tokens in output"
+    )
+    spaces_between_special_tokens: bool | None = Field(
+        default=True, description="Whether to add spaces between special tokens"
+    )
+
+
+class DetokenizeResponse(BaseModel):
+    """Response containing text reconstructed from tokens.
+
+    Provides the human-readable text representation of the
+    input token sequence.
+    """
+
+    text: str = Field(description="Reconstructed text from tokens")
+
+
+class ErrorDetail(BaseModel):
+    """Detailed error information for API failures.
+
+    Provides structured error data including message, type classification,
+    and optional error codes for debugging and error handling.
+    """
+
+    message: str = Field(description="Human-readable error description")
+    type: str = Field(description="Error type classification")
+    code: str | None = Field(
+        default=None, description="Optional error code for programmatic handling"
+    )
+
+
+class ErrorResponse(BaseModel):
+    """Standardized error response structure for API failures.
+
+    Wraps error details in a consistent format compatible with
+    OpenAI API error response conventions.
+    """
+
+    error: ErrorDetail = Field(description="Detailed error information")
diff --git a/src/guidellm/mock_server/server.py b/src/guidellm/mock_server/server.py
new file mode 100644
index 00000000..ff9d5fcd
--- /dev/null
+++ b/src/guidellm/mock_server/server.py
@@ -0,0 +1,168 @@
+"""
+High-performance mock server for OpenAI and vLLM API compatibility testing.
+
+This module provides a Sanic-based mock server that simulates OpenAI and vLLM APIs
+with configurable latency, token generation patterns, and response characteristics.
+The server supports both streaming and non-streaming endpoints, enabling realistic
+performance testing and validation of GuideLLM benchmarking workflows without
+requiring actual model deployments.
+"""
+
+from __future__ import annotations
+
+import time
+
+from sanic import Sanic, response
+from sanic.exceptions import NotFound
+from sanic.log import logger
+from sanic.request import Request
+from sanic.response import HTTPResponse
+
+from guidellm.mock_server.config import MockServerConfig
+from guidellm.mock_server.handlers import (
+    ChatCompletionsHandler,
+    CompletionsHandler,
+    TokenizerHandler,
+)
+
+__all__ = ["MockServer"]
+
+
+class MockServer:
+    """
+    High-performance mock server implementing OpenAI and vLLM API endpoints.
+
+    Provides a Sanic-based web server that simulates API responses with configurable
+    timing characteristics for testing and benchmarking purposes. Supports chat
+    completions, text completions, tokenization endpoints, and model listing with
+    realistic latency patterns to enable comprehensive performance validation.
+
+    Example:
+    ::
+        config = MockServerConfig(model="test-model", port=8080)
+        server = MockServer(config)
+        server.run()
+    """
+
+    def __init__(self, config: MockServerConfig) -> None:
+        """
+        Initialize the mock server with configuration.
+ + :param config: Server configuration containing network settings and response + timing parameters + """ + self.config = config + self.app = Sanic("guidellm-mock-server") + self.chat_handler = ChatCompletionsHandler(config) + self.completions_handler = CompletionsHandler(config) + self.tokenizer_handler = TokenizerHandler(config) + + self._setup_middleware() + self._setup_routes() + self._setup_error_handlers() + + def _setup_middleware(self): + """Setup middleware for CORS, logging, etc.""" + + @self.app.middleware("request") + async def add_cors_headers(_request: Request): + """Add CORS headers to all requests.""" + + @self.app.middleware("response") + async def add_response_headers(_request: Request, resp: HTTPResponse): + """Add standard response headers.""" + resp.headers["Access-Control-Allow-Origin"] = "*" + resp.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS" + resp.headers["Access-Control-Allow-Headers"] = "Content-Type, Authorization" + resp.headers["Server"] = "guidellm-mock-server" + + def _setup_routes(self): # noqa: C901 + @self.app.get("/health") + async def health_check(_request: Request): + return response.json({"status": "healthy", "timestamp": time.time()}) + + @self.app.get("/v1/models") + async def list_models(_request: Request): + return response.json( + { + "object": "list", + "data": [ + { + "id": self.config.model, + "object": "model", + "created": int(time.time()), + "owned_by": "guidellm-mock", + } + ], + } + ) + + @self.app.route("/v1/chat/completions", methods=["POST", "OPTIONS"]) + async def chat_completions(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.chat_handler.handle(request) + + @self.app.route("/v1/completions", methods=["POST", "OPTIONS"]) + async def completions(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.completions_handler.handle(request) + + @self.app.route("/tokenize", methods=["POST", "OPTIONS"]) + async def tokenize(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.tokenizer_handler.tokenize(request) + + @self.app.route("/detokenize", methods=["POST", "OPTIONS"]) + async def detokenize(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.tokenizer_handler.detokenize(request) + + def _setup_error_handlers(self): + """Setup error handlers.""" + + @self.app.exception(Exception) + async def generic_error_handler(_request: Request, exception: Exception): + logger.error(f"Unhandled exception: {exception}") + return response.json( + { + "error": { + "message": "Internal server error", + "type": type(exception).__name__, + "error": str(exception), + } + }, + status=500, + ) + + @self.app.exception(NotFound) + async def not_found_handler(_request: Request, _exception): + return response.json( + { + "error": { + "message": "Not Found", + "type": "not_found_error", + "code": "not_found", + } + }, + status=404, + ) + + def run(self) -> None: + """ + Start the mock server with configured settings. + + Runs the Sanic application in single-process mode with access logging enabled + for debugging and monitoring request patterns during testing. 
+ """ + self.app.run( + host=self.config.host, + port=self.config.port, + debug=False, + single_process=True, + access_log=True, + register_sys_signals=False, # Disable signal handlers for threading + ) diff --git a/src/guidellm/mock_server/utils.py b/src/guidellm/mock_server/utils.py new file mode 100644 index 00000000..8348d0a6 --- /dev/null +++ b/src/guidellm/mock_server/utils.py @@ -0,0 +1,307 @@ +""" +Mock server utilities for text generation and tokenization testing. + +This module provides mock tokenization and text generation utilities for testing +guidellm's mock server functionality. It includes a mock tokenizer that simulates +tokenization processes, functions to generate reproducible fake text with specific +token counts, and timing generators for realistic benchmarking scenarios. +""" + +from __future__ import annotations + +import random +import re +from collections.abc import Generator + +from faker import Faker +from transformers.tokenization_utils import AddedToken, PreTrainedTokenizer, TextInput + +__all__ = [ + "MockTokenizer", + "create_fake_text", + "create_fake_tokens_str", + "sample_number", + "times_generator", +] + + +class MockTokenizer(PreTrainedTokenizer): + """ + Mock tokenizer implementation for testing text processing workflows. + + Provides a simplified tokenizer that splits text using regex patterns and + generates deterministic token IDs based on string hashing. Used for testing + guidellm components without requiring actual model tokenizers. + + :cvar VocabSize: Fixed vocabulary size for the mock tokenizer + """ + + VocabSize = 100000007 + + def __len__(self) -> int: + """ + Get the vocabulary size of the tokenizer. + + :return: The total number of tokens in the vocabulary + """ + return self.VocabSize + + def __call__(self, text: str | list[str], **kwargs) -> list[int]: # noqa: ARG002 + """ + Tokenize text and return token IDs (callable interface). + + :param text: Input text to tokenize + :return: List of token IDs + """ + if isinstance(text, str): + tokens = self.tokenize(text) + return self.convert_tokens_to_ids(tokens) + elif isinstance(text, list): + # Handle batch processing + return [self.__call__(t) for t in text] + else: + msg = f"text input must be of type `str` or `list[str]`, got {type(text)}" + raise ValueError(msg) + + def tokenize(self, text: TextInput, **_kwargs) -> list[str]: + """ + Tokenize input text into a list of token strings. + + Splits text using regex to separate words, punctuation, and whitespace + into individual tokens for processing. + + :param text: Input text to tokenize + :return: List of token strings from the input text + """ + # Split text into tokens: words, spaces, and punctuation + return re.findall(r"\w+|[^\w\s]|\s+", text) + + def convert_tokens_to_ids(self, tokens: str | list[str]) -> int | list[int]: + """ + Convert token strings to numeric token IDs. + + Uses deterministic hashing to generate consistent token IDs for + reproducible testing scenarios. + + :param tokens: Single token string or list of token strings + :return: Single token ID or list of token IDs + """ + if isinstance(tokens, str): + return hash(tokens) % self.VocabSize + return [hash(token) % self.VocabSize for token in tokens] + + def convert_ids_to_tokens( + self, ids: int | list[int], _skip_special_tokens: bool = False + ) -> str | list[str]: + """ + Convert numeric token IDs back to token strings. + + Generates fake text tokens using Faker library seeded with token IDs + for deterministic and reproducible token generation. 
+ + :param ids: Single token ID or list of token IDs to convert + :return: Single token string or list of token strings + """ + if not ids and not isinstance(ids, list): + return "" + elif not ids: + return [""] + + if isinstance(ids, int): + fake = Faker() + fake.seed_instance(ids % self.VocabSize) + + return fake.word() + + fake = Faker() + fake.seed_instance(sum(ids) % self.VocabSize) + + target_count = len(ids) + current_count = 0 + tokens = [] + + while current_count < target_count: + text = fake.text( + max_nb_chars=(target_count - current_count) * 10 # oversample + ) + new_tokens = self.tokenize(text) + + if current_count > 0: + new_tokens = [".", " "] + new_tokens + + new_tokens = ( + new_tokens[: target_count - current_count] + if len(new_tokens) > (target_count - current_count) + else new_tokens + ) + tokens += new_tokens + current_count += len(new_tokens) + + return tokens + + def convert_tokens_to_string(self, tokens: list[str]) -> str: + """ + Convert a list of token strings back to a single text string. + + :param tokens: List of token strings to concatenate + :return: Concatenated string from all tokens + """ + return "".join(tokens) + + def _add_tokens( + self, + new_tokens: list[str] | list[AddedToken], # noqa: ARG002 + special_tokens: bool = False, # noqa: ARG002 + ) -> int: + """ + Add new tokens to the tokenizer vocabulary (mock implementation). + + :param new_tokens: List of tokens to add to the vocabulary + :param special_tokens: Whether the tokens are special tokens + :return: Number of tokens actually added (always 0 for mock) + """ + return 0 + + def apply_chat_template( + self, + conversation: list, + tokenize: bool = False, # Changed default to False to match transformers + add_generation_prompt: bool = False, # noqa: ARG002 + **kwargs, # noqa: ARG002 + ) -> str | list[int]: + """ + Apply a chat template to format conversation messages. + + Mock implementation that concatenates all message content for testing. + + :param conversation: List of chat messages + :param tokenize: Whether to return tokens or string + :param add_generation_prompt: Whether to add generation prompt + :return: Formatted text string or token IDs + """ + # Simple concatenation of all message content + texts = [] + for message in conversation: + if isinstance(message, dict) and "content" in message: + texts.append(message["content"]) + elif hasattr(message, "content"): + texts.append(message.content) + + formatted_text = " ".join(texts) + + if tokenize: + return self.convert_tokens_to_ids(self.tokenize(formatted_text)) + return formatted_text + + def decode( + self, + token_ids: list[int], + skip_special_tokens: bool = True, + **kwargs, # noqa: ARG002 + ) -> str: + """ + Decode token IDs back to text string. + + :param token_ids: List of token IDs to decode + :param skip_special_tokens: Whether to skip special tokens + :return: Decoded text string + """ + tokens = self.convert_ids_to_tokens(token_ids, skip_special_tokens) + return self.convert_tokens_to_string(tokens) + + +def create_fake_text( + num_tokens: int, + processor: PreTrainedTokenizer, + seed: int = 42, + fake: Faker | None = None, +) -> str: + """ + Generate fake text using a tokenizer processor with specified token count. + + Creates text by generating fake tokens and joining them into a string, + ensuring the result has the exact number of tokens when processed by + the given tokenizer. 
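+
+    Example (illustrative):
+        ::
+            text = create_fake_text(8, MockTokenizer())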
+ + :param num_tokens: Target number of tokens in the generated text + :param processor: Tokenizer to use for token generation and validation + :param seed: Random seed for reproducible text generation + :param fake: Optional Faker instance for text generation + :return: Generated text string with the specified token count + """ + return "".join(create_fake_tokens_str(num_tokens, processor, seed, fake)) + + +def create_fake_tokens_str( + num_tokens: int, + processor: PreTrainedTokenizer, + seed: int = 42, + fake: Faker | None = None, +) -> list[str]: + """ + Generate fake token strings using a tokenizer processor. + + Creates a list of token strings by generating fake text and tokenizing it + until the desired token count is reached. Uses the provided tokenizer + for accurate token boundary detection. + + :param num_tokens: Target number of tokens to generate + :param processor: Tokenizer to use for token generation and validation + :param seed: Random seed for reproducible token generation + :param fake: Optional Faker instance for text generation + :return: List of token strings with the specified count + """ + if not fake: + fake = Faker() + fake.seed_instance(seed) + + tokens = [] + + while len(tokens) < num_tokens: + text = fake.text( + max_nb_chars=(num_tokens - len(tokens)) * 30 # oversample + ) + new_tokens = processor.tokenize(text) + + if len(tokens) > 0: + new_tokens = [".", " "] + new_tokens + + new_tokens = ( + new_tokens[: num_tokens - len(tokens)] + if len(new_tokens) > (num_tokens - len(tokens)) + else new_tokens + ) + tokens += new_tokens + + return tokens + + +def times_generator(mean: float, standard_dev: float) -> Generator[float]: + """ + Generate infinite timing values from a normal distribution. + + Creates a generator that yields timing values sampled from a normal + distribution, useful for simulating realistic request timing patterns + in benchmarking scenarios. + + :param mean: Mean value for the normal distribution + :param standard_dev: Standard deviation for the normal distribution + :return: Generator yielding positive timing values from the distribution + """ + while True: + yield sample_number(mean, standard_dev) + + +def sample_number(mean: float, standard_dev: float) -> float: + """ + Generate a single timing value from a normal distribution. + + Samples one timing value from a normal distribution with the specified + parameters, ensuring the result is non-negative for realistic timing + simulation in benchmarking scenarios. 
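+
+    Example (illustrative):
+        ::
+            sample_number(10.0, 0.0)  # always 10.0
+            sample_number(10.0, 2.0)  # e.g. 8.7 or 11.4, never below 0.0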
+ + :param mean: Mean value for the normal distribution + :param standard_dev: Standard deviation for the normal distribution + :return: Non-negative timing value from the distribution + """ + return max(0.0, random.gauss(mean, standard_dev)) diff --git a/tests/unit/mock_server/__init__.py b/tests/unit/mock_server/__init__.py new file mode 100644 index 00000000..e02d60bd --- /dev/null +++ b/tests/unit/mock_server/__init__.py @@ -0,0 +1 @@ +"""Unit tests for the GuideLLM mock server package.""" diff --git a/tests/unit/mock_server/test_server.py b/tests/unit/mock_server/test_server.py new file mode 100644 index 00000000..ed5c7727 --- /dev/null +++ b/tests/unit/mock_server/test_server.py @@ -0,0 +1,518 @@ +from __future__ import annotations + +import asyncio +import json +import multiprocessing + +import httpx +import pytest +import pytest_asyncio +from pydantic import ValidationError + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.server import MockServer + + +# Start server in a separate process +def _start_server_process(config: MockServerConfig): + server = MockServer(config) + server.run() + + +@pytest_asyncio.fixture(scope="class") +async def mock_server_instance(): + """Instance-level fixture that provides a running server for HTTP testing.""" + + config = MockServerConfig( + host="127.0.0.1", + port=8012, + model="test-model", + ttft_ms=10.0, + itl_ms=1.0, + request_latency=0.1, + ) + base_url = f"http://{config.host}:{config.port}" + server_process = multiprocessing.Process( + target=_start_server_process, args=(config,) + ) + server_process.start() + + # Wait for server to start up and be ready + async def wait_for_startup(): + poll_frequency = 1.0 + async with httpx.AsyncClient() as client: + while True: + try: + response = await client.get(f"{base_url}/health", timeout=1.0) + if response.status_code == 200: + break + except (httpx.RequestError, httpx.TimeoutException): + pass + await asyncio.sleep(poll_frequency) + poll_frequency = min(poll_frequency * 1.5, 2.0) + + timeout = 30.0 + try: + await asyncio.wait_for(wait_for_startup(), timeout) + except TimeoutError: + # Server failed to start within timeout + server_process.terminate() + server_process.kill() + server_process.join(timeout=5) + pytest.fail(f"Server failed to start within {timeout} seconds") + + yield base_url, config + + # Cleanup: terminate the server process + server_process.terminate() + server_process.kill() + server_process.join(timeout=5) + + +class TestMockServerConfig: + """Test suite for MockServerConfig class.""" + + @pytest.mark.smoke + def test_default_initialization(self): + """Test MockServerConfig initialization with default values.""" + config = MockServerConfig() + assert config.host == "127.0.0.1" + assert config.port == 8000 + assert config.workers == 1 + assert config.model == "llama-3.1-8b-instruct" + assert config.processor is None + assert config.request_latency == 3.0 + assert config.request_latency_std == 0.0 + assert config.ttft_ms == 150.0 + assert config.ttft_ms_std == 0.0 + assert config.itl_ms == 10.0 + assert config.itl_ms_std == 0.0 + assert config.output_tokens == 128 + assert config.output_tokens_std == 0.0 + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("kwargs", "expected_values"), + [ + ( + {"host": "127.0.0.1", "port": 9000, "model": "custom-model"}, + {"host": "127.0.0.1", "port": 9000, "model": "custom-model"}, + ), + ( + {"request_latency": 1.5, "ttft_ms": 100.0, "output_tokens": 256}, + {"request_latency": 1.5, "ttft_ms": 100.0, 
"output_tokens": 256}, + ), + ], + ) + def test_custom_initialization(self, kwargs, expected_values): + """Test MockServerConfig initialization with custom values.""" + config = MockServerConfig(**kwargs) + for key, expected_value in expected_values.items(): + assert getattr(config, key) == expected_value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("port", "not_int"), + ("request_latency", "not_float"), + ("output_tokens", "not_int"), + ], + ) + def test_invalid_initialization_values(self, field, value): + """Test MockServerConfig with invalid field values.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + MockServerConfig(**kwargs) + + +class TestMockServer: + """Test suite for MockServer class.""" + + @pytest.mark.smoke + def test_class_signatures(self): + """Test MockServer class signatures and attributes.""" + assert hasattr(MockServer, "__init__") + assert hasattr(MockServer, "run") + assert hasattr(MockServer, "_setup_middleware") + assert hasattr(MockServer, "_setup_routes") + assert hasattr(MockServer, "_setup_error_handlers") + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test MockServer initialization without required config.""" + with pytest.raises(TypeError): + MockServer() + + +class TestMockServerEndpoints: + """Test suite for MockServer HTTP endpoints with real server instances.""" + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_health_endpoint(self, mock_server_instance): + """Test the health check endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/health", timeout=5.0) + assert response.status_code == 200 + + data = response.json() + assert "status" in data + assert data["status"] == "healthy" + assert "timestamp" in data + assert isinstance(data["timestamp"], (int, float)) + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_models_endpoint(self, mock_server_instance): + """Test the models listing endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/v1/models", timeout=5.0) + assert response.status_code == 200 + + data = response.json() + assert "object" in data + assert data["object"] == "list" + assert "data" in data + assert isinstance(data["data"], list) + assert len(data["data"]) > 0 + + model = data["data"][0] + assert "id" in model + assert "object" in model + assert "created" in model + assert "owned_by" in model + assert model["object"] == "model" + assert model["owned_by"] == "guidellm-mock" + assert model["id"] == "test-model" + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + { + "model": "test-model", + "messages": [{"role": "user", "content": "Hello!"}], + "max_tokens": 10, + }, + ["choices", "usage", "model", "object"], + ), + ( + { + "model": "test-model", + "messages": [{"role": "user", "content": "Test"}], + "max_tokens": 5, + "temperature": 0.7, + }, + ["choices", "usage", "model", "object"], + ), + ], + ) + async def test_chat_completions_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the chat completions endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/v1/chat/completions", json=payload, timeout=10.0 + ) + assert response.status_code == 200 + + data = response.json() 
+ for field in expected_fields: + assert field in data + + assert len(data["choices"]) > 0 + choice = data["choices"][0] + assert "message" in choice + assert "content" in choice["message"] + assert "role" in choice["message"] + assert choice["message"]["role"] == "assistant" + assert isinstance(choice["message"]["content"], str) + assert len(choice["message"]["content"]) > 0 + + # Verify usage information + assert "prompt_tokens" in data["usage"] + assert "completion_tokens" in data["usage"] + assert "total_tokens" in data["usage"] + assert data["usage"]["total_tokens"] == ( + data["usage"]["prompt_tokens"] + data["usage"]["completion_tokens"] + ) + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_streaming_chat_completions(self, mock_server_instance): + """Test streaming chat completions endpoint.""" + server_url, _ = mock_server_instance + + payload = { + "model": "test-model", + "messages": [{"role": "user", "content": "Hi!"}], + "max_tokens": 5, + "stream": True, + } + + async with ( + httpx.AsyncClient() as client, + client.stream( + "POST", + f"{server_url}/v1/chat/completions", + json=payload, + timeout=10.0, + ) as response, + ): + assert response.status_code == 200 + assert "text/event-stream" in response.headers.get("content-type", "") + + chunks = [] + async for line in response.aiter_lines(): + if line and line.startswith("data: "): + data_str = line[6:] + if data_str.strip() == "[DONE]": + break + try: + chunk_data = json.loads(data_str) + chunks.append(chunk_data) + except json.JSONDecodeError: + continue + + assert len(chunks) > 0 + # Verify chunk structure + for chunk in chunks: + assert "choices" in chunk + assert len(chunk["choices"]) > 0 + assert "delta" in chunk["choices"][0] + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + { + "model": "test-model", + "prompt": "Hello", + "max_tokens": 10, + }, + ["choices", "usage", "model", "object"], + ), + ( + { + "model": "test-model", + "prompt": "Test prompt", + "max_tokens": 5, + "temperature": 0.8, + }, + ["choices", "usage", "model", "object"], + ), + ], + ) + @pytest.mark.asyncio + async def test_completions_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the legacy completions endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/v1/completions", json=payload, timeout=10.0 + ) + assert response.status_code == 200 + + data = response.json() + for field in expected_fields: + assert field in data + + assert len(data["choices"]) > 0 + choice = data["choices"][0] + assert "text" in choice + assert isinstance(choice["text"], str) + assert len(choice["text"]) > 0 + + # Verify usage information + assert "prompt_tokens" in data["usage"] + assert "completion_tokens" in data["usage"] + assert "total_tokens" in data["usage"] + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_streaming_completions(self, mock_server_instance): + """Test streaming completions endpoint.""" + server_url, _ = mock_server_instance + payload = { + "model": "test-model", + "prompt": "Hello", + "max_tokens": 5, + "stream": True, + } + + async with ( + httpx.AsyncClient() as client, + client.stream( + "POST", + f"{server_url}/v1/completions", + json=payload, + timeout=10.0, + ) as response, + ): + assert response.status_code == 200 + assert "text/event-stream" in response.headers.get("content-type", "") + + chunks = [] + async for line in response.aiter_lines(): + if 
line and line.startswith("data: "): + data_str = line[6:] + if data_str.strip() == "[DONE]": + break + try: + chunk_data = json.loads(data_str) + chunks.append(chunk_data) + except json.JSONDecodeError: + continue + + assert len(chunks) > 0 + # Verify chunk structure + for chunk in chunks: + assert "choices" in chunk + assert len(chunk["choices"]) > 0 + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + {"text": "Hello world!"}, + ["tokens", "count"], + ), + ( + {"text": "This is a test sentence."}, + ["tokens", "count"], + ), + ], + ) + @pytest.mark.asyncio + async def test_tokenize_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the tokenize endpoint.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/tokenize", json=payload, timeout=5.0 + ) + assert response.status_code == 200 + + data = response.json() + for field in expected_fields: + assert field in data + + assert isinstance(data["tokens"], list) + assert isinstance(data["count"], int) + assert data["count"] == len(data["tokens"]) + assert len(data["tokens"]) > 0 + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + {"tokens": [123, 456, 789]}, + ["text"], + ), + ( + {"tokens": [100, 200]}, + ["text"], + ), + ], + ) + @pytest.mark.asyncio + async def test_detokenize_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the detokenize endpoint.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/detokenize", json=payload, timeout=5.0 + ) + assert response.status_code == 200 + + data = response.json() + for field in expected_fields: + assert field in data + + assert isinstance(data["text"], str) + assert len(data["text"]) > 0 + + @pytest.mark.sanity + @pytest.mark.asyncio + async def test_options_endpoint(self, mock_server_instance): + """Test the OPTIONS endpoint for CORS support.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.options( + f"{server_url}/v1/chat/completions", timeout=5.0 + ) + assert response.status_code == 204 + assert response.text == "" + + @pytest.mark.sanity + @pytest.mark.asyncio + async def test_cors_headers(self, mock_server_instance): + """Test CORS headers are properly set.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/health", timeout=5.0) + assert response.status_code == 200 + + # Check for CORS headers + assert response.headers.get("Access-Control-Allow-Origin") == "*" + methods_header = response.headers.get("Access-Control-Allow-Methods", "") + assert "GET, POST, OPTIONS" in methods_header + headers_header = response.headers.get("Access-Control-Allow-Headers", "") + assert "Content-Type, Authorization" in headers_header + assert response.headers.get("Server") == "guidellm-mock-server" + + @pytest.mark.sanity + @pytest.mark.asyncio + @pytest.mark.parametrize( + ("endpoint", "method", "payload"), + [ + ("/v1/chat/completions", "POST", {"invalid": "payload"}), + ("/v1/completions", "POST", {"invalid": "payload"}), + ("/tokenize", "POST", {"invalid": "payload"}), + ("/detokenize", "POST", {"invalid": "payload"}), + ], + ) + async def test_invalid_request_handling( + self, mock_server_instance, endpoint, method, payload + ): + """Test handling of invalid 
requests.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + if method == "POST": + response = await client.post( + f"{server_url}{endpoint}", json=payload, timeout=5.0 + ) + else: + response = await client.get(f"{server_url}{endpoint}", timeout=5.0) + + # Should return an error response, not crash + assert response.status_code in [400, 422, 500] + + @pytest.mark.sanity + @pytest.mark.asyncio + async def test_nonexistent_endpoint(self, mock_server_instance): + """Test handling of requests to nonexistent endpoints.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/nonexistent", timeout=5.0) + assert response.status_code == 404 From bb981934b690f97965f6616b823905553da48b19 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 12:26:42 +0000 Subject: [PATCH 17/90] fixes from copilot review Signed-off-by: Mark Kurtz --- src/guidellm/mock_server/handlers/chat_completions.py | 2 +- src/guidellm/mock_server/handlers/completions.py | 2 +- tests/unit/mock_server/test_server.py | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/guidellm/mock_server/handlers/chat_completions.py b/src/guidellm/mock_server/handlers/chat_completions.py index 976901f9..de2781b0 100644 --- a/src/guidellm/mock_server/handlers/chat_completions.py +++ b/src/guidellm/mock_server/handlers/chat_completions.py @@ -251,7 +251,7 @@ async def generate_stream(stream_response): await stream_response.write(f"data: {json.dumps(final_chunk)}\n\n") # Send usage if requested - if req.stream_options and req.stream_options.get("include_usage"): + if req.stream_options and req.stream_options.include_usage: usage_chunk = { "id": completion_id, "object": "chat.completion.chunk", diff --git a/src/guidellm/mock_server/handlers/completions.py b/src/guidellm/mock_server/handlers/completions.py index 418d2b3c..5a4fe27d 100644 --- a/src/guidellm/mock_server/handlers/completions.py +++ b/src/guidellm/mock_server/handlers/completions.py @@ -251,7 +251,7 @@ async def generate_stream(stream_response): await stream_response.write(f"data: {json.dumps(final_chunk)}\n\n") # Send usage if requested - if req.stream_options and req.stream_options.get("include_usage"): + if req.stream_options and req.stream_options.include_usage: usage_chunk = { "id": completion_id, "object": "text_completion", diff --git a/tests/unit/mock_server/test_server.py b/tests/unit/mock_server/test_server.py index ed5c7727..008103c3 100644 --- a/tests/unit/mock_server/test_server.py +++ b/tests/unit/mock_server/test_server.py @@ -378,11 +378,11 @@ async def test_streaming_completions(self, mock_server_instance): except json.JSONDecodeError: continue - assert len(chunks) > 0 - # Verify chunk structure - for chunk in chunks: - assert "choices" in chunk - assert len(chunk["choices"]) > 0 + assert len(chunks) > 0 + # Verify chunk structure + for chunk in chunks: + assert "choices" in chunk + assert len(chunk["choices"]) > 0 @pytest.mark.smoke @pytest.mark.parametrize( From a9a082ad3dfc67d7e6842c32cd1085247eb1bfe9 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 13:01:29 +0000 Subject: [PATCH 18/90] Any missing changes / working state for refactor Signed-off-by: Mark Kurtz --- src/guidellm/__main__.py | 645 ++++++++---- src/guidellm/benchmark/scenario.py | 4 +- src/guidellm/logger.py | 3 +- src/guidellm/objects/__init__.py | 17 - src/guidellm/objects/statistics.py | 953 ------------------ 
src/guidellm/presentation/builder.py | 4 +- src/guidellm/presentation/data_models.py | 4 +- src/guidellm/request/loader.py | 2 +- src/guidellm/request/request.py | 2 +- src/guidellm/settings.py | 49 +- src/guidellm/utils/typing.py | 46 + tests/integration/scheduler/__init__.py | 0 tests/integration/scheduler/test_scheduler.py | 177 ++++ .../scheduler/test_worker_group.py | 181 ++++ tests/unit/conftest.py | 195 ---- tests/unit/mock_backend.py | 266 ++--- tests/unit/mock_benchmark.py | 387 +++---- tests/unit/test_cli.py | 105 -- .../unit/{test_config.py => test_settings.py} | 0 tests/unit/utils/test_typing.py | 123 +++ 20 files changed, 1295 insertions(+), 1868 deletions(-) delete mode 100644 src/guidellm/objects/__init__.py delete mode 100644 src/guidellm/objects/statistics.py create mode 100644 src/guidellm/utils/typing.py create mode 100644 tests/integration/scheduler/__init__.py create mode 100644 tests/integration/scheduler/test_scheduler.py create mode 100644 tests/integration/scheduler/test_worker_group.py delete mode 100644 tests/unit/test_cli.py rename tests/unit/{test_config.py => test_settings.py} (100%) create mode 100644 tests/unit/utils/test_typing.py diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index f82c19cf..675003a9 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -1,33 +1,117 @@ +""" +GuideLLM command-line interface providing benchmarking, dataset preprocessing, and +mock server functionality. + +This module serves as the primary entry point for the GuideLLM CLI application, +offering a comprehensive suite of tools for language model evaluation and testing. +It provides three main command groups: benchmark operations for performance testing +against generative models, dataset preprocessing utilities for data preparation and +transformation, and a mock server for testing and development scenarios. The CLI +supports various backends, output formats, and configuration options to accommodate +different benchmarking needs and deployment environments. 
+ +Example: +:: + # Run a benchmark against a model + guidellm benchmark run --target http://localhost:8000 --data dataset.json \\ + --profile sweep + + # Preprocess a dataset + guidellm preprocess dataset input.json output.json --processor gpt2 + + # Start a mock server for testing + guidellm mock-server --host 0.0.0.0 --port 8080 +""" + +from __future__ import annotations + import asyncio import codecs from pathlib import Path -from typing import get_args +from typing import Annotated, Union import click -from pydantic import ValidationError -from guidellm.backend import BackendType +try: + import uvloop + + HAS_UVLOOP: Annotated[ + bool, "Flag indicating if uvloop is available for event loop optimization" + ] = True +except ImportError: + uvloop = None + + HAS_UVLOOP: Annotated[ + bool, "Flag indicating if uvloop is available for event loop optimization" + ] = False + +from guidellm.backends import BackendType from guidellm.benchmark import ( + GenerativeConsoleBenchmarkerProgress, + InjectExtrasAggregator, ProfileType, + benchmark_generative_text, reimport_benchmarks_report, ) -from guidellm.benchmark.entrypoints import benchmark_with_scenario -from guidellm.benchmark.scenario import GenerativeTextScenario, get_builtin_scenarios +from guidellm.benchmark.scenario import ( + GenerativeTextScenario, +) +from guidellm.mock_server import MockServer, MockServerConfig from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset from guidellm.scheduler import StrategyType from guidellm.settings import print_config -from guidellm.utils import DefaultGroupHandler +from guidellm.utils import Console, DefaultGroupHandler, get_literal_vals from guidellm.utils import cli as cli_tools -STRATEGY_PROFILE_CHOICES = list( - set(list(get_args(ProfileType)) + list(get_args(StrategyType))) -) +__all__ = [ + "STRATEGY_PROFILE_CHOICES", + "benchmark", + "cli", + "config", + "dataset", + "decode_escaped_str", + "from_file", + "mock_server", + "preprocess", + "run", +] + +STRATEGY_PROFILE_CHOICES: Annotated[ + list[str], "Available strategy and profile choices for benchmark execution types" +] = list(get_literal_vals(Union[ProfileType, StrategyType])) + + +def decode_escaped_str(_ctx, _param, value): + """ + Decode escape sequences in Click option values. + + Click automatically escapes characters in option values, converting sequences + like "\\n" to "\\\\n". This function properly decodes these escape sequences + to their intended characters for use in CLI options. + + :param _ctx: Click context (unused) + :param _param: Click parameter (unused) + :param value: String value to decode escape sequences from + :return: Decoded string with proper escape sequences + :raises click.BadParameter: When escape sequence decoding fails + """ + if value is None: + return None + try: + return codecs.decode(value, "unicode_escape") + except Exception as e: + raise click.BadParameter(f"Could not decode escape sequences: {e}") from e @click.group() -@click.version_option(package_name="guidellm", message="guidellm version: %(version)s") def cli(): - pass + """ + Main entry point for the GuideLLM command-line interface. + + This is the root command group that organizes all GuideLLM CLI functionality + into logical subgroups for benchmarking, preprocessing, configuration, and + mock server operations. + """ @cli.group( @@ -36,7 +120,13 @@ def cli(): default="run", ) def benchmark(): - pass + """ + Benchmark command group for running and managing performance tests. 
+ + This command group provides functionality to execute new benchmarks against + generative models and load previously saved benchmark reports for analysis. + Supports various benchmarking strategies, output formats, and backend types. + """ @benchmark.command( @@ -45,42 +135,65 @@ def benchmark(): context_settings={"auto_envvar_prefix": "GUIDELLM"}, ) @click.option( - "--scenario", - type=cli_tools.Union( - click.Path( - exists=True, - readable=True, - file_okay=True, - dir_okay=False, - path_type=Path, - ), - click.Choice(get_builtin_scenarios()), + "--target", + type=str, + help="The target path for the backend to run benchmarks against. For example, http://localhost:8000", +) +@click.option( + "--data", + type=str, + help=( + "The HuggingFace dataset ID, a path to a HuggingFace dataset, " + "a path to a data file csv, json, jsonl, or txt, " + "or a synthetic data config as a json or key=value string." ), +) +@click.option( + "--profile", + "--rate-type", # legacy alias + "profile", + type=click.Choice(STRATEGY_PROFILE_CHOICES), + help=( + "The type of benchmark to run. " + f"Supported types {', '.join(STRATEGY_PROFILE_CHOICES)}. " + ), +) +@click.option( + "--rate", default=None, help=( - "The name of a builtin scenario or path to a config file. " - "Missing values from the config will use defaults. " - "Options specified on the commandline will override the scenario." + "The rates to run the benchmark at. " + "Can be a single number or a comma-separated list of numbers. " + "For rate-type=sweep, this is the number of benchmarks it runs in the sweep. " + "For rate-type=concurrent, this is the number of concurrent requests. " + "For rate-type=async,constant,poisson, this is the rate requests per second. " + "For rate-type=synchronous,throughput, this must not be set." ), ) @click.option( - "--target", - type=str, - help="The target path for the backend to run benchmarks against. For example, http://localhost:8000", + "--random-seed", + default=GenerativeTextScenario.get_default("random_seed"), + type=int, + help="The random seed to use for benchmarking to ensure reproducibility.", ) +# Backend configuration @click.option( - "--backend-type", - type=click.Choice(list(get_args(BackendType))), + "--backend", + "--backend-type", # legacy alias + "backend", + type=click.Choice(list(get_literal_vals(BackendType))), help=( "The type of backend to use to run requests against. Defaults to 'openai_http'." - f" Supported types: {', '.join(get_args(BackendType))}" + f" Supported types: {', '.join(get_literal_vals(BackendType))}" ), - default=GenerativeTextScenario.get_default("backend_type"), + default="openai_http", ) @click.option( - "--backend-args", + "--backend-kwargs", + "--backend-args", # legacy alias + "backend_kwargs", callback=cli_tools.parse_json, - default=GenerativeTextScenario.get_default("backend_args"), + default=None, help=( "A JSON string containing any arguments to pass to the backend as a " "dict with **kwargs. Headers can be removed by setting their value to " @@ -90,16 +203,17 @@ def benchmark(): ) @click.option( "--model", - default=GenerativeTextScenario.get_default("model"), + default=None, type=str, help=( "The ID of the model to benchmark within the backend. " "If None provided (default), then it will use the first model available." 
), ) +# Data configuration @click.option( "--processor", - default=GenerativeTextScenario.get_default("processor"), + default=None, type=str, help=( "The processor or tokenizer to use to calculate token counts for statistics " @@ -109,25 +223,16 @@ def benchmark(): ) @click.option( "--processor-args", - default=GenerativeTextScenario.get_default("processor_args"), + default=None, callback=cli_tools.parse_json, help=( "A JSON string containing any arguments to pass to the processor constructor " "as a dict with **kwargs." ), ) -@click.option( - "--data", - type=str, - help=( - "The HuggingFace dataset ID, a path to a HuggingFace dataset, " - "a path to a data file csv, json, jsonl, or txt, " - "or a synthetic data config as a json or key=value string." - ), -) @click.option( "--data-args", - default=GenerativeTextScenario.get_default("data_args"), + default=None, callback=cli_tools.parse_json, help=( "A JSON string containing any arguments to pass to the dataset creation " @@ -136,189 +241,226 @@ def benchmark(): ) @click.option( "--data-sampler", - default=GenerativeTextScenario.get_default("data_sampler"), + default=None, type=click.Choice(["random"]), help=( "The data sampler type to use. 'random' will add a random shuffle on the data. " "Defaults to None" ), ) +# Output configuration @click.option( - "--rate-type", - type=click.Choice(STRATEGY_PROFILE_CHOICES), + "--output-path", + type=click.Path(), + default=Path.cwd(), help=( - "The type of benchmark to run. " - f"Supported types {', '.join(STRATEGY_PROFILE_CHOICES)}. " + "The path to save the output formats to, if the format is a file type. " + "If it is a directory, it will save all output formats selected under it. " + "If it is a file, it will save the corresponding output format to that file. " + "Any output formats that were given that do not match the file extension will " + "be saved in the parent directory of the file path. " + "Defaults to the current working directory. " ), ) @click.option( - "--rate", - default=GenerativeTextScenario.get_default("rate"), + "--output-formats", + multiple=True, + type=str, + default=("console", "json"), # ("console", "json", "html", "csv") help=( - "The rates to run the benchmark at. " - "Can be a single number or a comma-separated list of numbers. " - "For rate-type=sweep, this is the number of benchmarks it runs in the sweep. " - "For rate-type=concurrent, this is the number of concurrent requests. " - "For rate-type=async,constant,poisson, this is the rate requests per second. " - "For rate-type=synchronous,throughput, this must not be set." + "The output formats to use for the benchmark results. " + "Defaults to console, json, html, and csv where the file formats " + "will be saved at the specified output path." ), ) @click.option( - "--max-seconds", - type=float, - default=GenerativeTextScenario.get_default("max_seconds"), - help=( - "The maximum number of seconds each benchmark can run for. " - "If None, will run until max_requests or the data is exhausted." - ), + "--disable-console-outputs", + is_flag=True, + help="Set this flag to disable console output", ) +# Updates configuration @click.option( - "--max-requests", - type=int, - default=GenerativeTextScenario.get_default("max_requests"), - help=( - "The maximum number of requests each benchmark can run for. " - "If None, will run until max_seconds or the data is exhausted." 
- ), + "--disable-progress", + is_flag=True, + help="Set this flag to disable progress updates to the console", +) +@click.option( + "--display-scheduler-stats", + is_flag=True, + help="Set this flag to display stats for the processes running the benchmarks", ) +# Aggregators configuration @click.option( - "--warmup-percent", + "--output-extras", + callback=cli_tools.parse_json, + help="A JSON string of extra data to save with the output benchmarks", +) +@click.option( + "--warmup", + "--warmup-percent", # legacy alias + "warmup", type=float, - default=GenerativeTextScenario.get_default("warmup_percent"), + default=None, help=( - "The percent of the benchmark (based on max-seconds, max-requets, " - "or lenth of dataset) to run as a warmup and not include in the final results. " - "Defaults to None." + "The specification around the number of requests to run before benchmarking. " + "If within (0, 1), then the percent of requests/time to use for warmup. " + "If >=1, then the number of requests or seconds to use for warmup." + "Whether it's requests/time used is dependent on which constraint is active. " + "Default None for no warmup." ), ) @click.option( - "--cooldown-percent", + "--cooldown", + "--cooldown-percent", # legacy alias + "cooldown", type=float, default=GenerativeTextScenario.get_default("cooldown_percent"), help=( - "The percent of the benchmark (based on max-seconds, max-requets, or lenth " - "of dataset) to run as a cooldown and not include in the final results. " - "Defaults to None." + "The specification around the number of requests to run after benchmarking. " + "If within (0, 1), then the percent of requests/time to use for cooldown. " + "If >=1, then the number of requests or seconds to use for cooldown." + "Whether it's requests/time used is dependent on which constraint is active. " + "Default None for no cooldown." ), ) @click.option( - "--disable-progress", - is_flag=True, - help="Set this flag to disable progress updates to the console", -) -@click.option( - "--display-scheduler-stats", - is_flag=True, - help="Set this flag to display stats for the processes running the benchmarks", -) -@click.option( - "--disable-console-outputs", - is_flag=True, - help="Set this flag to disable console output", -) -@click.option( - "--output-path", - type=click.Path(), - default=Path.cwd() / "benchmarks.json", + "--request-samples", + "--output-sampling", # legacy alias + "request_samples", + type=int, help=( - "The path to save the output to. If it is a directory, " - "it will save benchmarks.json under it. " - "Otherwise, json, yaml, csv, or html files are supported for output types " - "which will be read from the extension for the file path." + "The number of samples for each request status and each benchmark to save " + "in the output file. If None (default), will save all samples. " + "Defaults to 20." ), + default=20, ) +# Constraints configuration @click.option( - "--output-extras", - callback=cli_tools.parse_json, - help="A JSON string of extra data to save with the output benchmarks", + "--max-seconds", + type=float, + default=None, + help=( + "The maximum number of seconds each benchmark can run for. " + "If None, will run until max_requests or the data is exhausted." + ), ) @click.option( - "--output-sampling", + "--max-requests", type=int, + default=None, help=( - "The number of samples to save in the output file. " - "If None (default), will save all samples." + "The maximum number of requests each benchmark can run for. 
" + "If None, will run until max_seconds or the data is exhausted." ), - default=GenerativeTextScenario.get_default("output_sampling"), ) @click.option( - "--random-seed", - default=GenerativeTextScenario.get_default("random_seed"), + "--max-errors", type=int, - help="The random seed to use for benchmarking to ensure reproducibility.", + default=None, + help="Maximum number of errors allowed before stopping the benchmark", +) +@click.option( + "--max-error-rate", + type=float, + default=None, + help="Maximum error rate allowed before stopping the benchmark", +) +@click.option( + "--max-global-error-rate", + type=float, + default=None, + help="Maximum global error rate allowed across all benchmarks", ) def run( - scenario, target, - backend_type, - backend_args, + data, + profile, + rate, + random_seed, + # Backend Configuration + backend, + backend_kwargs, model, + # Data configuration processor, processor_args, - data, data_args, data_sampler, - rate_type, - rate, - max_seconds, - max_requests, - warmup_percent, - cooldown_percent, + # Output configuration + output_path, + output_formats, + # Updates configuration + disable_console_outputs, disable_progress, display_scheduler_stats, - disable_console_outputs, - output_path, + # Aggregators configuration output_extras, - output_sampling, - random_seed, + warmup, + cooldown, + request_samples, + # Constraints configuration + max_seconds, + max_requests, + max_errors, + max_error_rate, + max_global_error_rate, ): - click_ctx = click.get_current_context() - - overrides = cli_tools.set_if_not_default( - click_ctx, - target=target, - backend_type=backend_type, - backend_args=backend_args, - model=model, - processor=processor, - processor_args=processor_args, - data=data, - data_args=data_args, - data_sampler=data_sampler, - rate_type=rate_type, - rate=rate, - max_seconds=max_seconds, - max_requests=max_requests, - warmup_percent=warmup_percent, - cooldown_percent=cooldown_percent, - output_sampling=output_sampling, - random_seed=random_seed, - ) - - try: - # If a scenario file was specified read from it - if scenario is None: - _scenario = GenerativeTextScenario.model_validate(overrides) - elif isinstance(scenario, Path): - _scenario = GenerativeTextScenario.from_file(scenario, overrides) - else: # Only builtins can make it here; click will catch anything else - _scenario = GenerativeTextScenario.from_builtin(scenario, overrides) - except ValidationError as e: - # Translate pydantic valdation error to click argument error - errs = e.errors(include_url=False, include_context=True, include_input=True) - param_name = "--" + str(errs[0]["loc"][0]).replace("_", "-") - raise click.BadParameter( - errs[0]["msg"], ctx=click_ctx, param_hint=param_name - ) from e + """ + Execute a generative text benchmark against a target model backend. + Runs comprehensive performance testing using various strategies and profiles, + collecting metrics on latency, throughput, error rates, and resource usage. + Supports multiple backends, data sources, output formats, and constraint types + for flexible benchmark configuration. 
+ """ + if HAS_UVLOOP: + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) asyncio.run( - benchmark_with_scenario( - scenario=_scenario, - show_progress=not disable_progress, - show_progress_scheduler_stats=display_scheduler_stats, - output_console=not disable_console_outputs, + benchmark_generative_text( + target=target, + data=data, + profile=profile, + rate=rate, + random_seed=random_seed, + # Backend configuration + backend=backend, + backend_kwargs=backend_kwargs, + model=model, + # Data configuration + processor=processor, + processor_args=processor_args, + data_args=data_args, + data_sampler=data_sampler, + # Output configuration output_path=output_path, - output_extras=output_extras, + output_formats=[ + fmt + for fmt in output_formats + if not disable_console_outputs or fmt != "console" + ], + # Updates configuration + progress=( + [ + GenerativeConsoleBenchmarkerProgress( + display_scheduler_stats=display_scheduler_stats + ) + ] + if not disable_progress + else None + ), + print_updates=not disable_console_outputs, + # Aggregators configuration + add_aggregators={"extras": InjectExtrasAggregator(extras=output_extras)}, + warmup=warmup, + cooldown=cooldown, + request_samples=request_samples, + # Constraints configuration + max_seconds=max_seconds, + max_requests=max_requests, + max_errors=max_errors, + max_error_rate=max_error_rate, + max_global_error_rate=max_global_error_rate, ) ) @@ -348,21 +490,14 @@ def run( ), ) def from_file(path, output_path): - reimport_benchmarks_report(path, output_path) - - -def decode_escaped_str(_ctx, _param, value): """ - Click auto adds characters. For example, when using --pad-char "\n", - it parses it as "\\n". This method decodes the string to handle escape - sequences correctly. + Load and optionally re-export a previously saved benchmark report. + + Imports benchmark results from a saved file and provides optional conversion + to different output formats. Supports JSON, YAML, and CSV export formats + based on the output file extension. """ - if value is None: - return None - try: - return codecs.decode(value, "unicode_escape") - except Exception as e: - raise click.BadParameter(f"Could not decode escape sequences: {e}") from e + reimport_benchmarks_report(path, output_path) @cli.command( @@ -373,12 +508,25 @@ def decode_escaped_str(_ctx, _param, value): ), ) def config(): + """ + Display available GuideLLM configuration environment variables. + + Prints a comprehensive list of all environment variables that can be used + to configure GuideLLM behavior, including their current values, defaults, + and descriptions. + """ print_config() @cli.group(help="General preprocessing tools and utilities.") def preprocess(): - pass + """ + Preprocessing command group for dataset preparation and transformation. + + This command group provides utilities for converting, processing, and + optimizing datasets for use in GuideLLM benchmarks. Includes functionality + for token count adjustments, format conversions, and data validation. + """ @preprocess.command( @@ -494,6 +642,13 @@ def dataset( hub_dataset_id, random_seed, ): + """ + Convert and process datasets for specific prompt and output token requirements. + + Transforms datasets to meet target token length specifications using various + strategies for handling short prompts and output length adjustments. Supports + multiple input formats and can optionally push results to Hugging Face Hub. 
+ """ process_dataset( data=data, output_path=output_path, @@ -511,5 +666,121 @@ def dataset( ) +@cli.command(help="Start the GuideLLM mock OpenAI/vLLM server for testing.") +@click.option("--host", default="127.0.0.1", help="Host to bind the server to") +@click.option("--port", default=8000, type=int, help="Port to bind the server to") +@click.option("--workers", default=1, type=int, help="Number of worker processes") +@click.option( + "--model", default="llama-3.1-8b-instruct", help="The name of the model to mock" +) +@click.option("--processor", default=None, help="The processor to use for requests") +@click.option( + "--request-latency", + default=3, + type=float, + help="Request latency in seconds for non-streaming requests", +) +@click.option( + "--request-latency-std", + default=0, + type=float, + help=( + "Request latency standard deviation (normal distribution) " + "in seconds for non-streaming requests" + ), +) +@click.option( + "--ttft-ms", + default=150, + type=float, + help="Time to first token in milliseconds for streaming requests", +) +@click.option( + "--ttft-ms-std", + default=0, + type=float, + help=( + "Time to first token standard deviation (normal distribution) in milliseconds" + ), +) +@click.option( + "--itl-ms", + default=10, + type=float, + help="Inter token latency in milliseconds for streaming requests", +) +@click.option( + "--itl-ms-std", + default=0, + type=float, + help=( + "Inter token latency standard deviation (normal distribution) " + "in milliseconds for streaming requests" + ), +) +@click.option( + "--output-tokens", + default=128, + type=int, + help="Output tokens for streaming requests", +) +@click.option( + "--output-tokens-std", + default=0, + type=float, + help=( + "Output tokens standard deviation (normal distribution) for streaming requests" + ), +) +def mock_server( + host: str, + port: int, + workers: int, + model: str, + processor: str | None, + request_latency: float, + request_latency_std: float, + ttft_ms: float, + ttft_ms_std: float, + itl_ms: float, + itl_ms_std: float, + output_tokens: int, + output_tokens_std: float, +): + """ + Start a GuideLLM mock OpenAI/vLLM-compatible server for testing and development. + + Launches a mock server that simulates model inference with configurable latency + characteristics, token generation patterns, and response timing. Useful for + testing GuideLLM benchmarks without requiring actual model deployment or for + development scenarios requiring predictable server behavior. 
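# A quick client-side sanity check against a running mock server (assumes it
# was started with the defaults above and is listening on 127.0.0.1:8000).
import httpx

response = httpx.get("http://127.0.0.1:8000/health", timeout=5.0)
assert response.status_code == 200
assert response.json()["status"] == "healthy"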
+ """ + + config = MockServerConfig( + host=host, + port=port, + workers=workers, + model=model, + processor=processor, + request_latency=request_latency, + request_latency_std=request_latency_std, + ttft_ms=ttft_ms, + ttft_ms_std=ttft_ms_std, + itl_ms=itl_ms, + itl_ms_std=itl_ms_std, + output_tokens=output_tokens, + output_tokens_std=output_tokens_std, + ) + + server = MockServer(config) + console = Console() + console.print_update( + title="GuideLLM mock server starting...", + details=f"Listening on http://{host}:{port} for model {model}", + status="success", + ) + server.run() + + if __name__ == "__main__": cli() diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 15e3cd81..3f84f868 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -11,9 +11,9 @@ PreTrainedTokenizerBase, ) -from guidellm.backend.backend import BackendType +from guidellm.backends import BackendType from guidellm.benchmark.profile import ProfileType -from guidellm.scheduler.strategy import StrategyType +from guidellm.scheduler import StrategyType from guidellm.utils import StandardBaseModel __ALL__ = ["Scenario", "GenerativeTextScenario", "get_builtin_scenarios"] diff --git a/src/guidellm/logger.py b/src/guidellm/logger.py index da3464f9..48b41a49 100644 --- a/src/guidellm/logger.py +++ b/src/guidellm/logger.py @@ -71,8 +71,7 @@ def configure_logger(config: LoggingSettings = settings.logging): logger.add( sys.stdout, level=config.console_log_level.upper(), - format="{time:YY-MM-DD HH:mm:ss}|{level: <8} \ - |{name}:{function}:{line} - {message}", + format="{time} | {function} | {level} - {message}", ) if config.log_file or config.log_file_level: diff --git a/src/guidellm/objects/__init__.py b/src/guidellm/objects/__init__.py deleted file mode 100644 index f97f1ef3..00000000 --- a/src/guidellm/objects/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -from .statistics import ( - DistributionSummary, - Percentiles, - RunningStats, - StatusDistributionSummary, - TimeRunningStats, -) - -__all__ = [ - "DistributionSummary", - "Percentiles", - "RunningStats", - "StandardBaseModel", - "StatusBreakdown", - "StatusDistributionSummary", - "TimeRunningStats", -] diff --git a/src/guidellm/objects/statistics.py b/src/guidellm/objects/statistics.py deleted file mode 100644 index 8ba504be..00000000 --- a/src/guidellm/objects/statistics.py +++ /dev/null @@ -1,953 +0,0 @@ -import math -import time as timer -from collections import defaultdict -from typing import Any, Literal, Optional - -import numpy as np -from pydantic import Field, computed_field - -from guidellm.objects.pydantic import StandardBaseModel, StatusBreakdown - -__all__ = [ - "DistributionSummary", - "Percentiles", - "RunningStats", - "StatusDistributionSummary", - "TimeRunningStats", -] - - -class Percentiles(StandardBaseModel): - """ - A pydantic model representing the standard percentiles of a distribution. 
- """ - - p001: float = Field( - description="The 0.1th percentile of the distribution.", - ) - p01: float = Field( - description="The 1st percentile of the distribution.", - ) - p05: float = Field( - description="The 5th percentile of the distribution.", - ) - p10: float = Field( - description="The 10th percentile of the distribution.", - ) - p25: float = Field( - description="The 25th percentile of the distribution.", - ) - p50: float = Field( - description="The 50th percentile of the distribution.", - ) - p75: float = Field( - description="The 75th percentile of the distribution.", - ) - p90: float = Field( - description="The 90th percentile of the distribution.", - ) - p95: float = Field( - description="The 95th percentile of the distribution.", - ) - p99: float = Field( - description="The 99th percentile of the distribution.", - ) - p999: float = Field( - description="The 99.9th percentile of the distribution.", - ) - - -class DistributionSummary(StandardBaseModel): - """ - A pydantic model representing a statistical summary for a given - distribution of numerical values. - """ - - mean: float = Field( - description="The mean/average of the distribution.", - ) - median: float = Field( - description="The median of the distribution.", - ) - mode: float = Field( - description="The mode of the distribution.", - ) - variance: float = Field( - description="The variance of the distribution.", - ) - std_dev: float = Field( - description="The standard deviation of the distribution.", - ) - min: float = Field( - description="The minimum value of the distribution.", - ) - max: float = Field( - description="The maximum value of the distribution.", - ) - count: int = Field( - description="The number of values in the distribution.", - ) - total_sum: float = Field( - description="The total sum of the values in the distribution.", - ) - percentiles: Percentiles = Field( - description="The percentiles of the distribution.", - ) - cumulative_distribution_function: Optional[list[tuple[float, float]]] = Field( - description="The cumulative distribution function (CDF) of the distribution.", - default=None, - ) - - @staticmethod - def from_distribution_function( - distribution: list[tuple[float, float]], - include_cdf: bool = False, - ) -> "DistributionSummary": - """ - Create a statistical summary for a given distribution of weighted numerical - values or a probability distribution function (PDF). - 1. If the distribution is a PDF, it is expected to be a list of tuples - where each tuple contains (value, probability). The sum of the - probabilities should be 1. If it is not, it will be normalized. - 2. If the distribution is a values distribution function, it is expected - to be a list of tuples where each tuple contains (value, weight). - The weights are normalized to a probability distribution function. - - :param distribution: A list of tuples representing the distribution. - Each tuple contains (value, weight) or (value, probability). - :param include_cdf: Whether to include the calculated cumulative distribution - function (CDF) in the output DistributionSummary. - :return: An instance of DistributionSummary with calculated values. 
- """ - values, weights = zip(*distribution) if distribution else ([], []) - values = np.array(values) # type: ignore[assignment] - weights = np.array(weights) # type: ignore[assignment] - - # create the PDF - probabilities = weights / np.sum(weights) # type: ignore[operator] - pdf = np.column_stack((values, probabilities)) - pdf = pdf[np.argsort(pdf[:, 0])] - values = pdf[:, 0] # type: ignore[assignment] - probabilities = pdf[:, 1] - - # calculate the CDF - cumulative_probabilities = np.cumsum(probabilities) - cdf = np.column_stack((values, cumulative_probabilities)) - - # calculate statistics - mean = np.sum(values * probabilities).item() # type: ignore[attr-defined] - median = cdf[np.argmax(cdf[:, 1] >= 0.5), 0].item() if len(cdf) > 0 else 0 # noqa: PLR2004 - mode = values[np.argmax(probabilities)].item() if len(values) > 0 else 0 # type: ignore[call-overload] - variance = np.sum((values - mean) ** 2 * probabilities).item() # type: ignore[attr-defined] - std_dev = math.sqrt(variance) - minimum = values[0].item() if len(values) > 0 else 0 - maximum = values[-1].item() if len(values) > 0 else 0 - count = len(values) - total_sum = np.sum(values).item() # type: ignore[attr-defined] - - return DistributionSummary( - mean=mean, - median=median, - mode=mode, - variance=variance, - std_dev=std_dev, - min=minimum, - max=maximum, - count=count, - total_sum=total_sum, - percentiles=( - Percentiles( - p001=cdf[np.argmax(cdf[:, 1] >= 0.001), 0].item(), # noqa: PLR2004 - p01=cdf[np.argmax(cdf[:, 1] >= 0.01), 0].item(), # noqa: PLR2004 - p05=cdf[np.argmax(cdf[:, 1] >= 0.05), 0].item(), # noqa: PLR2004 - p10=cdf[np.argmax(cdf[:, 1] >= 0.1), 0].item(), # noqa: PLR2004 - p25=cdf[np.argmax(cdf[:, 1] >= 0.25), 0].item(), # noqa: PLR2004 - p50=cdf[np.argmax(cdf[:, 1] >= 0.50), 0].item(), # noqa: PLR2004 - p75=cdf[np.argmax(cdf[:, 1] >= 0.75), 0].item(), # noqa: PLR2004 - p90=cdf[np.argmax(cdf[:, 1] >= 0.9), 0].item(), # noqa: PLR2004 - p95=cdf[np.argmax(cdf[:, 1] >= 0.95), 0].item(), # noqa: PLR2004 - p99=cdf[np.argmax(cdf[:, 1] >= 0.99), 0].item(), # noqa: PLR2004 - p999=cdf[np.argmax(cdf[:, 1] >= 0.999), 0].item(), # noqa: PLR2004 - ) - if len(cdf) > 0 - else Percentiles( - p001=0, - p01=0, - p05=0, - p10=0, - p25=0, - p50=0, - p75=0, - p90=0, - p95=0, - p99=0, - p999=0, - ) - ), - cumulative_distribution_function=cdf.tolist() if include_cdf else None, - ) - - @staticmethod - def from_values( - values: list[float], - weights: Optional[list[float]] = None, - include_cdf: bool = False, - ) -> "DistributionSummary": - """ - Create a statistical summary for a given distribution of numerical values. - This is a wrapper around from_distribution_function to handle the optional case - of including weights for the values. If weights are not provided, they are - automatically set to 1.0 for each value, so each value is equally weighted. - - :param values: A list of numerical values representing the distribution. - :param weights: A list of weights for each value in the distribution. - If not provided, all values are equally weighted. - :param include_cdf: Whether to include the calculated cumulative distribution - function (CDF) in the output DistributionSummary. 
- """ - if weights is None: - weights = [1.0] * len(values) - - if len(values) != len(weights): - raise ValueError( - "The length of values and weights must be the same.", - ) - - return DistributionSummary.from_distribution_function( - distribution=list(zip(values, weights)), - include_cdf=include_cdf, - ) - - @staticmethod - def from_request_times( - requests: list[tuple[float, float]], - distribution_type: Literal["concurrency", "rate"], - include_cdf: bool = False, - epsilon: float = 1e-6, - ) -> "DistributionSummary": - """ - Create a statistical summary for a given distribution of request times. - Specifically, this is used to measure concurrency or rate of requests - given an input list containing the start and end time of each request. - This will first convert the request times into a distribution function - and then calculate the statistics with from_distribution_function. - - :param requests: A list of tuples representing the start and end times of - each request. Example: [(start_1, end_1), (start_2, end_2), ...] - :param distribution_type: The type of distribution to calculate. - Either "concurrency" or "rate". - :param include_cdf: Whether to include the calculated cumulative distribution - function (CDF) in the output DistributionSummary. - :param epsilon: The epsilon value for merging close events. - :return: An instance of DistributionSummary with calculated values. - """ - if distribution_type == "concurrency": - # convert to delta changes based on when requests were running - events = [(start, 1) for start, _ in requests] + [ - (end, -1) for _, end in requests - ] - elif distribution_type == "rate": - # convert to events for when requests finished - global_start = min(start for start, _ in requests) if requests else 0 - events = [(global_start, 1)] + [(end, 1) for _, end in requests] - else: - raise ValueError( - f"Invalid distribution_type '{distribution_type}'. " - "Must be 'concurrency' or 'rate'." 
- ) - - # combine any events that are very close together - flattened_events: list[tuple[float, float]] = [] - for time, val in sorted(events): - last_time, last_val = ( - flattened_events[-1] if flattened_events else (None, None) - ) - - if ( - last_time is not None - and last_val is not None - and abs(last_time - time) <= epsilon - ): - flattened_events[-1] = (last_time, last_val + val) - else: - flattened_events.append((time, val)) - - if distribution_type == "concurrency": - # convert to the events over time measuring concurrency changes - events_over_time: list[tuple[float, float]] = [] - active = 0 - for time, delta in flattened_events: - active += delta # type: ignore [assignment] - events_over_time.append((time, active)) - - flattened_events = events_over_time - - # convert to value distribution function - distribution: dict[float, float] = defaultdict(float) - - for ind in range(len(flattened_events) - 1): - start_time, value = flattened_events[ind] - end_time, _ = flattened_events[ind + 1] - duration = end_time - start_time - - if distribution_type == "concurrency": - # weight the concurrency value by the duration - distribution[value] += duration - elif distribution_type == "rate": - # weight the rate value by the duration - rate = value / duration - distribution[rate] += duration - - distribution_list: list[tuple[float, float]] = sorted(distribution.items()) - - return DistributionSummary.from_distribution_function( - distribution=distribution_list, - include_cdf=include_cdf, - ) - - @staticmethod - def from_iterable_request_times( - requests: list[tuple[float, float]], - first_iter_times: list[float], - iter_counts: list[int], - first_iter_counts: Optional[list[int]] = None, - include_cdf: bool = False, - epsilon: float = 1e-6, - ) -> "DistributionSummary": - """ - Create a statistical summary for a given distribution of request times - for a request with iterable responses between the start and end. - For example, this is used to measure auto regressive requests where - a request is started and at some later point, iterative responses are - received. This will convert the request times and iterable values into - a distribution function and then calculate the statistics with - from_distribution_function. - - :param requests: A list of tuples representing the start and end times of - each request. Example: [(start_1, end_1), (start_2, end_2), ...] - :param first_iter_times: A list of times when the first iteration of - each request was received. Must be the same length as requests. - :param iter_counts: A list of the total number of iterations for each - request that occurred starting at the first iteration and ending - at the request end time. Must be the same length as requests. - :param first_iter_counts: A list of the number of iterations to log - for the first iteration of each request. For example, when calculating - total number of tokens processed, this is set to the prompt tokens number. - If not provided, defaults to 1 for each request. - :param include_cdf: Whether to include the calculated cumulative distribution - function (CDF) in the output DistributionSummary. - :param epsilon: The epsilon value for merging close events. - :return: An instance of DistributionSummary with calculated values. 
- """ - - if first_iter_counts is None: - first_iter_counts = [1] * len(requests) - - if ( - len(requests) != len(first_iter_times) - or len(requests) != len(iter_counts) - or len(requests) != len(first_iter_counts) - ): - raise ValueError( - "requests, first_iter_times, iter_counts, and first_iter_counts must" - "be the same length." - f"Given {len(requests)}, {len(first_iter_times)}, {len(iter_counts)}, " - f"{len(first_iter_counts)}", - ) - - # first break up the requests into individual iterable events - events = defaultdict(int) - global_start = min(start for start, _ in requests) if requests else 0 - global_end = max(end for _, end in requests) if requests else 0 - events[global_start] = 0 - events[global_end] = 0 - - for (_, end), first_iter, first_iter_count, total_count in zip( - requests, first_iter_times, first_iter_counts, iter_counts - ): - events[first_iter] += first_iter_count - - if total_count > 1: - iter_latency = (end - first_iter) / (total_count - 1) - for ind in range(1, total_count): - events[first_iter + ind * iter_latency] += 1 - - # combine any events that are very close together - flattened_events: list[tuple[float, int]] = [] - - for time, count in sorted(events.items()): - last_time, last_count = ( - flattened_events[-1] if flattened_events else (None, None) - ) - - if ( - last_time is not None - and last_count is not None - and abs(last_time - time) <= epsilon - ): - flattened_events[-1] = (last_time, last_count + count) - else: - flattened_events.append((time, count)) - - # convert to value distribution function - distribution: dict[float, float] = defaultdict(float) - - for ind in range(len(flattened_events) - 1): - start_time, count = flattened_events[ind] - end_time, _ = flattened_events[ind + 1] - duration = end_time - start_time - rate = count / duration - distribution[rate] += duration - - distribution_list = sorted(distribution.items()) - - return DistributionSummary.from_distribution_function( - distribution=distribution_list, - include_cdf=include_cdf, - ) - - -class StatusDistributionSummary( - StatusBreakdown[ - DistributionSummary, - DistributionSummary, - DistributionSummary, - DistributionSummary, - ] -): - """ - A pydantic model representing a statistical summary for a given - distribution of numerical values grouped by status. - Specifically used to represent the total, successful, incomplete, - and errored values for a benchmark or other statistical summary. - """ - - @staticmethod - def from_values( - value_types: list[Literal["successful", "incomplete", "error"]], - values: list[float], - weights: Optional[list[float]] = None, - include_cdf: bool = False, - ) -> "StatusDistributionSummary": - """ - Create a statistical summary by status for a given distribution of numerical - values. This is used to measure the distribution of values for different - statuses (e.g., successful, incomplete, error) and calculate the statistics - for each status. Weights are optional to weight the probability distribution - for each value by. If not provided, all values are equally weighted. - - :param value_types: A list of status types for each value in the distribution. - Must be one of 'successful', 'incomplete', or 'error'. - :param values: A list of numerical values representing the distribution. - Must be the same length as value_types. - :param weights: A list of weights for each value in the distribution. - If not provided, all values are equally weighted (set to 1). - Must be the same length as value_types. 
- :param include_cdf: Whether to include the calculated cumulative distribution - function (CDF) in the output StatusDistributionSummary. - :return: An instance of StatusDistributionSummary with calculated values. - """ - if any( - type_ not in {"successful", "incomplete", "error"} for type_ in value_types - ): - raise ValueError( - "value_types must be one of 'successful', 'incomplete', or 'error'. " - f"Got {value_types} instead.", - ) - - if weights is None: - weights = [1.0] * len(values) - - if len(value_types) != len(values) or len(value_types) != len(weights): - raise ValueError( - "The length of value_types, values, and weights must be the same.", - ) - - _, successful_values, successful_weights = ( - zip(*successful) - if ( - successful := list( - filter( - lambda val: val[0] == "successful", - zip(value_types, values, weights), - ) - ) - ) - else ([], [], []) - ) - _, incomplete_values, incomplete_weights = ( - zip(*incomplete) - if ( - incomplete := list( - filter( - lambda val: val[0] == "incomplete", - zip(value_types, values, weights), - ) - ) - ) - else ([], [], []) - ) - _, errored_values, errored_weights = ( - zip(*errored) - if ( - errored := list( - filter( - lambda val: val[0] == "error", - zip(value_types, values, weights), - ) - ) - ) - else ([], [], []) - ) - - return StatusDistributionSummary( - total=DistributionSummary.from_values( - values, - weights, - include_cdf=include_cdf, - ), - successful=DistributionSummary.from_values( - successful_values, # type: ignore[arg-type] - successful_weights, # type: ignore[arg-type] - include_cdf=include_cdf, - ), - incomplete=DistributionSummary.from_values( - incomplete_values, # type: ignore[arg-type] - incomplete_weights, # type: ignore[arg-type] - include_cdf=include_cdf, - ), - errored=DistributionSummary.from_values( - errored_values, # type: ignore[arg-type] - errored_weights, # type: ignore[arg-type] - include_cdf=include_cdf, - ), - ) - - @staticmethod - def from_request_times( - request_types: list[Literal["successful", "incomplete", "error"]], - requests: list[tuple[float, float]], - distribution_type: Literal["concurrency", "rate"], - include_cdf: bool = False, - epsilon: float = 1e-6, - ) -> "StatusDistributionSummary": - """ - Create a statistical summary by status for given distribution of request times. - This is used to measure the distribution of request times for different statuses - (e.g., successful, incomplete, error) for concurrency and rates. - This will call into DistributionSummary.from_request_times to calculate - the statistics for each status. - - :param request_types: List of status types for each request in the distribution. - Must be one of 'successful', 'incomplete', or 'error'. - :param requests: A list of tuples representing the start and end times of - each request. Example: [(start_1, end_1), (start_2, end_2), ...]. - Must be the same length as request_types. - :param distribution_type: The type of distribution to calculate. - Either "concurrency" or "rate". - :param include_cdf: Whether to include the calculated cumulative distribution - function (CDF) in the output StatusDistributionSummary. - :param epsilon: The epsilon value for merging close events. - :return: An instance of StatusDistributionSummary with calculated values. - """ - if distribution_type not in {"concurrency", "rate"}: - raise ValueError( - f"Invalid distribution_type '{distribution_type}'. " - "Must be 'concurrency' or 'rate'." 
- ) - - if any( - type_ not in {"successful", "incomplete", "error"} - for type_ in request_types - ): - raise ValueError( - "request_types must be one of 'successful', 'incomplete', or 'error'. " - f"Got {request_types} instead.", - ) - - if len(request_types) != len(requests): - raise ValueError( - "The length of request_types and requests must be the same. " - f"Got {len(request_types)} and {len(requests)} instead.", - ) - - _, successful_requests = ( - zip(*successful) - if ( - successful := list( - filter( - lambda val: val[0] == "successful", - zip(request_types, requests), - ) - ) - ) - else ([], []) - ) - _, incomplete_requests = ( - zip(*incomplete) - if ( - incomplete := list( - filter( - lambda val: val[0] == "incomplete", - zip(request_types, requests), - ) - ) - ) - else ([], []) - ) - _, errored_requests = ( - zip(*errored) - if ( - errored := list( - filter( - lambda val: val[0] == "error", - zip(request_types, requests), - ) - ) - ) - else ([], []) - ) - - return StatusDistributionSummary( - total=DistributionSummary.from_request_times( - requests, - distribution_type=distribution_type, - include_cdf=include_cdf, - epsilon=epsilon, - ), - successful=DistributionSummary.from_request_times( - successful_requests, # type: ignore[arg-type] - distribution_type=distribution_type, - include_cdf=include_cdf, - epsilon=epsilon, - ), - incomplete=DistributionSummary.from_request_times( - incomplete_requests, # type: ignore[arg-type] - distribution_type=distribution_type, - include_cdf=include_cdf, - epsilon=epsilon, - ), - errored=DistributionSummary.from_request_times( - errored_requests, # type: ignore[arg-type] - distribution_type=distribution_type, - include_cdf=include_cdf, - epsilon=epsilon, - ), - ) - - @staticmethod - def from_iterable_request_times( - request_types: list[Literal["successful", "incomplete", "error"]], - requests: list[tuple[float, float]], - first_iter_times: list[float], - iter_counts: Optional[list[int]] = None, - first_iter_counts: Optional[list[int]] = None, - include_cdf: bool = False, - epsilon: float = 1e-6, - ) -> "StatusDistributionSummary": - """ - Create a statistical summary by status for given distribution of request times - for a request with iterable responses between the start and end. - For example, this is used to measure auto regressive requests where - a request is started and at some later point, iterative responses are - received. This will call into DistributionSummary.from_iterable_request_times - to calculate the statistics for each status. - - :param request_types: List of status types for each request in the distribution. - Must be one of 'successful', 'incomplete', or 'error'. - :param requests: A list of tuples representing the start and end times of - each request. Example: [(start_1, end_1), (start_2, end_2), ...]. - Must be the same length as request_types. - :param first_iter_times: A list of times when the first iteration of - each request was received. Must be the same length as requests. - :param iter_counts: A list of the total number of iterations for each - request that occurred starting at the first iteration and ending - at the request end time. Must be the same length as requests. - If not provided, defaults to 1 for each request. - :param first_iter_counts: A list of the number of iterations to log - for the first iteration of each request. For example, when calculating - total number of tokens processed, this is set to the prompt tokens number. - If not provided, defaults to 1 for each request. 
- :param include_cdf: Whether to include the calculated cumulative distribution - function (CDF) in the output StatusDistributionSummary. - :param epsilon: The epsilon value for merging close events. - :return: An instance of StatusDistributionSummary with calculated values. - """ - if any( - type_ not in {"successful", "incomplete", "error"} - for type_ in request_types - ): - raise ValueError( - "request_types must be one of 'successful', 'incomplete', or 'error'. " - f"Got {request_types} instead.", - ) - - if iter_counts is None: - iter_counts = [1] * len(requests) - - if first_iter_counts is None: - first_iter_counts = [1] * len(requests) - - if ( - len(request_types) != len(requests) - or len(requests) != len(first_iter_times) - or len(requests) != len(iter_counts) - or len(requests) != len(first_iter_counts) - ): - raise ValueError( - "request_types, requests, first_iter_times, iter_counts, and " - "first_iter_counts must be the same length." - f"Given {len(request_types)}, {len(requests)}, " - f"{len(first_iter_times)}, {len(iter_counts)}, " - f"{len(first_iter_counts)}", - ) - - ( - _, - successful_requests, - successful_first_iter_times, - successful_iter_counts, - successful_first_iter_counts, - ) = ( - zip(*successful) - if ( - successful := list( - filter( - lambda val: val[0] == "successful", - zip( - request_types, - requests, - first_iter_times, - iter_counts, - first_iter_counts, - ), - ) - ) - ) - else ([], [], [], [], []) - ) - ( - _, - incomplete_requests, - incomplete_first_iter_times, - incomplete_iter_counts, - incomplete_first_iter_counts, - ) = ( - zip(*incomplete) - if ( - incomplete := list( - filter( - lambda val: val[0] == "incomplete", - zip( - request_types, - requests, - first_iter_times, - iter_counts, - first_iter_counts, - ), - ) - ) - ) - else ([], [], [], [], []) - ) - ( - _, - errored_requests, - errored_first_iter_times, - errored_iter_counts, - errored_first_iter_counts, - ) = ( - zip(*errored) - if ( - errored := list( - filter( - lambda val: val[0] == "error", - zip( - request_types, - requests, - first_iter_times, - iter_counts, - first_iter_counts, - ), - ) - ) - ) - else ([], [], [], [], []) - ) - - return StatusDistributionSummary( - total=DistributionSummary.from_iterable_request_times( - requests, - first_iter_times, - iter_counts, - first_iter_counts, - include_cdf=include_cdf, - epsilon=epsilon, - ), - successful=DistributionSummary.from_iterable_request_times( - successful_requests, # type: ignore[arg-type] - successful_first_iter_times, # type: ignore[arg-type] - successful_iter_counts, # type: ignore[arg-type] - successful_first_iter_counts, # type: ignore[arg-type] - include_cdf=include_cdf, - epsilon=epsilon, - ), - incomplete=DistributionSummary.from_iterable_request_times( - incomplete_requests, # type: ignore[arg-type] - incomplete_first_iter_times, # type: ignore[arg-type] - incomplete_iter_counts, # type: ignore[arg-type] - incomplete_first_iter_counts, # type: ignore[arg-type] - include_cdf=include_cdf, - epsilon=epsilon, - ), - errored=DistributionSummary.from_iterable_request_times( - errored_requests, # type: ignore[arg-type] - errored_first_iter_times, # type: ignore[arg-type] - errored_iter_counts, # type: ignore[arg-type] - errored_first_iter_counts, # type: ignore[arg-type] - include_cdf=include_cdf, - epsilon=epsilon, - ), - ) - - -class RunningStats(StandardBaseModel): - """ - Create a running statistics object to track the mean, rate, and other - statistics of a stream of values. - 1. 
The start time is set to the time the object is created. - 2. The count is set to 0. - 3. The total is set to 0. - 4. The last value is set to 0. - 5. The mean is calculated as the total / count. - """ - - start_time: float = Field( - default_factory=timer.time, - description=( - "The time the running statistics object was created. " - "This is used to calculate the rate of the statistics." - ), - ) - count: int = Field( - default=0, - description="The number of values added to the running statistics.", - ) - total: float = Field( - default=0.0, - description="The total sum of the values added to the running statistics.", - ) - last: float = Field( - default=0.0, - description="The last value added to the running statistics.", - ) - - @computed_field # type: ignore[misc] - @property - def mean(self) -> float: - """ - :return: The mean of the running statistics (total / count). - If count is 0, return 0.0. - """ - if self.count == 0: - return 0.0 - return self.total / self.count - - @computed_field # type: ignore[misc] - @property - def rate(self) -> float: - """ - :return: The rate of the running statistics - (total / (time.time() - start_time)). - If count is 0, return 0.0. - """ - if self.count == 0: - return 0.0 - return self.total / (timer.time() - self.start_time) - - def __add__(self, value: Any) -> float: - """ - Enable the use of the + operator to add a value to the running statistics. - - :param value: The value to add to the running statistics. - :return: The mean of the running statistics. - """ - if not isinstance(value, (int, float)): - raise ValueError( - f"Value must be an int or float, got {type(value)} instead.", - ) - - self.update(value) - - return self.mean - - def __iadd__(self, value: Any) -> "RunningStats": - """ - Enable the use of the += operator to add a value to the running statistics. - - :param value: The value to add to the running statistics. - :return: The running statistics object. - """ - if not isinstance(value, (int, float)): - raise ValueError( - f"Value must be an int or float, got {type(value)} instead.", - ) - - self.update(value) - - return self - - def update(self, value: float, count: int = 1) -> None: - """ - Update the running statistics with a new value. - - :param value: The new value to add to the running statistics. - :param count: The number of times to 'count' for the value. - If not provided, defaults to 1. - """ - self.count += count - self.total += value - self.last = value - - -class TimeRunningStats(RunningStats): - """ - Create a running statistics object to track the mean, rate, and other - statistics of a stream of time values. This is used to track time values - in milliseconds and seconds. - - Adds time specific computed_fields such as measurements in milliseconds and seconds. - """ - - @computed_field # type: ignore[misc] - @property - def total_ms(self) -> float: - """ - :return: The total time multiplied by 1000.0 to convert to milliseconds. - """ - return self.total * 1000.0 - - @computed_field # type: ignore[misc] - @property - def last_ms(self) -> float: - """ - :return: The last time multiplied by 1000.0 to convert to milliseconds. - """ - return self.last * 1000.0 - - @computed_field # type: ignore[misc] - @property - def mean_ms(self) -> float: - """ - :return: The mean time multiplied by 1000.0 to convert to milliseconds. 
- """ - return self.mean * 1000.0 - - @computed_field # type: ignore[misc] - @property - def rate_ms(self) -> float: - """ - :return: The rate of the running statistics multiplied by 1000.0 - to convert to milliseconds. - """ - return self.rate * 1000.0 diff --git a/src/guidellm/presentation/builder.py b/src/guidellm/presentation/builder.py index a27d7cec..6ea9c5c3 100644 --- a/src/guidellm/presentation/builder.py +++ b/src/guidellm/presentation/builder.py @@ -1,9 +1,9 @@ from typing import TYPE_CHECKING, Any if TYPE_CHECKING: - from guidellm.benchmark.benchmark import GenerativeBenchmark + from guidellm.benchmark import GenerativeBenchmark -from .data_models import BenchmarkDatum, RunInfo, WorkloadDetails +from guidellm.presentation.data_models import BenchmarkDatum, RunInfo, WorkloadDetails class UIDataBuilder: diff --git a/src/guidellm/presentation/data_models.py b/src/guidellm/presentation/data_models.py index 989ca8ab..9036636a 100644 --- a/src/guidellm/presentation/data_models.py +++ b/src/guidellm/presentation/data_models.py @@ -6,9 +6,9 @@ from pydantic import BaseModel, computed_field if TYPE_CHECKING: - from guidellm.benchmark.benchmark import GenerativeBenchmark + from guidellm.benchmark import GenerativeBenchmark -from guidellm.objects.statistics import DistributionSummary +from guidellm.utils import DistributionSummary class Bucket(BaseModel): diff --git a/src/guidellm/request/loader.py b/src/guidellm/request/loader.py index 1c875046..607a7455 100644 --- a/src/guidellm/request/loader.py +++ b/src/guidellm/request/loader.py @@ -12,9 +12,9 @@ from transformers import PreTrainedTokenizerBase # type: ignore[import] from guidellm.dataset import ColumnInputTypes, load_dataset -from guidellm.objects import StandardBaseModel from guidellm.request.request import GenerationRequest from guidellm.settings import settings +from guidellm.utils import StandardBaseModel __all__ = [ "GenerativeRequestLoader", diff --git a/src/guidellm/request/request.py b/src/guidellm/request/request.py index 81c8cabd..bf4e59fb 100644 --- a/src/guidellm/request/request.py +++ b/src/guidellm/request/request.py @@ -3,7 +3,7 @@ from pydantic import Field -from guidellm.objects.pydantic import StandardBaseModel +from guidellm.utils import StandardBaseModel __all__ = ["GenerationRequest"] diff --git a/src/guidellm/settings.py b/src/guidellm/settings.py index 72178425..20d9ff96 100644 --- a/src/guidellm/settings.py +++ b/src/guidellm/settings.py @@ -1,8 +1,9 @@ +from __future__ import annotations + import json -import os from collections.abc import Sequence from enum import Enum -from typing import Literal, Optional +from typing import Literal from pydantic import BaseModel, Field, model_validator from pydantic_settings import BaseSettings, SettingsConfigDict @@ -46,8 +47,8 @@ class LoggingSettings(BaseModel): disabled: bool = False clear_loggers: bool = True console_log_level: str = "WARNING" - log_file: Optional[str] = None - log_file_level: Optional[str] = None + log_file: str | None = None + log_file_level: str | None = None class DatasetSettings(BaseModel): @@ -80,11 +81,11 @@ class OpenAISettings(BaseModel): for OpenAI server based pathways """ - api_key: Optional[str] = None - bearer_token: Optional[str] = None - headers: Optional[dict[str, str]] = None - organization: Optional[str] = None - project: Optional[str] = None + api_key: str | None = None + bearer_token: str | None = None + headers: dict[str, str] | None = None + organization: str | None = None + project: str | None = None base_url: str = 
"http://localhost:8000" max_output_tokens: int = 16384 verify: bool = True @@ -131,24 +132,30 @@ class Settings(BaseSettings): request_http2: bool = True # Scheduler settings + mp_context_type: Literal["spawn", "fork", "forkserver"] | None = "fork" + mp_serialization: Literal["dict", "sequence"] | None = "dict" + mp_encoding: ( + Literal["msgpack", "msgspec"] + | None + | list[Literal["msgpack", "msgspec"] | None] + ) = ["msgspec", "msgpack", None] + mp_messaging_object: Literal["queue", "manager_queue", "pipe"] = "queue" + mp_requests_send_buffer_size: int = 1 + mp_poll_interval: float = 0.1 + mp_max_pending_buffer_percent: float = 0.5 + mp_max_worker_buffer_percent: float = 0.2 max_concurrency: int = 512 - max_worker_processes: int = Field( - # use number of CPUs - 1, but at least 10 - default_factory=lambda: max((os.cpu_count() or 1) - 1, 10) - ) - min_queued_requests: int = 20 - scheduler_start_delay: float = 5 + max_worker_processes: int = 10 + scheduler_start_delay_non_distributed: float = 1.0 + constraint_error_window_size: float = 30 + constraint_error_min_processed: float = 30 # Data settings dataset: DatasetSettings = DatasetSettings() # Request/stats settings - preferred_prompt_tokens_source: Optional[ - Literal["request", "response", "local"] - ] = "response" - preferred_output_tokens_source: Optional[ - Literal["request", "response", "local"] - ] = "response" + preferred_prompt_tokens_source: Literal["request", "response"] = "response" + preferred_output_tokens_source: Literal["request", "response"] = "response" preferred_backend: Literal["openai"] = "openai" preferred_route: Literal["text_completions", "chat_completions"] = ( "text_completions" diff --git a/src/guidellm/utils/typing.py b/src/guidellm/utils/typing.py new file mode 100644 index 00000000..8146ea1e --- /dev/null +++ b/src/guidellm/utils/typing.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +from collections.abc import Iterator +from typing import Annotated, Literal, Union, get_args, get_origin + +# Backwards compatibility for Python <3.10 +try: + from types import UnionType # type: ignore[attr-defined] +except ImportError: + UnionType = Union + +# Backwards compatibility for Python <3.12 +try: + from typing import TypeAliasType # type: ignore[attr-defined] +except ImportError: + from typing_extensions import TypeAliasType + + +__all__ = ["get_literal_vals"] + + +def get_literal_vals(alias) -> frozenset[str]: + """Extract all literal values from a (possibly nested) type alias.""" + + def resolve(alias) -> Iterator[str]: + origin = get_origin(alias) + + # Base case: Literal types + if origin is Literal: + for literal_val in get_args(alias): + yield str(literal_val) + # Unwrap Annotated type + elif origin is Annotated: + yield from resolve(get_args(alias)[0]) + # Unwrap TypeAliasTypes + elif isinstance(alias, TypeAliasType): + yield from resolve(alias.__value__) + # Iterate over unions + elif origin in (Union, UnionType): + for arg in get_args(alias): + yield from resolve(arg) + # Fallback + else: + yield str(alias) + + return frozenset(resolve(alias)) diff --git a/tests/integration/scheduler/__init__.py b/tests/integration/scheduler/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/scheduler/test_scheduler.py b/tests/integration/scheduler/test_scheduler.py new file mode 100644 index 00000000..51abf59b --- /dev/null +++ b/tests/integration/scheduler/test_scheduler.py @@ -0,0 +1,177 @@ +from __future__ import annotations + +import asyncio +import random +import uuid 
+from collections import defaultdict +from functools import wraps +from typing import Any + +import pytest +from pydantic import BaseModel, Field + +from guidellm.scheduler import ( + BackendInterface, + ConstraintInitializer, + Environment, + MaxNumberConstraint, + NonDistributedEnvironment, + ScheduledRequestInfo, + Scheduler, + SchedulerState, + SchedulingStrategy, + SynchronousStrategy, +) + + +def async_timeout(delay: float): + """Decorator to add timeout to async test functions.""" + + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +class MockRequest(BaseModel): + payload: str + id_: str = Field(default_factory=lambda: str(uuid.uuid4())) + + +class MockBackend(BackendInterface): + """Mock backend for integration testing with predictable responses.""" + + def __init__( + self, + processes_limit_value: int | None = None, + requests_limit_value: int | None = None, + error_rate: float = 0.2, + response_delay: float = 0.0, + ): + self._processes_limit = processes_limit_value + self._requests_limit = requests_limit_value + self._error_rate = error_rate + self._response_delay = response_delay + + @property + def processes_limit(self) -> int | None: + return self._processes_limit + + @property + def requests_limit(self) -> int | None: + return self._requests_limit + + def info(self) -> dict[str, Any]: + return {"type": "mock_integration", "delay": self._response_delay} + + async def process_startup(self): + pass + + async def validate(self): + pass + + async def process_shutdown(self): + pass + + async def resolve(self, request: MockRequest, request_info, request_history): + """Return predictable response based on input request.""" + await asyncio.sleep(self._response_delay) + + if ( + self._error_rate + and self._error_rate > 0 + and random.random() < self._error_rate + ): + raise RuntimeError(f"mock_error_for_{request.payload}") + + yield f"response_for_{request.payload}", request_info + + +@pytest.mark.smoke +@pytest.mark.asyncio +@async_timeout(10.0) +@pytest.mark.parametrize( + ("strategy", "env", "constraint_inits"), + [ + ( + SynchronousStrategy(), + NonDistributedEnvironment(), + {"max_number": MaxNumberConstraint(max_num=100)}, + ), + ], +) +async def test_scheduler_run_integration( + strategy: SchedulingStrategy, + env: Environment, + constraint_inits: dict[str, ConstraintInitializer], +): + """Integration test for full scheduler workflow.""" + # Clear singleton state + if hasattr(Scheduler, "singleton_instance"): + Scheduler.singleton_instance = None + + scheduler = Scheduler() + constraints = { + key: init.create_constraint() for key, init in constraint_inits.items() + } + received_updates = defaultdict(list) + received_responses = [] + last_state = None + num_requests = 50 + + async for resp, req, info, state in scheduler.run( + requests=[MockRequest(payload=f"req_{ind}") for ind in range(num_requests)], + backend=MockBackend(), + strategy=strategy, + env=env, + **constraints, + ): + assert req is not None + assert isinstance(req, MockRequest) + assert isinstance(info, ScheduledRequestInfo) + assert info.status != "cancelled" + assert isinstance(state, SchedulerState) + if info.status == "completed": + assert resp == f"response_for_{req.payload}" + received_responses.append(resp) + elif info.status == "errored": + assert resp is None + assert info.error is not None + assert info.error == f"mock_error_for_{req.payload}" + 
received_responses.append(info.error) + + if len(received_updates[req.payload]) < 3: + received_updates[req.payload].append(info.status) + last_state = state + + assert len(received_updates) == num_requests + assert len(received_responses) == constraints["max_number"].max_num + assert last_state.created_requests == constraints["max_number"].max_num + assert last_state.queued_requests == 0 + assert last_state.processing_requests == 0 + assert last_state.processed_requests == constraints["max_number"].max_num + assert last_state.cancelled_requests == 0 + assert ( + last_state.successful_requests + last_state.errored_requests + ) == constraints["max_number"].max_num + + def _request_indices(): + while True: + yield from range(num_requests) + + for index, req, statuses, resp in zip( + _request_indices(), + received_updates.keys(), + received_updates.values(), + received_responses, + ): + assert req == f"req_{index}" + assert resp in (f"response_for_{req}", f"mock_error_for_{req}") + assert statuses in ( + ["queued", "in_progress", "completed"], + ["queued", "in_progress", "errored"], + ) diff --git a/tests/integration/scheduler/test_worker_group.py b/tests/integration/scheduler/test_worker_group.py new file mode 100644 index 00000000..c3be2b99 --- /dev/null +++ b/tests/integration/scheduler/test_worker_group.py @@ -0,0 +1,181 @@ +""" +Integration tests for WorkerProcessGroup. + +Tests the complete lifecycle of the worker group with real multiprocessing +worker processes and a mock backend. Validates end-to-end functionality +across different scheduling strategies and constraints. +""" + +from __future__ import annotations + +import asyncio +import random +import time +from collections import defaultdict +from functools import wraps +from typing import Any + +import pytest + +from guidellm.scheduler import ( + AsyncConstantStrategy, + AsyncPoissonStrategy, + BackendInterface, + ConcurrentStrategy, + MaxDurationConstraint, + MaxErrorRateConstraint, + MaxErrorsConstraint, + MaxGlobalErrorRateConstraint, + MaxNumberConstraint, + MeasuredRequestTimings, + SynchronousStrategy, + ThroughputStrategy, + WorkerProcessGroup, +) +from guidellm.scheduler.constraints import ConstraintInitializer +from guidellm.scheduler.strategies import SchedulingStrategy + + +def async_timeout(delay): + def decorator(func): + @wraps(func) + async def new_func(*args, **kwargs): + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + + return new_func + + return decorator + + +class MockRequestTimings(MeasuredRequestTimings): + """Mock timing implementation for integration testing.""" + + +class MockBackend(BackendInterface): + """Mock backend for integration testing with predictable responses.""" + + def __init__( + self, + processes_limit_value: int | None = None, + requests_limit_value: int | None = None, + error_rate: float = 0.2, + response_delay: float = 0.0, + ): + self._processes_limit = processes_limit_value + self._requests_limit = requests_limit_value + self._error_rate = error_rate + self._response_delay = response_delay + + @property + def processes_limit(self) -> int | None: + return self._processes_limit + + @property + def requests_limit(self) -> int | None: + return self._requests_limit + + def info(self) -> dict[str, Any]: + return {"type": "mock_integration", "delay": self._response_delay} + + async def process_startup(self): + pass + + async def validate(self): + pass + + async def process_shutdown(self): + pass + + async def resolve(self, request, request_info, request_history): + 
"""Return predictable response based on input request.""" + # Simulate processing time + await asyncio.sleep(self._response_delay) + + if ( + self._error_rate + and self._error_rate > 0 + and random.random() < self._error_rate + ): + raise RuntimeError("Mock error for testing") + + yield f"response_for_{request}", request_info + + +class TestWorkerGroup: + @pytest.mark.smoke + @pytest.mark.asyncio + @async_timeout(5) + @pytest.mark.parametrize( + "strategy", + [ + SynchronousStrategy(), + ConcurrentStrategy(streams=10), + ThroughputStrategy(max_concurrency=20), + AsyncConstantStrategy(rate=1000.0), + AsyncPoissonStrategy(rate=1000.0), + ], + ) + @pytest.mark.parametrize( + "constraints_inits", + [ + {"max_num": MaxNumberConstraint(max_num=100)}, + {"max_duration": MaxDurationConstraint(max_duration=0.5)}, + {"max_errors": MaxErrorsConstraint(max_errors=20)}, + {"max_error_rate": MaxErrorRateConstraint(max_error_rate=0.1)}, + {"max_global_error_rate": MaxGlobalErrorRateConstraint(max_error_rate=0.1)}, + ], + ) + async def test_lifecycle( + self, + strategy: SchedulingStrategy, + constraints_inits: dict[str, ConstraintInitializer], + ): + """Test comprehensive lifecycle with different strategies and constraints.""" + # Setup + backend = MockBackend(response_delay=0.01, processes_limit_value=1) + requests = [f"request_{ind}" for ind in range(1000)] + group = WorkerProcessGroup( + backend=backend, + requests=requests, + strategy=strategy, + constraints={ + key: init.create_constraint() for key, init in constraints_inits.items() + }, + infinite_requests=False, + ) + + try: + # Create processes + await group.create_processes() + assert group.processes is not None + assert len(group.processes) > 0 + assert group.mp_context is not None + + # Start processing + start_time = time.time() + 0.1 + await group.start(start_time) + actual_start = time.time() + assert actual_start == pytest.approx(start_time) + + # Validate scheduler state + assert group.scheduler_state is not None + assert group.scheduler_state.start_time == start_time + assert group.scheduler_state.num_processes == len(group.processes) + + # Collect all request updates + received_updates = defaultdict(list) + received_responses = [] + + async for ( + response, + request, + request_info, + _state, + ) in group.request_updates(): + received_updates[request].append(request_info.status) + if response is not None: + received_responses.append(response) + finally: + # Clean shutdown + exceptions = await group.shutdown() + assert len(exceptions) == 0, f"Shutdown errors: {exceptions}" diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 92bb89e1..e69de29b 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,195 +0,0 @@ -import json -from collections.abc import AsyncIterable -from typing import Any, Literal, Optional -from unittest.mock import MagicMock, patch - -import httpx -import pytest -import respx - -from guidellm.backends import ResponseSummary, StreamingTextResponse - -from .mock_backend import MockBackend - - -@pytest.fixture -def mock_auto_tokenizer(): - with patch("transformers.AutoTokenizer.from_pretrained") as mock_from_pretrained: - - def _fake_tokenize(text: str) -> list[int]: - tokens = text.split() - return [0] * len(tokens) - - mock_tokenizer = MagicMock() - mock_tokenizer.tokenize = MagicMock(side_effect=_fake_tokenize) - mock_from_pretrained.return_value = mock_tokenizer - yield mock_tokenizer - - -@pytest.fixture -def mock_backend(request): - params = request.param if hasattr(request, "param") 
else {} - kwargs = {} - - for key in ("model", "target", "iter_delay"): - if key in params: - kwargs[key] = params[key] - - return MockBackend(**kwargs) - - -class MockCompletionsIter(AsyncIterable): - def __init__( - self, - type_: Literal["text", "chat"], - prompt: str, - output_token_count: Optional[int], - target: Optional[str] = None, - model: Optional[str] = None, - iter_delay: Optional[float] = None, - ): - self._type = type_ - self._backend = MockBackend( - model=model, - target=target, - iter_delay=iter_delay, - ) - self._prompt = prompt - self._output_token_count = output_token_count - - async def __aiter__(self): - async for token_iter in ( - self._backend.text_completions( - prompt=self._prompt, output_token_count=self._output_token_count - ) - if self._type == "text" - else self._backend.chat_completions( - content=self._prompt, output_token_count=self._output_token_count - ) - ): - if ( - isinstance(token_iter, StreamingTextResponse) - and token_iter.type_ == "start" - ): - continue - - data: dict[str, Any] - - if isinstance(token_iter, StreamingTextResponse): - if self._type == "text": - data = { - "choices": [ - { - "index": token_iter.iter_count, - "text": token_iter.delta, - } - ] - } - elif self._type == "chat": - data = { - "choices": [ - { - "index": token_iter.iter_count, - "delta": {"content": token_iter.delta}, - } - ] - } - else: - raise ValueError("Invalid type for mock completions") - elif isinstance(token_iter, ResponseSummary): - data = { - "usage": { - "prompt_tokens": ( - len(self._prompt.split()) + self._prompt.count(" ") - ), - "completion_tokens": token_iter.response_output_tokens, - } - } - else: - raise ValueError("Invalid token_iter type") - - yield f"data: {json.dumps(data)}\n".encode() - - yield b"data: [DONE]\n" - - -@pytest.fixture -def httpx_openai_mock(request): - params = request.param if hasattr(request, "param") else {} - model = params.get("model", "mock-model") - target = params.get("target", "http://target.mock") - iter_delay = params.get("iter_delay", None) - - with respx.mock(assert_all_mocked=True, assert_all_called=False) as mock_router: - - async def _mock_completions_response(request) -> AsyncIterable[str]: - headers = request.headers - payload = json.loads(request.content) - - assert headers["Content-Type"] == "application/json" - assert payload["model"] == model - assert payload["stream"] is True - assert payload["stream_options"] == {"include_usage": True} - assert payload["prompt"] is not None - assert len(payload["prompt"]) > 0 - assert payload["max_completion_tokens"] > 0 - assert payload["max_tokens"] > 0 - - return httpx.Response( # type: ignore - 200, - stream=MockCompletionsIter( # type: ignore - type_="text", - prompt=payload["prompt"], - output_token_count=( - payload["max_completion_tokens"] - if payload.get("ignore_eos", False) - else None - ), - target=target, - model=model, - iter_delay=iter_delay, - ), - ) - - async def _mock_chat_completions_response(request): - headers = request.headers - payload = json.loads(request.content) - - assert headers["Content-Type"] == "application/json" - assert payload["model"] == model - assert payload["stream"] is True - assert payload["stream_options"] == {"include_usage": True} - assert payload["messages"] is not None - assert len(payload["messages"]) > 0 - assert payload["max_completion_tokens"] > 0 - assert payload["max_tokens"] > 0 - - return httpx.Response( # type: ignore - 200, - stream=MockCompletionsIter( # type: ignore - type_="chat", - 
prompt=payload["messages"][0]["content"], - output_token_count=( - payload["max_completion_tokens"] - if payload.get("ignore_eos", False) - else None - ), - target=target, - model=model, - iter_delay=iter_delay, - ), - ) - - mock_router.route(method="GET", path="/v1/models").mock( - return_value=httpx.Response( - 200, json={"data": [{"id": model} if model else {"id": "mock-model"}]} - ) - ) - mock_router.route(method="POST", path="/v1/completions").mock( - side_effect=_mock_completions_response # type: ignore - ) - mock_router.route(method="POST", path="/v1/chat/completions").mock( - side_effect=_mock_chat_completions_response - ) - - yield mock_router diff --git a/tests/unit/mock_backend.py b/tests/unit/mock_backend.py index 6080a9d1..5ac069a8 100644 --- a/tests/unit/mock_backend.py +++ b/tests/unit/mock_backend.py @@ -1,172 +1,184 @@ +""" +Mock backend implementation for testing purposes. +""" + import asyncio import random import time -from collections.abc import AsyncGenerator -from pathlib import Path -from typing import Any, Optional, Union - -from lorem.text import TextLorem # type: ignore -from PIL import Image - -from guidellm.backends import ( - Backend, - RequestArgs, - ResponseSummary, - StreamingTextResponse, +from collections.abc import AsyncIterator +from typing import Any, Optional + +from lorem.text import TextLorem + +from guidellm.backend.backend import Backend +from guidellm.backend.objects import ( + GenerationRequest, + GenerationRequestTimings, + GenerationResponse, ) +from guidellm.scheduler import ScheduledRequestInfo -@Backend.register("mock") # type: ignore +@Backend.register("mock") class MockBackend(Backend): + """ + Mock backend for testing that simulates text generation. + + Provides predictable responses with configurable delays and token counts + for testing the backend interface without requiring an actual LLM service. + """ + def __init__( self, - model: Optional[str] = "mock-model", - target: Optional[str] = "mock-target", + target: str = "mock-target", + model: str = "mock-model", iter_delay: Optional[float] = None, ): - super().__init__(type_="mock") # type: ignore + """ + Initialize mock backend. + + :param model: Model name to simulate. + :param target: Target URL to simulate. + :param iter_delay: Delay between iterations in seconds. + """ + super().__init__(type_="mock") # type: ignore [reportCallIssue] self._model = model self._target = target self._iter_delay = iter_delay + self._in_process = False @property def target(self) -> str: - return self._target # type: ignore + """Target URL for the mock backend.""" + return self._target @property def model(self) -> Optional[str]: + """Model name for the mock backend.""" return self._model - @property def info(self) -> dict[str, Any]: - return {} - - async def reset(self) -> None: - pass - - async def prepare_multiprocessing(self): - pass - - async def check_setup(self): - pass - - async def available_models(self) -> list[str]: - return [self.model] # type: ignore + """ + Return mock backend configuration information. + """ + return { + "type": "mock", + "model": self._model, + "target": self._target, + "iter_delay": self._iter_delay, + } + + async def process_startup(self) -> None: + """ + Initialize the mock backend process. + """ + self._in_process = True + + async def process_shutdown(self) -> None: + """ + Shutdown the mock backend process. + """ + self._in_process = False + + async def validate(self) -> None: + """ + Validate the mock backend configuration. 
+ """ + if not self._in_process: + raise RuntimeError("Backend not started up for process") + + async def default_model(self) -> Optional[str]: + """ + Return the default model for the mock backend. + """ + return self._model - async def text_completions( # type: ignore + async def resolve( self, - prompt: Union[str, list[str]], - request_id: Optional[str] = None, - prompt_token_count: Optional[int] = None, - output_token_count: Optional[int] = None, - **kwargs, - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: - if not isinstance(prompt, str) or not prompt: - raise ValueError("Prompt must be a non-empty string") - - async for response in self._text_prompt_response_generator( - prompt, - request_id, - prompt_token_count, - output_token_count, - ): - yield response - - async def chat_completions( # type: ignore - self, - content: Union[ - str, - list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], - Any, - ], - request_id: Optional[str] = None, - prompt_token_count: Optional[int] = None, - output_token_count: Optional[int] = None, - raw_content: bool = False, - **kwargs, - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: - if not isinstance(content, str) or not content: - raise ValueError("Content must be a non-empty string") - - async for response in self._text_prompt_response_generator( - content, - request_id, - prompt_token_count, - output_token_count, - ): - yield response - - async def _text_prompt_response_generator( - self, - prompt: str, - request_id: Optional[str], - prompt_token_count: Optional[int], - output_token_count: Optional[int], - ) -> AsyncGenerator[Union[StreamingTextResponse, ResponseSummary], None]: - tokens = self._get_tokens(output_token_count) - start_time = time.time() - - yield StreamingTextResponse( - type_="start", + request: GenerationRequest, + request_info: ScheduledRequestInfo, + history: Optional[list[tuple[GenerationRequest, GenerationResponse]]] = None, + ) -> AsyncIterator[tuple[GenerationResponse, ScheduledRequestInfo]]: + """ + Process a generation request and yield progressive responses. 
+ + ### WRITTEN BY AI ### + """ + if not self._in_process: + raise RuntimeError("Backend not started up for process") + + if history is not None: + raise NotImplementedError( + "Multi-turn requests not supported in mock backend" + ) + + # Extract token counts from request + prompt_tokens = request.stats.get("prompt_tokens") + output_tokens = request.constraints.get("output_tokens") + + # Generate mock tokens + tokens = self._get_tokens(output_tokens) + + # Initialize response + response = GenerationResponse( + request_id=request.request_id, + request_args={ + "request_type": request.request_type, + "output_token_count": output_tokens, + **request.params, + }, value="", - start_time=start_time, - first_iter_time=None, - iter_count=0, - delta="", - time=start_time, - request_id=request_id, + request_prompt_tokens=prompt_tokens, + request_output_tokens=output_tokens, ) - first_iter_time = None - last_iter_time = None + # Initialize timings + request_info.request_timings = GenerationRequestTimings() + request_info.request_timings.request_start = time.time() + # Generate response iteratively for index, token in enumerate(tokens): if self._iter_delay: await asyncio.sleep(self._iter_delay) - if first_iter_time is None: - first_iter_time = time.time() - - yield StreamingTextResponse( - type_="iter", - value="".join(tokens[: index + 1]), - start_time=start_time, - first_iter_time=first_iter_time, - iter_count=index + 1, - delta=token, - time=time.time(), - request_id=request_id, - ) + if request_info.request_timings.first_iteration is None: + request_info.request_timings.first_iteration = time.time() - last_iter_time = time.time() - - yield ResponseSummary( - value="".join(tokens), - request_args=RequestArgs( - target=self.target, - headers={}, - params={}, - payload={"prompt": prompt, "output_token_count": output_token_count}, - ), - iterations=len(tokens), - start_time=start_time, - end_time=time.time(), - first_iter_time=first_iter_time, - last_iter_time=last_iter_time, - request_prompt_tokens=prompt_token_count, - request_output_tokens=output_token_count, - response_prompt_tokens=len(prompt.split()) + prompt.count(" "), - response_output_tokens=len(tokens), - request_id=request_id, + response.value += token # type: ignore [reportOperatorIssue] + response.delta = token + response.iterations = index + 1 + request_info.request_timings.last_iteration = time.time() + + yield response, request_info + + # Final response with usage stats + request_info.request_timings.request_end = time.time() + response.response_prompt_tokens = prompt_tokens or self._estimate_prompt_tokens( + str(request.content) ) + response.response_output_tokens = len(tokens) + response.delta = None + + yield response, request_info + + @staticmethod + def _estimate_prompt_tokens(content: str) -> int: + """ + Estimate prompt tokens from content. + """ + # Simple word-based token estimation + return len(str(content).split()) @staticmethod def _get_tokens(token_count: Optional[int] = None) -> list[str]: + """ + Generate mock tokens for response. 
+ """ if token_count is None: token_count = random.randint(8, 512) words = TextLorem(srange=(token_count, token_count)).sentence().split() - tokens = [] # type: ignore + tokens = [] for word in words: if len(tokens) == token_count - 1: diff --git a/tests/unit/mock_benchmark.py b/tests/unit/mock_benchmark.py index 81364fa1..d846767d 100644 --- a/tests/unit/mock_benchmark.py +++ b/tests/unit/mock_benchmark.py @@ -1,271 +1,152 @@ +"""Mock benchmark objects for unit testing.""" + +from guidellm.backend import GenerationRequestTimings from guidellm.benchmark import ( - BenchmarkArgs, - BenchmarkRunStats, + BenchmarkSchedulerStats, GenerativeBenchmark, - GenerativeTextErrorStats, - GenerativeTextResponseStats, - SynchronousProfile, + GenerativeMetrics, + GenerativeRequestStats, ) -from guidellm.objects import StatusBreakdown -from guidellm.request import GenerativeRequestLoaderDescription -from guidellm.scheduler import ( - GenerativeRequestsWorkerDescription, - SchedulerRequestInfo, - SynchronousStrategy, +from guidellm.benchmark.objects import BenchmarkerDict, SchedulerDict +from guidellm.benchmark.profile import SynchronousProfile +from guidellm.scheduler import ScheduledRequestInfo, SchedulerState, SynchronousStrategy +from guidellm.utils import ( + DistributionSummary, + Percentiles, + StandardBaseDict, + StatusBreakdown, + StatusDistributionSummary, ) __all__ = ["mock_generative_benchmark"] +def _create_mock_percentiles() -> Percentiles: + """Create mock percentiles for testing.""" + return Percentiles( + p001=0.1, + p01=1.0, + p05=5.0, + p10=10.0, + p25=25.0, + p50=50.0, + p75=75.0, + p90=90.0, + p95=95.0, + p99=99.0, + p999=99.9, + ) + + +def _create_mock_distribution() -> DistributionSummary: + """Create mock distribution summary for testing.""" + return DistributionSummary( + mean=50.0, + median=50.0, + mode=50.0, + variance=10.0, + std_dev=3.16, + min=10.0, + max=100.0, + count=100, + total_sum=5000.0, + percentiles=_create_mock_percentiles(), + ) + + +def _create_status_dist() -> StatusDistributionSummary: + """Create mock status distribution summary for testing.""" + dist = _create_mock_distribution() + return StatusDistributionSummary( + successful=dist, + incomplete=dist, + errored=dist, + total=dist, + ) + + def mock_generative_benchmark() -> GenerativeBenchmark: - return GenerativeBenchmark.from_stats( - run_id="fa4a92c1-9a1d-4c83-b237-83fcc7971bd3", - successful=[ - GenerativeTextResponseStats( - request_id="181a63e2-dc26-4268-9cfc-2ed9279aae63", - request_type="text_completions", - scheduler_info=SchedulerRequestInfo( - requested=True, - completed=True, - errored=False, - canceled=False, - targeted_start_time=1744728125.203447, - queued_time=1744728125.204123, - dequeued_time=1744728125.2048807, - scheduled_time=1744728125.2048993, - worker_start=1744728125.2049701, - request_start=1744728125.2052872, - request_end=1744728126.7004411, - worker_end=1744728126.701175, - process_id=0, - ), - prompt="such a sacrifice to her advantage as years of gratitude cannot enough acknowledge. By this time she is actually with them! If such goodness does not make her miserable now, she will never deserve to be happy! What a meeting for her, when she first sees my aunt! We must endeavour to forget all that has passed on either side, said Jane I hope and trust they will yet be happy. His consenting to marry her is a proof, I will believe, that he is come to a right way of thinking. 
Their mutual affection will steady them; and I flatter myself they will settle so quietly, and live in so rational a manner", # noqa: E501 - output=", as to make their long life together very comfortable and very useful. I feel, if they and the honourable Mr. Thorpe, who still lives amongst us, should be all I need, I could perfectly rest happy. Writes to meet them in that kind of obedience which is necessary and honourable, and such", # noqa: E501 - prompt_tokens=128, - output_tokens=64, - start_time=1744728125.2052872, - end_time=1744728126.7004411, - first_token_time=1744728125.2473357, - last_token_time=1744728126.699908, - ), - GenerativeTextResponseStats( - request_id="8a7846d5-7624-420d-a269-831e568a848f", - request_type="text_completions", - scheduler_info=SchedulerRequestInfo( - requested=True, - completed=True, - errored=False, - canceled=False, - targeted_start_time=1744728125.204613, - queued_time=1744728125.2047558, - dequeued_time=1744728126.7025175, - scheduled_time=1744728126.7025256, - worker_start=1744728126.702579, - request_start=1744728126.7027814, - request_end=1744728128.1961868, - worker_end=1744728128.196895, - process_id=0, - ), - prompt="a reconciliation; and, after a little further resistance on the part of his aunt, her resentment gave way, either to her affection for him, or her curiosity to see how his wife conducted herself; and she condescended to wait on them at Pemberley, in spite of that pollution which its woods had received, not merely from the presence of such a mistress, but the visits of her uncle and aunt from the city. With the Gardiners they were always on the most intimate terms. Darcy, as well as Elizabeth, really loved them; and they were both ever sensible of the warmest gratitude towards the persons who,", # noqa: E501 - output=" in their own days of poverty, had been so hotel and hospitable to a young couple leaving Pemberley. Till the size of Mr. Bennet\u2019s salary had been altered, the blessing of their friendship was much more greatly needed by the family than it appeared after that event.\n- Mr. Darcy soon deserved", # noqa: E501 - prompt_tokens=128, - output_tokens=64, - start_time=1744728126.7027814, - end_time=1744728128.1961868, - first_token_time=1744728126.7526379, - last_token_time=1744728128.1956792, - ), - GenerativeTextResponseStats( - request_id="4cde0e6c-4531-4e59-aac1-07bc8b6e4139", - request_type="text_completions", - scheduler_info=SchedulerRequestInfo( - requested=True, - completed=True, - errored=False, - canceled=False, - targeted_start_time=1744728126.7031465, - queued_time=1744728126.7034643, - dequeued_time=1744728128.198447, - scheduled_time=1744728128.1984534, - worker_start=1744728128.198509, - request_start=1744728128.1986883, - request_end=1744728129.6919055, - worker_end=1744728129.692606, - process_id=0, - ), - prompt="struck her, that _she_ was selected from among her sisters as worthy of being the mistress of Hunsford Parsonage, and of assisting to form a quadrille table at Rosings, in the absence of more eligible visitors. The idea soon reached to conviction, as she observed his increasing civilities towards herself, and heard his frequent attempt at a compliment on her wit and vivacity; and though more astonished than gratified herself by this effect of her charms, it was not long before her mother gave her to understand that the probability of their marriage was exceedingly agreeable to _her_. 
Elizabeth, however, did not choose", # noqa: E501 - output=" to improve this conversation into a prophecy, and her mother would hardly take on herself to announce so important a phenomenon. At last he was to drive to Hunsford from Meryton on Sunday; they staid for an hour at eight o'clock, and the following day appeared to be hung up on the walls of", # noqa: E501 - prompt_tokens=128, - output_tokens=64, - start_time=1744728128.1986883, - end_time=1744728129.6919055, - first_token_time=1744728128.2481627, - last_token_time=1744728129.6914039, - ), - GenerativeTextResponseStats( - request_id="a95b96be-05d4-4130-b0dd-9528c01c9909", - request_type="text_completions", - scheduler_info=SchedulerRequestInfo( - requested=True, - completed=True, - errored=False, - canceled=False, - targeted_start_time=1744728128.1987216, - queued_time=1744728128.1991177, - dequeued_time=1744728129.6953137, - scheduled_time=1744728129.695318, - worker_start=1744728129.695379, - request_start=1744728129.6955585, - request_end=1744728131.187553, - worker_end=1744728131.188169, - process_id=0, - ), - prompt="were comfortable on this subject. Day after day passed away without bringing any other tidings of him than the report which shortly prevailed in Meryton of his coming no more to Netherfield the whole winter; a report which highly incensed Mrs. Bennet, and which she never failed to contradict as a most scandalous falsehood. Even Elizabeth began to fear not that Bingley was indifferent but that his sisters would be successful in keeping him away. Unwilling as she was to admit an idea so destructive to Jane s happiness, and so dishonourable to the stability of her lover, she could not prevent its frequently recurring", # noqa: E501 - output=" during these indefinite disputes; and was often seriously engaged in blaming her sisters for increasing a suspense which might only be caused by their own inattention to a subject of so much moment. Whether she had really made that impression on the s+.ayers, or whether she had merely imagined it, she could decide no farther, for", # noqa: E501 - prompt_tokens=128, - output_tokens=64, - start_time=1744728129.6955585, - end_time=1744728131.187553, - first_token_time=1744728129.7438853, - last_token_time=1744728131.187019, - ), - GenerativeTextResponseStats( - request_id="714b751c-bbfe-4b2a-a0af-7c1bf2c224ae", - request_type="text_completions", - scheduler_info=SchedulerRequestInfo( - requested=True, - completed=True, - errored=False, - canceled=False, - targeted_start_time=1744728129.6975086, - queued_time=1744728129.6978767, - dequeued_time=1744728131.190093, - scheduled_time=1744728131.190101, - worker_start=1744728131.1901798, - request_start=1744728131.1904676, - request_end=1744728132.6833503, - worker_end=1744728132.6839745, - process_id=0, - ), - prompt="? cried Elizabeth, brightening up for a moment. Upon my word, said Mrs. Gardiner, I begin to be of your uncle s opinion. It is really too great a violation of decency, honour, and interest, for him to be guilty of it. I cannot think so very ill of Wickham. Can you, yourself, Lizzie, so wholly give him up, as to believe him capable of it? Not perhaps of neglecting his own interest. But of every other neglect I can believe him capable. If, indeed, it should be so! But I dare not hope it. Why should they not go on", # noqa: E501 - output=" together? This is still a motive incapable of being denied. He has such a faculty of pleasing, and you know how much she likes him. 
\nQuestion: What made elder sisters the center of their families?\nSometimes early this would be discussed in the family circle, but that was a very exceptional treatment.\nThank you,", # noqa: E501 - prompt_tokens=128, - output_tokens=64, - start_time=1744728131.1904676, - end_time=1744728132.6833503, - first_token_time=1744728131.2394557, - last_token_time=1744728132.6828275, - ), - GenerativeTextResponseStats( - request_id="ef73ae8a-4c8f-4c88-b303-cfff152ce378", - request_type="text_completions", - scheduler_info=SchedulerRequestInfo( - requested=True, - completed=True, - errored=False, - canceled=False, - targeted_start_time=1744728131.1891043, - queued_time=1744728131.1893764, - dequeued_time=1744728132.6859632, - scheduled_time=1744728132.6859682, - worker_start=1744728132.6860242, - request_start=1744728132.6862206, - request_end=1744728134.1805167, - worker_end=1744728134.1813161, - process_id=0, - ), - prompt="was. But her commendation, though costing her some trouble, could by no means satisfy Mr. Collins, and he was very soon obliged to take her Ladyship s praise into his own hands. Sir William stayed only a week at Hunsford; but his visit was long enough to convince him of his daughter s being most comfortably settled, and of her possessing such a husband and such a neighbour as were not often met with. While Sir William was with them, Mr. Collins devoted his mornings to driving him out in his gig, and showing him the country but when he went away, the whole family returned to their usual employments", # noqa: E501 - output=", and the sides of the family in which he was more particularly interested, to their respective places in the establishment. Here Jane was occasionally up as a substitute to her indolent sister, in her matron s stead, but was more frequently left idle, and with her hours of quietness, the unwelcome intrusion", # noqa: E501 - prompt_tokens=128, - output_tokens=64, - start_time=1744728132.6862206, - end_time=1744728134.1805167, - first_token_time=1744728132.7354612, - last_token_time=1744728134.1797993, - ), - ], - errored=[], - incomplete=[ - GenerativeTextErrorStats( - request_id="1b3def04-ca81-4f59-a56c-452a069d91af", - request_type="text_completions", - scheduler_info=SchedulerRequestInfo( - requested=True, - completed=False, - errored=True, - canceled=True, - targeted_start_time=1744728132.686177, - queued_time=1744728132.6866345, - dequeued_time=1744728134.1831052, - scheduled_time=1744728134.1831107, - worker_start=1744728134.183183, - request_start=1744728134.183544, - request_end=1744728135.2031732, - worker_end=1744728135.2033112, - process_id=0, - ), - prompt="is to tempt anyone to our humble abode. Our plain manner of living, our small rooms, and few domestics, and the little we see of the world, must make Hunsford extremely dull to a young lady like yourself; but I hope you will believe us grateful for the condescension, and that we have done everything in our power to prevent you spending your time unpleasantly. Elizabeth was eager with her thanks and assurances of happiness. She had spent six weeks with great enjoyment; and the pleasure of being with Charlotte, and the kind attention she had received, must make _her_ feel the obliged. Mr. 
Collins", # noqa: E501 - output=", who certainly had an eye to Elizabeth's manner, was glad _he was not to lose the curiosity she had given, and requested her away_ , _for the politeness of her conciliating manner would", # noqa: E501 - prompt_tokens=128, - output_tokens=43, - start_time=1744728134.183544, - end_time=1744728135.2031732, - first_token_time=1744728134.2323751, - last_token_time=1744728135.1950455, - error="TimeoutError: The request timed out before completing.", - ) - ], - args=BenchmarkArgs( - profile=SynchronousProfile(), - strategy_index=0, + """Create a minimal mock GenerativeBenchmark for testing purposes.""" + return GenerativeBenchmark( + run_id="test-run-gen", + run_index=0, + scheduler=SchedulerDict( strategy=SynchronousStrategy(), - max_number=None, - max_duration=10.0, - warmup_number=None, - warmup_duration=None, - cooldown_number=None, - cooldown_duration=None, + constraints={}, + state=SchedulerState(node_id=0, num_processes=1), ), - run_stats=BenchmarkRunStats( - start_time=1744728125.0772898, - end_time=1744728135.8407037, + benchmarker=BenchmarkerDict( + profile=SynchronousProfile.create("synchronous", rate=None), + requests={}, + backend={}, + environment={}, + aggregators={}, + ), + env_args=StandardBaseDict(), + extras=StandardBaseDict(), + run_stats=BenchmarkSchedulerStats( + start_time=1, + end_time=2, requests_made=StatusBreakdown( - successful=6, + successful=1, + incomplete=0, errored=0, - incomplete=1, - total=7, + total=1, ), - queued_time_avg=1.2821388585226876, - scheduled_time_delay_avg=7.96999250139509e-6, - scheduled_time_sleep_avg=0.0, - worker_start_delay_avg=6.399835859026228e-5, - worker_time_avg=1.4266603674207414, - worker_start_time_targeted_delay_avg=1.2825865745544434, - request_start_time_delay_avg=0.6414163964135307, - request_start_time_targeted_delay_avg=1.2827096836907523, - request_time_delay_avg=0.0004316908972603934, - request_time_avg=1.426228676523481, + queued_time_avg=0.1, + worker_resolve_start_delay_avg=0.1, + worker_resolve_time_avg=0.1, + worker_resolve_end_delay_avg=0.1, + finalized_delay_avg=0.1, + worker_targeted_start_delay_avg=0.1, + request_start_delay_avg=0.1, + request_time_avg=0.1, + request_targeted_delay_avg=0.1, + ), + start_time=1000.0, + end_time=2000.0, + metrics=GenerativeMetrics( + requests_per_second=_create_status_dist(), + request_concurrency=_create_status_dist(), + request_latency=_create_status_dist(), + prompt_token_count=_create_status_dist(), + output_token_count=_create_status_dist(), + total_token_count=_create_status_dist(), + time_to_first_token_ms=_create_status_dist(), + time_per_output_token_ms=_create_status_dist(), + inter_token_latency_ms=_create_status_dist(), + output_tokens_per_second=_create_status_dist(), + tokens_per_second=_create_status_dist(), ), - worker=GenerativeRequestsWorkerDescription( - backend_type="openai_http", - backend_target="http://localhost:8000", - backend_model="neuralmagic/Qwen2.5-7B-quantized.w8a8", - backend_info={ - "max_output_tokens": 16384, - "timeout": 300, - "http2": True, - "authorization": False, - "organization": None, - "project": None, - "text_completions_path": "/v1/completions", - "chat_completions_path": "/v1/chat/completions", - }, + request_totals=StatusBreakdown( + successful=1, + incomplete=0, + errored=0, + total=1, ), - requests_loader=GenerativeRequestLoaderDescription( - data='{"prompt_tokens": 128, "output_tokens": 64}', - data_args=None, - processor="neuralmagic/Qwen2.5-7B-quantized.w8a8", - processor_args=None, + 
requests=StatusBreakdown( + successful=[ + GenerativeRequestStats( + scheduler_info=ScheduledRequestInfo( + request_timings=GenerationRequestTimings( + request_start=1, + first_iteration=2, + last_iteration=6, + request_end=6, + ) + ), + request_id="a", + request_type="text_completions", + prompt="p", + request_args={}, + output="o", + iterations=1, + prompt_tokens=1, + output_tokens=2, + ) + ], + incomplete=[], + errored=[], + total=None, ), - extras={}, ) diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py deleted file mode 100644 index 63beb512..00000000 --- a/tests/unit/test_cli.py +++ /dev/null @@ -1,105 +0,0 @@ -""" -Unit tests for CLI functionality, specifically the version flag. -""" - -import importlib.metadata -import re - -import pytest -from click.testing import CliRunner - -from guidellm.__main__ import cli - - -@pytest.mark.smoke -def test_version_flag_long(): - """Test that --version flag works correctly.""" - runner = CliRunner() - result = runner.invoke(cli, ["--version"]) - - assert result.exit_code == 0 - assert "guidellm version:" in result.output - assert result.output.strip().startswith("guidellm version:") - - -@pytest.mark.smoke -def test_version_flag_displays_actual_version(): - """Test that --version displays the actual version from version.py.""" - runner = CliRunner() - result = runner.invoke(cli, ["--version"]) - - assert result.exit_code == 0 - - version_pattern = r"guidellm version: \d+\.\d+" - assert re.search(version_pattern, result.output) - - -@pytest.mark.smoke -def test_version_flag_exits_cleanly(): - """Test that --version exits without processing other commands.""" - runner = CliRunner() - result = runner.invoke(cli, ["--version", "benchmark"]) - - assert result.exit_code == 0 - assert "guidellm version:" in result.output - assert "Commands to run a new benchmark" not in result.output - - -@pytest.mark.smoke -def test_help_shows_version_option(): - """Test that --help shows the --version option.""" - runner = CliRunner() - result = runner.invoke(cli, ["--help"]) - - assert result.exit_code == 0 - assert "--version" in result.output - assert "Show the version and exit" in result.output - - -@pytest.mark.smoke -def test_other_commands_still_work(): - """Test that other CLI commands still work after adding version flag.""" - runner = CliRunner() - result = runner.invoke(cli, ["--help"]) - - assert result.exit_code == 0 - assert "benchmark" in result.output - assert "config" in result.output - assert "preprocess" in result.output - - -@pytest.mark.smoke -def test_version_flag_case_sensitivity(): - """Test that --version flag is case sensitive.""" - runner = CliRunner() - - result = runner.invoke(cli, ["--version"]) - assert result.exit_code == 0 - assert "guidellm version:" in result.output - - # --VERSION should not work - result = runner.invoke(cli, ["--VERSION"]) - assert result.exit_code != 0 - assert "No such option" in result.output - - -@pytest.mark.integration -def test_version_integration_with_actual_version(): - """Integration test to verify version matches importlib.metadata.""" - try: - actual_version = importlib.metadata.version("guidellm") - - runner = CliRunner() - result = runner.invoke(cli, ["--version"]) - - assert result.exit_code == 0 - expected_output = f"guidellm version: {actual_version}" - assert expected_output in result.output - except importlib.metadata.PackageNotFoundError: - # If package is not installed, the CLI should show an error - # This is expected behavior when the package isn't properly installed - runner = 
CliRunner() - result = runner.invoke(cli, ["--version"]) - - # Click will handle the error when package is not found - assert result.exit_code != 0 diff --git a/tests/unit/test_config.py b/tests/unit/test_settings.py similarity index 100% rename from tests/unit/test_config.py rename to tests/unit/test_settings.py diff --git a/tests/unit/utils/test_typing.py b/tests/unit/utils/test_typing.py new file mode 100644 index 00000000..fafa8765 --- /dev/null +++ b/tests/unit/utils/test_typing.py @@ -0,0 +1,123 @@ +""" +Test suite for the typing utilities module. +""" + +from typing import Annotated, Literal, Union + +import pytest +from typing_extensions import TypeAlias + +from guidellm.utils.typing import get_literal_vals + +# Local type definitions to avoid imports from other modules +LocalProfileType = Literal["synchronous", "async", "concurrent", "throughput", "sweep"] +LocalStrategyType = Annotated[ + Literal["synchronous", "concurrent", "throughput", "constant", "poisson"], + "Valid strategy type identifiers for scheduling request patterns", +] +StrategyProfileType: TypeAlias = Union[LocalStrategyType, LocalProfileType] + + +class TestGetLiteralVals: + """Test cases for the get_literal_vals function.""" + + @pytest.mark.sanity + def test_profile_type(self): + """ + Test extracting values from ProfileType. + + ### WRITTEN BY AI ### + """ + result = get_literal_vals(LocalProfileType) + expected = frozenset( + {"synchronous", "async", "concurrent", "throughput", "sweep"} + ) + assert result == expected + + @pytest.mark.sanity + def test_strategy_type(self): + """ + Test extracting values from StrategyType. + + ### WRITTEN BY AI ### + """ + result = get_literal_vals(LocalStrategyType) + expected = frozenset( + {"synchronous", "concurrent", "throughput", "constant", "poisson"} + ) + assert result == expected + + @pytest.mark.smoke + def test_inline_union_type(self): + """ + Test extracting values from inline union of ProfileType | StrategyType. + + ### WRITTEN BY AI ### + """ + result = get_literal_vals(Union[LocalProfileType, LocalStrategyType]) + expected = frozenset( + { + "synchronous", + "async", + "concurrent", + "throughput", + "constant", + "poisson", + "sweep", + } + ) + assert result == expected + + @pytest.mark.smoke + def test_type_alias(self): + """ + Test extracting values from type alias union. + + ### WRITTEN BY AI ### + """ + result = get_literal_vals(StrategyProfileType) + expected = frozenset( + { + "synchronous", + "async", + "concurrent", + "throughput", + "constant", + "poisson", + "sweep", + } + ) + assert result == expected + + @pytest.mark.sanity + def test_single_literal(self): + """ + Test extracting values from single Literal type. + + ### WRITTEN BY AI ### + """ + result = get_literal_vals(Literal["test"]) + expected = frozenset({"test"}) + assert result == expected + + @pytest.mark.sanity + def test_multi_literal(self): + """ + Test extracting values from multi-value Literal type. + + ### WRITTEN BY AI ### + """ + result = get_literal_vals(Literal["test", "test2"]) + expected = frozenset({"test", "test2"}) + assert result == expected + + @pytest.mark.smoke + def test_literal_union(self): + """ + Test extracting values from union of Literal types. 
+ + ### WRITTEN BY AI ### + """ + result = get_literal_vals(Union[Literal["test", "test2"], Literal["test3"]]) + expected = frozenset({"test", "test2", "test3"}) + assert result == expected From 6d0d4c24c361158d747356f7d7f3de1884697d53 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 19 Sep 2025 13:06:54 +0000 Subject: [PATCH 19/90] add in the perf extras Signed-off-by: Mark Kurtz --- pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 966a032b..29ae92c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,12 @@ dependencies = [ ] [project.optional-dependencies] +perf = [ + "orjson", + "msgpack", + "msgspec", + "uvloop", +] dev = [ # build "build>=1.0.0", From bfc8e5095768bc600b05791cc01a6f1511276216 Mon Sep 17 00:00:00 2001 From: jaredoconnell Date: Mon, 8 Sep 2025 21:50:31 +0000 Subject: [PATCH 20/90] Complete CSV output Signed-off-by: jaredoconnell --- src/guidellm/benchmark/output.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index 95b51d70..802f2758 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -579,6 +579,13 @@ async def finalize(self, report: GenerativeBenchmarksReport) -> Path: benchmark_headers: list[str] = [] benchmark_values: list[str | float | list[float]] = [] + # Add basic run description info + desc_headers, desc_values = ( + self._get_benchmark_desc_headers_and_values(benchmark) + ) + benchmark_headers.extend(desc_headers) + benchmark_values.extend(desc_values) + # Add status-based metrics for status in StatusDistributionSummary.model_fields: status_headers, status_values = ( @@ -684,6 +691,21 @@ def _get_benchmark_status_metrics_stats( ] return headers, values + def _get_benchmark_extras_headers_and_values( + self, benchmark: GenerativeBenchmark, + ) -> tuple[list[str], list[str]]: + headers = ["Profile", "Backend", "Generator Data"] + values: list[str] = [ + benchmark.benchmarker.profile.model_dump_json(), + json.dumps(benchmark.benchmarker.backend), + json.dumps(benchmark.benchmarker.requests["attributes"]["data"]), + ] + + if len(headers) != len(values): + raise ValueError("Headers and values length mismatch.") + + return headers, values + @GenerativeBenchmarkerOutput.register("html") class GenerativeBenchmarkerHTML(GenerativeBenchmarkerOutput): From c12c4f88879e05486be323b3b3048f716eca393e Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Wed, 24 Sep 2025 05:57:14 -0400 Subject: [PATCH 21/90] [GuideLLM Refactor] Core: Reintroduce changes from main (#364) ## Summary TODO --- - [x] "I certify that all code in this PR is my own, except as noted below." 
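For context, the `recommended` extra introduced by this patch only groups the
OpenAI tokenizer dependencies (`tiktoken`, `blobfile`). A minimal sketch of what
it enables, assuming the extra is installed (e.g. `pip install "guidellm[recommended]"`):

```python
# Illustrative only: token counting via tiktoken, which the `recommended`
# extra pulls in for OpenAI-style tokenization.
import tiktoken

encoding = tiktoken.get_encoding("cl100k_base")
tokens = encoding.encode("counting tokens for benchmark statistics")
print(len(tokens))
```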
## Use of AI - [ ] Includes AI-assisted code completion - [ ] Includes code generated by an AI application - [ ] Includes AI-generated tests (NOTE: AI written tests should have a docstring that includes `## WRITTEN BY AI ##`) --- pyproject.toml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 966a032b..df91260e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,9 @@ include = ["*"] [tool.setuptools.package-data] "guidellm.data" = ["*.gz"] +[tool.pdm] +distribution = true + # ************************************************ # ********** Project Metadata ********** @@ -64,6 +67,10 @@ dependencies = [ ] [project.optional-dependencies] +recommended = [ + "tiktoken>=0.11.0", # For OpenAI tokenizer + "blobfile>=3.1.0", # For OpenAI tokenizer +] dev = [ # build "build>=1.0.0", @@ -104,6 +111,9 @@ dev = [ "mkdocs-linkcheck~=1.0.6", ] +[dependency-groups] +dev = [ "guidellm[dev]" ] + [project.urls] homepage = "https://github.com/vllm-project/guidellm" source = "https://github.com/vllm-project/guidellm" From ab5466b1f5affd146fa69dbfc63cf31c6a1c82f0 Mon Sep 17 00:00:00 2001 From: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> Date: Wed, 24 Sep 2025 17:31:45 -0400 Subject: [PATCH 22/90] [GuideLLM Refactor] Fix from-file (#366) ## Summary This PR ports the new functionality from `benchmark run` to `benchmark from-file`, and does so in a way that reuses as much code as practical to have one source of truth. ## Details - Fixes from-file by making it to use the new output format. - Moves code related to the new output formats to separate functions that are called from both benchmark entrypoints. - Moves additional chunks of code out of the large benchmark run entrypoint function for modularity. ## Test Plan Run a benchmark with an output of json or yaml, and use `from-file` to re-import it and export it. You can select any output type supported by `benchmark run`. `guidellm benchmark from-file ./result.json --output-formats console` `guidellm benchmark from-file ./result.yaml --output-formats yaml` ## Related Issues --- - [x] "I certify that all code in this PR is my own, except as noted below." ## Use of AI - [x] Includes AI-assisted code completion - [ ] Includes code generated by an AI application - [ ] Includes AI-generated tests (NOTE: AI written tests should have a docstring that includes `## WRITTEN BY AI ##`) --------- Signed-off-by: Jared O'Connell --- src/guidellm/__main__.py | 37 +++-- src/guidellm/benchmark/entrypoints.py | 204 +++++++++++++++++--------- 2 files changed, 158 insertions(+), 83 deletions(-) diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 675003a9..9d85346b 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -473,23 +473,30 @@ def run( ) @click.option( "--output-path", - type=click.Path(file_okay=True, dir_okay=True, exists=False), - default=None, - is_flag=False, - flag_value=Path.cwd() / "benchmarks_reexported.json", + type=click.Path(), + default=Path.cwd(), + help=( + "Allows re-exporting the benchmarks to other formats. " + "The path to save the output formats to, if the format is a file type. " + "If it is a directory, it will save all output formats selected under it. " + "If it is a file, it will save the corresponding output format to that file. " + "Any output formats that were given that do not match the file extension will " + "be saved in the parent directory of the file path. " + "Defaults to the current working directory. 
" + ), +) +@click.option( + "--output-formats", + multiple=True, + type=str, + default=("console", "json"), # ("console", "json", "html", "csv") help=( - "Allows re-exporting the benchmarks to another format. " - "The path to save the output to. If it is a directory, " - "it will save benchmarks.json under it. " - "Otherwise, json, yaml, or csv files are supported for output types " - "which will be read from the extension for the file path. " - "This input is optional. If the output path flag is not provided, " - "the benchmarks will not be reexported. If the flag is present but " - "no value is specified, it will default to the current directory " - "with the file name `benchmarks_reexported.json`." + "The output formats to use for the benchmark results. " + "Defaults to console, json, html, and csv where the file formats " + "will be saved at the specified output path." ), ) -def from_file(path, output_path): +def from_file(path, output_path, output_formats): """ Load and optionally re-export a previously saved benchmark report. @@ -497,7 +504,7 @@ def from_file(path, output_path): to different output formats. Supports JSON, YAML, and CSV export formats based on the output file extension. """ - reimport_benchmarks_report(path, output_path) + asyncio.run(reimport_benchmarks_report(path, output_path, output_formats)) @cli.command( diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 60077ee8..828402d8 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -26,7 +26,6 @@ from guidellm.benchmark.benchmarker import Benchmarker from guidellm.benchmark.objects import GenerativeBenchmark, GenerativeBenchmarksReport from guidellm.benchmark.output import ( - GenerativeBenchmarkerConsole, GenerativeBenchmarkerOutput, ) from guidellm.benchmark.profile import Profile, ProfileType @@ -53,6 +52,97 @@ _CURRENT_WORKING_DIR = Path.cwd() +# Data types + +DataType = ( + Iterable[str] + | Iterable[dict[str, Any]] + | Dataset + | DatasetDict + | IterableDataset + | IterableDatasetDict + | str + | Path +) + +OutputFormatType = ( + tuple[str, ...] + | list[str] + | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] + | None +) + + +# Helper functions + +async def initialize_backend( + backend: BackendType | Backend, + target: str, + model: str | None, + backend_kwargs: dict[str, Any] | None, +) -> Backend: + backend = ( + Backend.create( + backend, target=target, model=model, **(backend_kwargs or {}) + ) + if not isinstance(backend, Backend) + else backend + ) + await backend.process_startup() + await backend.validate() + return backend + + +async def resolve_profile( + constraint_inputs: dict[str, int | float], + profile: Profile | str | None, + rate: list[float] | None, + random_seed: int, + constraints: dict[str, ConstraintInitializer | Any], +): + for key, val in constraint_inputs.items(): + if val is not None: + constraints[key] = val + if not isinstance(profile, Profile): + if isinstance(profile, str): + profile = Profile.create( + rate_type=profile, + rate=rate, + random_seed=random_seed, + constraints={**constraints}, + ) + else: + raise ValueError(f"Expected string for profile; got {type(profile)}") + + elif constraints: + raise ValueError( + "Constraints must be empty when providing a Profile instance. 
" + f"Provided constraints: {constraints} ; provided profile: {profile}" + ) + return profile + +async def resolve_output_formats( + output_formats: OutputFormatType, + output_path: str | Path | None, +) -> dict[str, GenerativeBenchmarkerOutput]: + output_formats = GenerativeBenchmarkerOutput.resolve( + output_formats=(output_formats or {}), output_path=output_path + ) + return output_formats + +async def finalize_outputs( + report: GenerativeBenchmarksReport, + resolved_output_formats: dict[str, GenerativeBenchmarkerOutput] +): + output_format_results = {} + for key, output in resolved_output_formats.items(): + output_result = await output.finalize(report) + output_format_results[key] = output_result + return output_format_results + + +# Complete entrypoints + async def benchmark_with_scenario(scenario: Scenario, **kwargs): """ Run a benchmark using a scenario and specify any extra arguments @@ -67,16 +157,7 @@ async def benchmark_with_scenario(scenario: Scenario, **kwargs): # @validate_call(config={"arbitrary_types_allowed": True}) async def benchmark_generative_text( # noqa: C901 target: str, - data: ( - Iterable[str] - | Iterable[dict[str, Any]] - | Dataset - | DatasetDict - | IterableDataset - | IterableDatasetDict - | str - | Path - ), + data: DataType, profile: StrategyType | ProfileType | Profile, rate: float | list[float] | None = None, random_seed: int = 42, @@ -91,12 +172,7 @@ async def benchmark_generative_text( # noqa: C901 data_sampler: Literal["random"] | None = None, # Output configuration output_path: str | Path | None = _CURRENT_WORKING_DIR, - output_formats: ( - tuple[str, ...] - | list[str] - | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] - | None - ) = ("console", "json", "html", "csv"), + output_formats: OutputFormatType = ("console", "json", "html", "csv"), # Updates configuration progress: tuple[str, ...] | list[str] | list[BenchmarkerProgress] | None = None, print_updates: bool = False, @@ -120,16 +196,7 @@ async def benchmark_generative_text( # noqa: C901 with console.print_update_step( title=f"Initializing backend {backend}" ) as console_step: - backend = ( - Backend.create( - backend, target=target, model=model, **(backend_kwargs or {}) - ) - if not isinstance(backend, Backend) - else backend - ) - console_step.update(f"{backend.__class__.__name__} backend initialized") - await backend.process_startup() - await backend.validate() + backend = await initialize_backend(backend, target, model, backend_kwargs) console_step.finish( title=f"{backend.__class__.__name__} backend initialized", details=backend.info, @@ -190,27 +257,19 @@ async def benchmark_generative_text( # noqa: C901 with console.print_update_step( title=f"Resolving profile {profile}" ) as console_step: - for key, val in { - "max_seconds": max_seconds, - "max_requests": max_requests, - "max_errors": max_errors, - "max_error_rate": max_error_rate, - "max_global_error_rate": max_global_error_rate, - }.items(): - if val is not None: - constraints[key] = val - if not isinstance(profile, Profile): - profile = Profile.create( - rate_type=profile, - rate=rate, - random_seed=random_seed, - constraints={**constraints}, - ) - elif constraints: - raise ValueError( - "Constraints must be empty when providing a Profile instance. 
" - f"Provided constraints: {constraints} ; provided profile: {profile}" - ) + profile = await resolve_profile( + { + "max_seconds": max_seconds, + "max_requests": max_requests, + "max_errors": max_errors, + "max_error_rate": max_error_rate, + "max_global_error_rate": max_global_error_rate, + }, + profile, + rate, + random_seed, + constraints, + ) console_step.finish( title=f"{profile.__class__.__name__} profile resolved", details=InfoMixin.extract_from_obj(profile), @@ -237,12 +296,10 @@ async def benchmark_generative_text( # noqa: C901 ) with console.print_update_step(title="Resolving output formats") as console_step: - output_formats = GenerativeBenchmarkerOutput.resolve( - output_formats=(output_formats or {}), output_path=output_path - ) + resolved_output_formats = await resolve_output_formats(output_formats, output_path) console_step.finish( title="Output formats resolved", - details={key: str(val) for key, val in output_formats.items()}, + details={key: str(val) for key, val in resolved_output_formats.items()}, status_level="success", ) @@ -278,14 +335,11 @@ async def benchmark_generative_text( # noqa: C901 if benchmark: report.benchmarks.append(benchmark) - output_format_results = {} - for key, output in output_formats.items(): - output_result = await output.finalize(report) - output_format_results[key] = output_result + output_format_results = await finalize_outputs(report, resolved_output_formats) console.print("\n\n") console.print_update( - title=f"Benchmarking complete, generated {len(report.benchmarks)} benchmark(s)", + title=f"Benchmarking complete; generated {len(report.benchmarks)} benchmark(s)", status="success", ) for key, value in output_format_results.items(): @@ -294,20 +348,34 @@ async def benchmark_generative_text( # noqa: C901 return report, output_format_results -def reimport_benchmarks_report(file: Path, output_path: Path | None) -> None: +async def reimport_benchmarks_report( + file: Path, + output_path: Path | None, + output_formats: OutputFormatType = ("console", "json", "html", "csv"), +) -> tuple[GenerativeBenchmarksReport, dict[str, Any]]: """ The command-line entry point for re-importing and displaying an - existing benchmarks report. Can also specify + existing benchmarks report. Can also specify an output format. Assumes the file provided exists. """ - report = GenerativeBenchmarksReport.load_file(file) - console_output = GenerativeBenchmarkerConsole() - console_output.finalize(report) console = Console() + with console.print_update_step( + title=f"Loading benchmarks from {file}" + ) as console_step: + report = GenerativeBenchmarksReport.load_file(file) + console_step.finish(f"Import of old benchmarks complete; loaded {len(report.benchmarks)} benchmark(s)") + + with console.print_update_step(title="Resolving output formats") as console_step: + resolved_output_formats = await resolve_output_formats(output_formats, output_path) + console_step.finish( + title="Output formats resolved", + details={key: str(val) for key, val in resolved_output_formats.items()}, + status_level="success", + ) - if output_path: - with console.print_update_step( - title=f"Saving benchmarks report to {output_path}..." 
- ) as console_step: - saved_path = report.save_file(output_path) - console_step.finish(title=f"Benchmarks report saved to {saved_path}") + output_format_results = await finalize_outputs(report, resolved_output_formats) + + for key, value in output_format_results.items(): + console.print_update(title=f" {key:<8}: {value}", status="debug") + + return report, output_format_results From 78615f74af5936806f323eecc94709f2ee3317ee Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Thu, 25 Sep 2025 11:13:50 -0400 Subject: [PATCH 23/90] [GuideLLM Refactor] Entrypoint: Reintroduce changes from main (#363) ## Summary Reintroduces a few changes from main --------- Signed-off-by: Samuel Monson --- src/guidellm/__main__.py | 1 + src/guidellm/logger.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 9d85346b..13a748d5 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -104,6 +104,7 @@ def decode_escaped_str(_ctx, _param, value): @click.group() +@click.version_option(package_name="guidellm", message="guidellm version: %(version)s") def cli(): """ Main entry point for the GuideLLM command-line interface. diff --git a/src/guidellm/logger.py b/src/guidellm/logger.py index 48b41a49..70259bad 100644 --- a/src/guidellm/logger.py +++ b/src/guidellm/logger.py @@ -71,7 +71,8 @@ def configure_logger(config: LoggingSettings = settings.logging): logger.add( sys.stdout, level=config.console_log_level.upper(), - format="{time} | {function} | {level} - {message}", + format="{time:YY-MM-DD HH:mm:ss}|{level: <8} \ + |{name}:{function}:{line} - {message}" ) if config.log_file or config.log_file_level: From 3ac15374c83f864f26692ba445b5462bc001d072 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Thu, 11 Sep 2025 15:33:37 -0400 Subject: [PATCH 24/90] Update GenerativeTextScenario to match current def Replace scenario entrypoint with a decorator Forward-port get_default and from_file to Scenario Apply scenario args as an update to kwargs Readd scenario support to CLI Signed-off-by: Samuel Monson --- src/guidellm/__main__.py | 148 ++++++++++++-------------- src/guidellm/benchmark/__init__.py | 10 ++ src/guidellm/benchmark/entrypoints.py | 14 +-- src/guidellm/benchmark/scenario.py | 126 ++++++++++++++++++---- 4 files changed, 184 insertions(+), 114 deletions(-) diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 13a748d5..0a035551 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -31,6 +31,7 @@ from typing import Annotated, Union import click +from pydantic import ValidationError try: import uvloop @@ -55,6 +56,7 @@ ) from guidellm.benchmark.scenario import ( GenerativeTextScenario, + get_builtin_scenarios, ) from guidellm.mock_server import MockServer, MockServerConfig from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset @@ -135,6 +137,25 @@ def benchmark(): help="Run a benchmark against a generative model using the specified arguments.", context_settings={"auto_envvar_prefix": "GUIDELLM"}, ) +@click.option( + "--scenario", + type=cli_tools.Union( + click.Path( + exists=True, + readable=True, + file_okay=True, + dir_okay=False, + path_type=Path, + ), + click.Choice(get_builtin_scenarios()), + ), + default=None, + help=( + "The name of a builtin scenario or path to a config file. " + "Missing values from the config will use defaults. " + "Options specified on the commandline will override the scenario." 
+ ), +) @click.option( "--target", type=str, @@ -161,7 +182,7 @@ def benchmark(): ) @click.option( "--rate", - default=None, + default=GenerativeTextScenario.get_default("rate"), help=( "The rates to run the benchmark at. " "Can be a single number or a comma-separated list of numbers. " @@ -183,18 +204,18 @@ def benchmark(): "--backend-type", # legacy alias "backend", type=click.Choice(list(get_literal_vals(BackendType))), + default=GenerativeTextScenario.get_default("backend"), help=( "The type of backend to use to run requests against. Defaults to 'openai_http'." f" Supported types: {', '.join(get_literal_vals(BackendType))}" ), - default="openai_http", ) @click.option( "--backend-kwargs", "--backend-args", # legacy alias "backend_kwargs", callback=cli_tools.parse_json, - default=None, + default=GenerativeTextScenario.get_default("backend_kwargs"), help=( "A JSON string containing any arguments to pass to the backend as a " "dict with **kwargs. Headers can be removed by setting their value to " @@ -204,7 +225,7 @@ def benchmark(): ) @click.option( "--model", - default=None, + default=GenerativeTextScenario.get_default("model"), type=str, help=( "The ID of the model to benchmark within the backend. " @@ -214,7 +235,7 @@ def benchmark(): # Data configuration @click.option( "--processor", - default=None, + default=GenerativeTextScenario.get_default("processor"), type=str, help=( "The processor or tokenizer to use to calculate token counts for statistics " @@ -224,7 +245,7 @@ def benchmark(): ) @click.option( "--processor-args", - default=None, + default=GenerativeTextScenario.get_default("processor_args"), callback=cli_tools.parse_json, help=( "A JSON string containing any arguments to pass to the processor constructor " @@ -233,7 +254,7 @@ def benchmark(): ) @click.option( "--data-args", - default=None, + default=GenerativeTextScenario.get_default("data_args"), callback=cli_tools.parse_json, help=( "A JSON string containing any arguments to pass to the dataset creation " @@ -242,7 +263,7 @@ def benchmark(): ) @click.option( "--data-sampler", - default=None, + default=GenerativeTextScenario.get_default("data_sampler"), type=click.Choice(["random"]), help=( "The data sampler type to use. 'random' will add a random shuffle on the data. " @@ -301,7 +322,7 @@ def benchmark(): "--warmup-percent", # legacy alias "warmup", type=float, - default=None, + default=GenerativeTextScenario.get_default("warmup"), help=( "The specification around the number of requests to run before benchmarking. " "If within (0, 1), then the percent of requests/time to use for warmup. " @@ -315,7 +336,7 @@ def benchmark(): "--cooldown-percent", # legacy alias "cooldown", type=float, - default=GenerativeTextScenario.get_default("cooldown_percent"), + default=GenerativeTextScenario.get_default("cooldown"), help=( "The specification around the number of requests to run after benchmarking. " "If within (0, 1), then the percent of requests/time to use for cooldown. " @@ -328,19 +349,19 @@ def benchmark(): "--request-samples", "--output-sampling", # legacy alias "request_samples", + default=GenerativeTextScenario.get_default("request_samples"), type=int, help=( "The number of samples for each request status and each benchmark to save " "in the output file. If None (default), will save all samples. " "Defaults to 20." 
), - default=20, ) # Constraints configuration @click.option( "--max-seconds", type=float, - default=None, + default=GenerativeTextScenario.get_default("max_seconds"), help=( "The maximum number of seconds each benchmark can run for. " "If None, will run until max_requests or the data is exhausted." @@ -349,7 +370,7 @@ def benchmark(): @click.option( "--max-requests", type=int, - default=None, + default=GenerativeTextScenario.get_default("max_requests"), help=( "The maximum number of requests each benchmark can run for. " "If None, will run until max_seconds or the data is exhausted." @@ -358,55 +379,22 @@ def benchmark(): @click.option( "--max-errors", type=int, - default=None, + default=GenerativeTextScenario.get_default("max_errors"), help="Maximum number of errors allowed before stopping the benchmark", ) @click.option( "--max-error-rate", type=float, - default=None, + default=GenerativeTextScenario.get_default("max_error_rate"), help="Maximum error rate allowed before stopping the benchmark", ) @click.option( "--max-global-error-rate", type=float, - default=None, + default=GenerativeTextScenario.get_default("max_global_error_rate"), help="Maximum global error rate allowed across all benchmarks", ) -def run( - target, - data, - profile, - rate, - random_seed, - # Backend Configuration - backend, - backend_kwargs, - model, - # Data configuration - processor, - processor_args, - data_args, - data_sampler, - # Output configuration - output_path, - output_formats, - # Updates configuration - disable_console_outputs, - disable_progress, - display_scheduler_stats, - # Aggregators configuration - output_extras, - warmup, - cooldown, - request_samples, - # Constraints configuration - max_seconds, - max_requests, - max_errors, - max_error_rate, - max_global_error_rate, -): +def run(**kwargs): """ Execute a generative text benchmark against a target model backend. @@ -415,53 +403,53 @@ def run( Supports multiple backends, data sources, output formats, and constraint types for flexible benchmark configuration. 
""" + scenario = kwargs.pop("scenario") + click_ctx = click.get_current_context() + overrides = cli_tools.set_if_not_default(click_ctx, **kwargs) + + try: + # If a scenario file was specified read from it + if scenario is None: + _scenario = GenerativeTextScenario.model_validate(overrides) + elif isinstance(scenario, Path): + _scenario = GenerativeTextScenario.from_file(scenario, overrides) + else: # Only builtins can make it here; click will catch anything else + _scenario = GenerativeTextScenario.from_builtin(scenario, overrides) + except ValidationError as e: + # Translate pydantic valdation error to click argument error + errs = e.errors(include_url=False, include_context=True, include_input=True) + param_name = "--" + str(errs[0]["loc"][0]).replace("_", "-") + raise click.BadParameter( + errs[0]["msg"], ctx=click_ctx, param_hint=param_name + ) from e + if HAS_UVLOOP: asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) asyncio.run( benchmark_generative_text( - target=target, - data=data, - profile=profile, - rate=rate, - random_seed=random_seed, - # Backend configuration - backend=backend, - backend_kwargs=backend_kwargs, - model=model, - # Data configuration - processor=processor, - processor_args=processor_args, - data_args=data_args, - data_sampler=data_sampler, + scenario=_scenario, # Output configuration - output_path=output_path, + output_path=kwargs["output_path"], output_formats=[ fmt - for fmt in output_formats - if not disable_console_outputs or fmt != "console" + for fmt in kwargs["output_formats"] + if not kwargs["disable_console_outputs"] or fmt != "console" ], # Updates configuration progress=( [ GenerativeConsoleBenchmarkerProgress( - display_scheduler_stats=display_scheduler_stats + display_scheduler_stats=kwargs["display_scheduler_stats"] ) ] - if not disable_progress + if not kwargs["disable_progress"] else None ), - print_updates=not disable_console_outputs, + print_updates=not kwargs["disable_console_outputs"], # Aggregators configuration - add_aggregators={"extras": InjectExtrasAggregator(extras=output_extras)}, - warmup=warmup, - cooldown=cooldown, - request_samples=request_samples, - # Constraints configuration - max_seconds=max_seconds, - max_requests=max_requests, - max_errors=max_errors, - max_error_rate=max_error_rate, - max_global_error_rate=max_global_error_rate, + add_aggregators={ + "extras": InjectExtrasAggregator(extras=kwargs["output_extras"]) + }, ) ) diff --git a/src/guidellm/benchmark/__init__.py b/src/guidellm/benchmark/__init__.py index 76324a65..8350f161 100644 --- a/src/guidellm/benchmark/__init__.py +++ b/src/guidellm/benchmark/__init__.py @@ -40,6 +40,12 @@ BenchmarkerProgressGroup, GenerativeConsoleBenchmarkerProgress, ) +from .scenario import ( + GenerativeTextScenario, + Scenario, + enable_scenarios, + get_builtin_scenarios, +) __all__ = [ "Aggregator", @@ -65,14 +71,18 @@ "GenerativeRequestStats", "GenerativeRequestsAggregator", "GenerativeStatsProgressAggregator", + "GenerativeTextScenario", "InjectExtrasAggregator", "Profile", "ProfileType", + "Scenario", "SchedulerStatsAggregator", "SerializableAggregator", "SweepProfile", "SynchronousProfile", "ThroughputProfile", "benchmark_generative_text", + "enable_scenarios", + "get_builtin_scenarios", "reimport_benchmarks_report", ] diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 828402d8..df65fe8f 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -33,7 +33,7 @@ BenchmarkerProgress, 
BenchmarkerProgressGroup, ) -from guidellm.benchmark.scenario import GenerativeTextScenario, Scenario +from guidellm.benchmark.scenario import enable_scenarios from guidellm.request import GenerativeRequestLoader from guidellm.scheduler import ( ConstraintInitializer, @@ -44,7 +44,6 @@ __all__ = [ "benchmark_generative_text", - "benchmark_with_scenario", "reimport_benchmarks_report", ] @@ -143,18 +142,9 @@ async def finalize_outputs( # Complete entrypoints -async def benchmark_with_scenario(scenario: Scenario, **kwargs): - """ - Run a benchmark using a scenario and specify any extra arguments - """ - - if isinstance(scenario, GenerativeTextScenario): - return await benchmark_generative_text(**vars(scenario), **kwargs) - else: - raise ValueError(f"Unsupported Scenario type {type(scenario)}") - # @validate_call(config={"arbitrary_types_allowed": True}) +@enable_scenarios async def benchmark_generative_text( # noqa: C901 target: str, data: DataType, diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 3f84f868..c45ae313 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -1,22 +1,35 @@ from __future__ import annotations +import json from collections.abc import Iterable -from functools import cache +from functools import cache, wraps +from inspect import Parameter, signature from pathlib import Path -from typing import Annotated, Any, Literal, TypeVar +from typing import Annotated, Any, Callable, Literal, TypeVar +import yaml from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict -from pydantic import BeforeValidator, Field, NonNegativeInt, PositiveFloat, PositiveInt +from loguru import logger +from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt from transformers.tokenization_utils_base import ( # type: ignore[import] PreTrainedTokenizerBase, ) -from guidellm.backends import BackendType -from guidellm.benchmark.profile import ProfileType +from guidellm.backends import Backend, BackendType +from guidellm.benchmark.aggregator import ( + Aggregator, + CompilableAggregator, +) +from guidellm.benchmark.profile import Profile, ProfileType from guidellm.scheduler import StrategyType from guidellm.utils import StandardBaseModel -__ALL__ = ["Scenario", "GenerativeTextScenario", "get_builtin_scenarios"] +__ALL__ = [ + "Scenario", + "GenerativeTextScenario", + "get_builtin_scenarios", + "enable_scenarios", +] SCENARIO_DIR = Path(__file__).parent / "scenarios/" @@ -58,6 +71,30 @@ class Scenario(StandardBaseModel): target: str + @classmethod + def get_default(cls: type[T], field: str) -> Any: + """Get default values for model fields""" + return cls.model_fields[field].default + + @classmethod + def from_file(cls: type[T], filename: Path, overrides: dict | None = None) -> T: + """ + Attempt to create a new instance of the model using + data loaded from json or yaml file. 
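        # Intended call pattern, as a sketch (the file name and override key
        # below are hypothetical): values parsed from the file are updated with
        # any overrides before validation, so explicit options win over the file.
        #
        #   scenario = GenerativeTextScenario.from_file(
        #       Path("my_scenario.yaml"), overrides={"max_seconds": 60}
        #   )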
+ """ + try: + with filename.open() as f: + if str(filename).endswith(".json"): + data = json.load(f) + else: # Assume everything else is yaml + data = yaml.safe_load(f) + except (json.JSONDecodeError, yaml.YAMLError) as e: + logger.error(f"Failed to parse {filename} as type {cls.__name__}") + raise ValueError(f"Error when parsing file: {filename}") from e + + data.update(overrides) + return cls.model_validate(data) + @classmethod def from_builtin(cls: type[T], name: str, overrides: dict | None = None) -> T: filename = SCENARIO_DIR / f"{name}.json" @@ -78,29 +115,74 @@ class Config: # types like PreTrainedTokenizerBase arbitrary_types_allowed = True - backend_type: BackendType = "openai_http" - backend_args: dict[str, Any] | None = None - model: str | None = None - processor: str | Path | PreTrainedTokenizerBase | None = None - processor_args: dict[str, Any] | None = None data: ( - str - | Path - | Iterable[str | dict[str, Any]] + Iterable[str] + | Iterable[dict[str, Any]] | Dataset | DatasetDict | IterableDataset | IterableDatasetDict + | str + | Path ) - data_args: dict[str, Any] | None = None - data_sampler: Literal["random"] | None = None - rate_type: StrategyType | ProfileType + profile: StrategyType | ProfileType | Profile rate: Annotated[list[PositiveFloat] | None, BeforeValidator(parse_float_list)] = ( None ) - max_seconds: PositiveFloat | None = None - max_requests: PositiveInt | None = None - warmup_percent: Annotated[float | None, Field(gt=0, le=1)] = None - cooldown_percent: Annotated[float | None, Field(gt=0, le=1)] = None - output_sampling: NonNegativeInt | None = None random_seed: int = 42 + # Backend configuration + backend: BackendType | Backend = "openai_http" + backend_kwargs: dict[str, Any] | None = None + model: str | None = None + # Data configuration + processor: str | Path | PreTrainedTokenizerBase | None = None + processor_args: dict[str, Any] | None = None + data_args: dict[str, Any] | None = None + data_sampler: Literal["random"] | None = None + # Aggregators configuration + add_aggregators: ( + dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] | None + ) = None + warmup: Annotated[float | None, Field(gt=0, le=1)] = None + cooldown: Annotated[float | None, Field(gt=0, le=1)] = None + request_samples: PositiveInt | None = 20 + # Constraints configuration + max_seconds: PositiveFloat | PositiveInt | None = None + max_requests: PositiveInt | None = None + max_errors: PositiveInt | None = None + max_error_rate: PositiveFloat | None = None + max_global_error_rate: PositiveFloat | None = None + + +# Decorator function to apply scenario to a function +def enable_scenarios(func: Callable) -> Any: + @wraps(func) + async def decorator(*args, scenario: Scenario | None = None, **kwargs) -> Any: + if scenario is not None: + kwargs.update(**vars(scenario)) + return await func(*args, **kwargs) + + # Modify the signature of the decorator to include the `scenario` argument + sig = signature(func) + params = list(sig.parameters.values()) + # Place `scenario` before `**kwargs` or any parameter with a default value + loc = next( + ( + i + for i, p in enumerate(params) + if p.kind is Parameter.VAR_KEYWORD or p.default is not Parameter.empty + ), + len(params), + ) + params.insert( + loc, + Parameter( + "scenario", + Parameter.POSITIONAL_OR_KEYWORD, + default=None, + annotation=Scenario | None, + ), + ) + decorator.__signature__ = sig.replace(parameters=params) # type: ignore [attr-defined] + + return decorator From c47a1f6ee9adccc63f8f014f2b7dad4c02a0fba2 Mon Sep 17 
00:00:00 2001 From: Samuel Monson Date: Fri, 12 Sep 2025 12:01:25 -0400 Subject: [PATCH 25/90] Add workaround for https://github.com/pydantic/pydantic/issues/9541 Signed-off-by: Samuel Monson --- src/guidellm/benchmark/scenario.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index c45ae313..ff5ada26 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -10,7 +10,7 @@ import yaml from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from loguru import logger -from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt +from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt, SkipValidation from transformers.tokenization_utils_base import ( # type: ignore[import] PreTrainedTokenizerBase, ) @@ -115,7 +115,7 @@ class Config: # types like PreTrainedTokenizerBase arbitrary_types_allowed = True - data: ( + data: Annotated[ Iterable[str] | Iterable[dict[str, Any]] | Dataset @@ -123,8 +123,10 @@ class Config: | IterableDataset | IterableDatasetDict | str - | Path - ) + | Path, + # BUG: See https://github.com/pydantic/pydantic/issues/9541 + SkipValidation, + ] profile: StrategyType | ProfileType | Profile rate: Annotated[list[PositiveFloat] | None, BeforeValidator(parse_float_list)] = ( None @@ -159,7 +161,7 @@ def enable_scenarios(func: Callable) -> Any: @wraps(func) async def decorator(*args, scenario: Scenario | None = None, **kwargs) -> Any: if scenario is not None: - kwargs.update(**vars(scenario)) + kwargs.update(**scenario.model_dump()) return await func(*args, **kwargs) # Modify the signature of the decorator to include the `scenario` argument From 965aca2527ad14cd94f0fa29135a80fa7bd022bc Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Wed, 24 Sep 2025 10:27:16 -0400 Subject: [PATCH 26/90] Rename rate_type -> profile in builtin scenarios Signed-off-by: Samuel Monson --- src/guidellm/benchmark/scenarios/chat.json | 13 ++----------- src/guidellm/benchmark/scenarios/rag.json | 13 ++----------- 2 files changed, 4 insertions(+), 22 deletions(-) diff --git a/src/guidellm/benchmark/scenarios/chat.json b/src/guidellm/benchmark/scenarios/chat.json index 024438c5..7ed4ce16 100644 --- a/src/guidellm/benchmark/scenarios/chat.json +++ b/src/guidellm/benchmark/scenarios/chat.json @@ -1,13 +1,4 @@ { - "rate_type": "sweep", - "data": { - "prompt_tokens": 512, - "prompt_tokens_stdev": 128, - "prompt_tokens_min": 1, - "prompt_tokens_max": 1024, - "output_tokens": 256, - "output_tokens_stdev": 64, - "output_tokens_min": 1, - "output_tokens_max": 1024 - } + "profile": "sweep", + "data": "prompt_tokens=512,prompt_tokens_stdev=128,prompt_tokens_min=1,prompt_tokens_max=1024,output_tokens=256,output_tokens_stdev=64,output_tokens_min=1,output_tokens_max=1024" } diff --git a/src/guidellm/benchmark/scenarios/rag.json b/src/guidellm/benchmark/scenarios/rag.json index c7ee2f27..d790ce60 100644 --- a/src/guidellm/benchmark/scenarios/rag.json +++ b/src/guidellm/benchmark/scenarios/rag.json @@ -1,13 +1,4 @@ { - "rate_type": "sweep", - "data": { - "prompt_tokens": 4096, - "prompt_tokens_stdev": 512, - "prompt_tokens_min": 2048, - "prompt_tokens_max": 6144, - "output_tokens": 512, - "output_tokens_stdev": 128, - "output_tokens_min": 1, - "output_tokens_max": 1024 - } + "profile": "sweep", + "data": 
"prompt_tokens=4096,prompt_tokens_stdev=512,prompt_tokens_min=2048,prompt_tokens_max=6144,output_tokens=512,output_tokens_stdev=128,output_tokens_min=1,output_tokens_max=1024" } From d9a4df29e1a81acf40ca249a26b62a98387b8024 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Fri, 12 Sep 2025 12:18:36 -0400 Subject: [PATCH 27/90] Always parse rate as list[float] Signed-off-by: Samuel Monson --- src/guidellm/benchmark/entrypoints.py | 2 +- src/guidellm/benchmark/profile.py | 77 +++++++++++++-------------- 2 files changed, 38 insertions(+), 41 deletions(-) diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index df65fe8f..167bc3b7 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -149,7 +149,7 @@ async def benchmark_generative_text( # noqa: C901 target: str, data: DataType, profile: StrategyType | ProfileType | Profile, - rate: float | list[float] | None = None, + rate: list[float] | None = None, random_seed: int = 42, # Backend configuration backend: BackendType | Backend = "openai_http", diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index 042179ba..b5ce7c24 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -29,8 +29,17 @@ ) import numpy as np -from pydantic import Field, computed_field, field_serializer, field_validator +from pydantic import ( + Field, + NonNegativeFloat, + PositiveFloat, + PositiveInt, + computed_field, + field_serializer, + field_validator, +) +from guidellm import settings from guidellm.scheduler import ( AsyncConstantStrategy, AsyncPoissonStrategy, @@ -86,7 +95,7 @@ def __pydantic_schema_base_type__(cls) -> type[Profile]: def create( cls, rate_type: str, - rate: float | int | list[float | int] | None, + rate: list[float] | None, random_seed: int = 42, **kwargs: Any, ) -> Profile: @@ -112,7 +121,7 @@ def create( def resolve_args( cls, rate_type: str, - rate: float | int | list[float, int] | None, + rate: list[float] | None, random_seed: int, **kwargs: Any, ) -> dict[str, Any]: @@ -265,7 +274,7 @@ class SynchronousProfile(Profile): def resolve_args( cls, rate_type: str, - rate: float | int | list[float, int] | None, + rate: list[float] | None, random_seed: int, **kwargs: Any, ) -> dict[str, Any]: @@ -316,24 +325,22 @@ class ConcurrentProfile(Profile): """Fixed-concurrency strategy execution profile with configurable stream counts.""" type_: Literal["concurrent"] = "concurrent" # type: ignore[assignment] - streams: int | list[int] = Field( + streams: list[PositiveInt] = Field( description="Number of concurrent streams for request scheduling", - gt=0, ) - startup_duration: float = Field( + startup_duration: NonNegativeFloat = Field( default=0.0, description=( "Duration in seconds for distributing startup requests " "before completion-based timing" ), - ge=0, ) @classmethod def resolve_args( cls, rate_type: str, - rate: float | int | list[float, int] | None, + rate: list[float] | None, random_seed: int, **kwargs: Any, ) -> dict[str, Any]: @@ -348,14 +355,13 @@ def resolve_args( :raises ValueError: If rate is None. 
""" _ = (rate_type, random_seed) # unused - kwargs["streams"] = rate + kwargs["streams"] = [int(r) for r in rate] if rate else None return kwargs @property def strategy_types(self) -> list[StrategyType]: """Get concurrent strategy types for each configured stream count.""" - num_strategies = len(self.streams) if isinstance(self.streams, list) else 1 - return [self.type_] * num_strategies + return [self.type_] * len(self.streams) def next_strategy( self, @@ -370,13 +376,12 @@ def next_strategy( :return: ConcurrentStrategy with next stream count, or None if complete. """ _ = (prev_strategy, prev_benchmark) # unused - streams = self.streams if isinstance(self.streams, list) else [self.streams] - if len(self.completed_strategies) >= len(streams): + if len(self.completed_strategies) >= len(self.streams): return None return ConcurrentStrategy( - streams=streams[len(self.completed_strategies)], + streams=self.streams[len(self.completed_strategies)], startup_duration=self.startup_duration, ) @@ -388,25 +393,22 @@ class ThroughputProfile(Profile): """ type_: Literal["throughput"] = "throughput" # type: ignore[assignment] - max_concurrency: int | None = Field( + max_concurrency: PositiveInt | None = Field( default=None, description="Maximum number of concurrent requests to schedule", - gt=0, ) - startup_duration: float = Field( - default=0.0, + startup_duration: NonNegativeFloat = Field( description=( "Duration in seconds for distributing startup requests " "before full throughput scheduling" ), - ge=0, ) @classmethod def resolve_args( cls, rate_type: str, - rate: float | int | list[float, int] | None, + rate: list[float] | None, random_seed: int, **kwargs: Any, ) -> dict[str, Any]: @@ -422,8 +424,8 @@ def resolve_args( _ = (rate_type, random_seed) # unused # Remap rate to max_concurrency, strip out random_seed kwargs.pop("random_seed", None) - if rate is not None: - kwargs["max_concurrency"] = rate + if rate is not None and len(rate) > 0: + kwargs["max_concurrency"] = rate[0] return kwargs @property @@ -463,22 +465,19 @@ class AsyncProfile(Profile): strategy_type: Literal["constant", "poisson"] = Field( description="Type of asynchronous strategy pattern to use", ) - rate: float | list[float] = Field( + rate: list[PositiveFloat] = Field( description="Request scheduling rate in requests per second", - gt=0, ) - startup_duration: float = Field( + startup_duration: NonNegativeFloat = Field( default=0.0, description=( "Duration in seconds for distributing startup requests " "to converge quickly to desired rate" ), - ge=0, ) - max_concurrency: int | None = Field( + max_concurrency: PositiveInt | None = Field( default=None, description="Maximum number of concurrent requests to schedule", - gt=0, ) random_seed: int = Field( default=42, @@ -489,7 +488,7 @@ class AsyncProfile(Profile): def resolve_args( cls, rate_type: str, - rate: float | int | list[float, int] | None, + rate: list[float] | None, random_seed: int, **kwargs: Any, ) -> dict[str, Any]: @@ -523,7 +522,7 @@ def resolve_args( @property def strategy_types(self) -> list[StrategyType]: """Get async strategy types for each configured rate.""" - num_strategies = len(self.rate) if isinstance(self.rate, list) else 1 + num_strategies = len(self.rate) return [self.strategy_type] * num_strategies def next_strategy( @@ -541,12 +540,11 @@ def next_strategy( :raises ValueError: If strategy_type is neither 'constant' nor 'poisson'. 
""" _ = (prev_strategy, prev_benchmark) # unused - rate = self.rate if isinstance(self.rate, list) else [self.rate] - if len(self.completed_strategies) >= len(rate): + if len(self.completed_strategies) >= len(self.rate): return None - current_rate = rate[len(self.completed_strategies)] + current_rate = self.rate[len(self.completed_strategies)] if self.strategy_type == "constant": return AsyncConstantStrategy( @@ -577,18 +575,16 @@ class SweepProfile(Profile): ge=2, ) strategy_type: Literal["constant", "poisson"] = "constant" - startup_duration: float = Field( + startup_duration: NonNegativeFloat = Field( default=0.0, description=( "Duration in seconds for distributing startup requests " "to converge quickly to desired rate" ), - ge=0, ) - max_concurrency: int | None = Field( + max_concurrency: PositiveInt | None = Field( default=None, description="Maximum number of concurrent requests to schedule", - gt=0, ) random_seed: int = Field( default=42, @@ -615,7 +611,7 @@ class SweepProfile(Profile): def resolve_args( cls, rate_type: str, - rate: float | int | list[float, int] | None, + rate: list[float] | None, random_seed: int, **kwargs: Any, ) -> dict[str, Any]: @@ -628,7 +624,8 @@ def resolve_args( :param kwargs: Additional arguments to pass through. :return: Dictionary of resolved arguments. """ - kwargs["sweep_size"] = kwargs.get("sweep_size", rate) + sweep_size_from_rate = int(rate[0]) if rate else settings.default_sweep_number + kwargs["sweep_size"] = kwargs.get("sweep_size", sweep_size_from_rate) kwargs["random_seed"] = random_seed if rate_type in ["constant", "poisson"]: kwargs["strategy_type"] = rate_type From 03f908583774c21b1302a1bb704bc7b061b84989 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Thu, 25 Sep 2025 15:55:53 -0400 Subject: [PATCH 28/90] Fix bug where empty constraints in sweep caused error Signed-off-by: Jared O'Connell --- src/guidellm/benchmark/benchmarker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index ae591c23..5f05065a 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -124,7 +124,7 @@ async def run( backend=backend, strategy=strategy, env=environment, - **constraints, + **constraints or {}, ): aggregators_update = AggregatorState() for key, aggregator in benchmark_aggregators.items(): From 9c401da15be3736ff17cea8cf8a9908ff097dc1b Mon Sep 17 00:00:00 2001 From: jaredoconnell Date: Wed, 10 Sep 2025 22:58:34 +0000 Subject: [PATCH 29/90] Update HTML processing references for latest data output Signed-off-by: Jared O'Connell --- src/guidellm/presentation/data_models.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/guidellm/presentation/data_models.py b/src/guidellm/presentation/data_models.py index 9036636a..c1e8f13f 100644 --- a/src/guidellm/presentation/data_models.py +++ b/src/guidellm/presentation/data_models.py @@ -67,7 +67,7 @@ class RunInfo(BaseModel): @classmethod def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]): - model = benchmarks[0].worker.backend_model or "N/A" + model = benchmarks[0].benchmarker.backend.get("model", "N/A") timestamp = max( bm.run_stats.start_time for bm in benchmarks if bm.start_time is not None ) @@ -108,8 +108,8 @@ class WorkloadDetails(BaseModel): @classmethod def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]): - target = benchmarks[0].worker.backend_target - rate_type = benchmarks[0].args.profile.type_ + target = 
benchmarks[0].benchmarker.backend.get("target", "N/A") + rate_type = benchmarks[0].scheduler.strategy.type_ successful_requests = [ req for bm in benchmarks for req in bm.requests.successful ] @@ -152,13 +152,13 @@ def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]): statistics=output_token_stats, buckets=output_token_buckets, bucket_width=1 ) - min_start_time = benchmarks[0].run_stats.start_time + min_start_time = benchmarks[0].start_time all_req_times = [ - req.start_time - min_start_time + req.scheduler_info.started_at - min_start_time for bm in benchmarks for req in bm.requests.successful - if req.start_time is not None + if req.scheduler_info.started_at is not None ] number_of_buckets = len(benchmarks) request_over_time_buckets, bucket_width = Bucket.from_data( From 54556ae07d4da3b2bff78e5019c88657e39f973b Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Fri, 26 Sep 2025 11:32:07 -0400 Subject: [PATCH 30/90] Fix injection of data into the HTML output Signed-off-by: Jared O'Connell --- src/guidellm/benchmark/output.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index 95b51d70..53e0f7dd 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -724,9 +724,9 @@ async def finalize(self, report: GenerativeBenchmarksReport) -> Path: ui_api_data = {} for key, value in camel_data.items(): - placeholder_key = f"window.{humps.decamelize(key)} = {{}};" + placeholder_key = f"window.{key} = {{}};" replacement_value = ( - f"window.{humps.decamelize(key)} = {json.dumps(value, indent=2)};\n" + f"window.{key} = {json.dumps(value, indent=2)};\n" ) ui_api_data[placeholder_key] = replacement_value From da02ee84dac22460192805f886c61327e4d27b1d Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Mon, 29 Sep 2025 16:19:52 +0200 Subject: [PATCH 31/90] [GuideLLM Refactor] mock server package creation (#357) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## **Summary** Introduces a comprehensive mock server implementation that simulates OpenAI and vLLM APIs with configurable timing characteristics and response patterns. The mock server enables realistic performance testing and validation of GuideLLM benchmarking workflows without requiring actual model deployments, supporting both streaming and non-streaming endpoints with proper token counting, latency simulation (TTFT/ITL), and error handling. 
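
As a rough usage sketch (based on the `MockServerConfig` and `MockServer` classes added below; field names, defaults, and endpoints are taken from this PR), the server can be started programmatically:

```python
# Minimal sketch: start the mock server with custom timing characteristics.
# Assumes the guidellm.mock_server package introduced in this PR is available.
from guidellm.mock_server import MockServer, MockServerConfig

config = MockServerConfig(
    host="127.0.0.1",
    port=8000,
    model="llama-3.1-8b-instruct",
    ttft_ms=150.0,    # time to first token for streaming responses, in ms
    itl_ms=10.0,      # inter-token latency for streaming responses, in ms
    output_tokens=128,
)

# Serves /v1/chat/completions, /v1/completions, /tokenize, and /detokenize
MockServer(config).run()
```

The same settings can be supplied through environment variables using the `GUIDELLM_MOCK_SERVER_` prefix (for example, `GUIDELLM_MOCK_SERVER_PORT=9000`).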
## **Details** - Added `mock_server` package with modular architecture including configuration, handlers, models, server, and utilities - Implemented `MockServerConfig` with Pydantic settings for centralized configuration management supporting environment variables - Created HTTP request handlers for OpenAI-compatible endpoints: - `ChatCompletionsHandler` for `/v1/chat/completions` with streaming support - `CompletionsHandler` for `/v1/completions` legacy endpoint - `TokenizerHandler` for vLLM-compatible `/tokenize` and `/detokenize` endpoints - Added comprehensive Pydantic models for request/response validation compatible with both OpenAI and vLLM API specifications - Implemented high-performance Sanic-based server with CORS support, middleware, and proper error handling - Created mock tokenizer and text generation utilities with deterministic token generation for reproducible testing - Added timing generators for realistic latency simulation including TTFT (Time To First Token) and ITL (Inter-Token Latency) - Included comprehensive test suite with integration tests using real HTTP server instances ## **Test Plan** - Unit/integration style tests added to automation ## **Related Issues** - Part of the larger scheduler refactor initiative --- - [x] "I certify that all code in this PR is my own, except as noted below." ## **Use of AI** - [x] Includes AI-assisted code completion - [x] Includes code generated by an AI application - [ ] Includes AI-generated tests (NOTE: AI written tests should have a docstring that includes `## WRITTEN BY AI ##`) --------- Signed-off-by: Mark Kurtz --- src/guidellm/mock_server/__init__.py | 8 + src/guidellm/mock_server/config.py | 84 +++ src/guidellm/mock_server/handlers/__init__.py | 17 + .../mock_server/handlers/chat_completions.py | 280 ++++++++++ .../mock_server/handlers/completions.py | 280 ++++++++++ .../mock_server/handlers/tokenizer.py | 142 +++++ src/guidellm/mock_server/models.py | 510 +++++++++++++++++ src/guidellm/mock_server/server.py | 168 ++++++ src/guidellm/mock_server/utils.py | 307 +++++++++++ tests/unit/mock_server/__init__.py | 1 + tests/unit/mock_server/test_server.py | 518 ++++++++++++++++++ 11 files changed, 2315 insertions(+) create mode 100644 src/guidellm/mock_server/__init__.py create mode 100644 src/guidellm/mock_server/config.py create mode 100644 src/guidellm/mock_server/handlers/__init__.py create mode 100644 src/guidellm/mock_server/handlers/chat_completions.py create mode 100644 src/guidellm/mock_server/handlers/completions.py create mode 100644 src/guidellm/mock_server/handlers/tokenizer.py create mode 100644 src/guidellm/mock_server/models.py create mode 100644 src/guidellm/mock_server/server.py create mode 100644 src/guidellm/mock_server/utils.py create mode 100644 tests/unit/mock_server/__init__.py create mode 100644 tests/unit/mock_server/test_server.py diff --git a/src/guidellm/mock_server/__init__.py b/src/guidellm/mock_server/__init__.py new file mode 100644 index 00000000..f76e98fb --- /dev/null +++ b/src/guidellm/mock_server/__init__.py @@ -0,0 +1,8 @@ +""" +GuideLLM Mock Server for OpenAI and vLLM API compatibility. +""" + +from .config import MockServerConfig +from .server import MockServer + +__all__ = ["MockServer", "MockServerConfig"] diff --git a/src/guidellm/mock_server/config.py b/src/guidellm/mock_server/config.py new file mode 100644 index 00000000..27d1d742 --- /dev/null +++ b/src/guidellm/mock_server/config.py @@ -0,0 +1,84 @@ +""" +Configuration settings for the mock server component. 
+ +Provides centralized configuration management for mock server behavior including +network binding, model identification, response timing characteristics, and token +generation parameters. Supports environment variable configuration for deployment +flexibility with automatic validation through Pydantic settings. +""" + +from __future__ import annotations + +from pydantic import Field +from pydantic_settings import BaseSettings + +__all__ = ["MockServerConfig"] + + +class MockServerConfig(BaseSettings): + """ + Configuration settings for mock server behavior and deployment. + + Centralizes all configurable parameters for mock server operation including + network settings, model identification, response timing characteristics, and + token generation behavior. Environment variables with GUIDELLM_MOCK_SERVER_ + prefix override default values for deployment flexibility. + + Example: + :: + config = MockServerConfig(host="0.0.0.0", port=8080, model="custom-model") + # Use with environment variables: + # GUIDELLM_MOCK_SERVER_HOST=127.0.0.1 GUIDELLM_MOCK_SERVER_PORT=9000 + """ + + host: str = Field( + default="127.0.0.1", description="Host address to bind the server to" + ) + port: int = Field(default=8000, description="Port number to bind the server to") + workers: int = Field(default=1, description="Number of worker processes to spawn") + model: str = Field( + default="llama-3.1-8b-instruct", + description="Model name to present in API responses", + ) + processor: str | None = Field( + default=None, + description=( + "Processor type to use for token stats, tokenize, and detokenize. " + "If None, a mock one is created." + ), + ) + request_latency: float = Field( + default=3.0, + description="Base request latency in seconds for non-streaming responses", + ) + request_latency_std: float = Field( + default=0.0, + description="Standard deviation for request latency variation", + ) + ttft_ms: float = Field( + default=150.0, + description="Time to first token in milliseconds for streaming responses", + ) + ttft_ms_std: float = Field( + default=0.0, + description="Standard deviation for time to first token variation", + ) + itl_ms: float = Field( + default=10.0, + description="Inter-token latency in milliseconds for streaming responses", + ) + itl_ms_std: float = Field( + default=0.0, + description="Standard deviation for inter-token latency variation", + ) + output_tokens: int = Field( + default=128, description="Number of output tokens to generate in responses" + ) + output_tokens_std: float = Field( + default=0.0, + description="Standard deviation for output token count variation", + ) + + class Config: + env_prefix = "GUIDELLM_MOCK_SERVER_" + case_sensitive = False diff --git a/src/guidellm/mock_server/handlers/__init__.py b/src/guidellm/mock_server/handlers/__init__.py new file mode 100644 index 00000000..7dbc209f --- /dev/null +++ b/src/guidellm/mock_server/handlers/__init__.py @@ -0,0 +1,17 @@ +""" +HTTP request handlers for the GuideLLM mock server. + +This module exposes request handlers that implement OpenAI-compatible API endpoints +for the mock server. The handlers provide realistic LLM simulation capabilities +including chat completions, legacy completions, and tokenization services with +configurable timing characteristics, token counting, and proper error handling to +support comprehensive benchmarking and testing scenarios. 
+""" + +from __future__ import annotations + +from .chat_completions import ChatCompletionsHandler +from .completions import CompletionsHandler +from .tokenizer import TokenizerHandler + +__all__ = ["ChatCompletionsHandler", "CompletionsHandler", "TokenizerHandler"] diff --git a/src/guidellm/mock_server/handlers/chat_completions.py b/src/guidellm/mock_server/handlers/chat_completions.py new file mode 100644 index 00000000..de2781b0 --- /dev/null +++ b/src/guidellm/mock_server/handlers/chat_completions.py @@ -0,0 +1,280 @@ +""" +OpenAI Chat Completions API endpoint handler for the mock server. + +Provides a complete implementation of the /v1/chat/completions endpoint that simulates +realistic LLM behavior with configurable timing characteristics. Supports both streaming +and non-streaming responses with proper token counting, latency simulation including +TTFT (Time To First Token) and ITL (Inter-Token Latency), and OpenAI-compatible error +handling for comprehensive benchmarking scenarios. +""" + +from __future__ import annotations + +import asyncio +import json +import math +import time +import uuid + +from pydantic import ValidationError +from sanic import response +from sanic.request import Request +from sanic.response import HTTPResponse, ResponseStream +from transformers import PreTrainedTokenizer + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.models import ( + ChatCompletionChoice, + ChatCompletionsRequest, + ChatCompletionsResponse, + ChatMessage, + ErrorDetail, + ErrorResponse, + Usage, +) +from guidellm.mock_server.utils import ( + MockTokenizer, + create_fake_text, + create_fake_tokens_str, + sample_number, + times_generator, +) + +__all__ = ["ChatCompletionsHandler"] + + +class ChatCompletionsHandler: + """ + Handles OpenAI Chat Completions API requests with realistic LLM simulation. + + Implements the /v1/chat/completions endpoint behavior including request validation, + response generation, and timing simulation. Supports both streaming and + non-streaming modes with configurable latency characteristics for comprehensive + benchmarking. Uses either a mock tokenizer or a real tokenizer for accurate token + counting and realistic text generation. + + Example: + :: + config = MockServerConfig(ttft_ms=100, itl_ms=50) + handler = ChatCompletionsHandler(config) + response = await handler.handle(request) + """ + + def __init__(self, config: MockServerConfig) -> None: + """ + Initialize the Chat Completions handler with server configuration. + + :param config: Mock server configuration containing timing and behavior settings + """ + self.config = config + self.tokenizer = ( + MockTokenizer() + if config.processor is None + else PreTrainedTokenizer.from_pretrained(config.processor) + ) + + async def handle(self, request: Request) -> HTTPResponse: + """ + Process incoming chat completion requests with validation and routing. + + Validates the request payload, handles errors gracefully, and routes to + appropriate streaming or non-streaming response handlers based on the + request configuration. 
+ + :param request: Sanic HTTP request containing chat completion parameters + :return: HTTP response with completion data or error information + :raises ValidationError: When request payload fails validation + :raises JSONDecodeError: When request contains invalid JSON + """ + try: + # Parse and validate request + req_data = ChatCompletionsRequest(**request.json) + except ValidationError as exc: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(exc)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (json.JSONDecodeError, TypeError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + # Handle streaming vs non-streaming + if req_data.stream: + return await self._handle_stream(req_data) + else: + return await self._handle_non_stream(req_data) + + async def _handle_non_stream(self, req: ChatCompletionsRequest) -> HTTPResponse: + """ + Generate complete non-streaming chat completion response. + + Simulates realistic LLM behavior with TTFT and ITL delays, generates + appropriate token counts, and returns a complete response with usage + statistics and generated content. + + :param req: Validated chat completion request parameters + :return: Complete HTTP response with generated completion data + """ + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_text = self.tokenizer.apply_chat_template(req.messages) + prompt_tokens = len(self.tokenizer(prompt_text)) + max_tokens = req.max_completion_tokens or req.max_tokens or math.inf + completion_tokens_count = min( + sample_number(self.config.output_tokens, self.config.output_tokens_std), + max_tokens, + ) + + # ITL delay + itl_delay = 0.0 + delays_iter = iter(times_generator(self.config.itl_ms, self.config.itl_ms_std)) + for _ in range(int(completion_tokens_count) - 1): + itl_delay += next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + # Response + chat_response = ChatCompletionsResponse( + id=f"chatcmpl-{uuid.uuid4().hex[:29]}", + model=req.model, + choices=[ + ChatCompletionChoice( + index=0, + message=ChatMessage( + role="assistant", + content=create_fake_text( + int(completion_tokens_count), self.tokenizer + ), + ), + finish_reason="stop", + ) + ], + usage=Usage( + prompt_tokens=prompt_tokens, + completion_tokens=int(completion_tokens_count), + ), + system_fingerprint=f"fp_{uuid.uuid4().hex[:10]}", + ) + + return response.json(chat_response.model_dump()) + + async def _handle_stream(self, req: ChatCompletionsRequest) -> HTTPResponse: + """ + Generate streaming chat completion response with real-time token delivery. + + Creates a streaming response that delivers tokens incrementally with + realistic timing delays. Supports optional usage statistics in the final + stream chunk when requested via stream_options. 
+ + :param req: Validated chat completion request with streaming enabled + :return: Streaming HTTP response delivering tokens with proper timing + """ + + async def generate_stream(stream_response): + completion_id = f"chatcmpl-{uuid.uuid4().hex[:29]}" + + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_text = self.tokenizer.apply_chat_template(req.messages) + prompt_tokens = len(self.tokenizer(prompt_text)) + max_tokens = req.max_completion_tokens or req.max_tokens or math.inf + completion_tokens_count = int( + min( + sample_number( + self.config.output_tokens, self.config.output_tokens_std + ), + max_tokens, + ) + ) + + # Send tokens + tokens = create_fake_tokens_str(completion_tokens_count, self.tokenizer) + delays_iter = iter( + times_generator(self.config.itl_ms, self.config.itl_ms_std) + ) + + for index, token in enumerate(tokens): + if index > 0: + itl_delay = next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + chunk_data = { + "id": completion_id, + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "index": 0, + "delta": {"content": token}, + "finish_reason": None, + } + ], + } + await stream_response.write(f"data: {json.dumps(chunk_data)}\n\n") + + # Send final chunk with finish reason + final_chunk = { + "id": completion_id, + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "index": 0, + "delta": {}, + "finish_reason": "stop", + } + ], + } + await stream_response.write(f"data: {json.dumps(final_chunk)}\n\n") + + # Send usage if requested + if req.stream_options and req.stream_options.include_usage: + usage_chunk = { + "id": completion_id, + "object": "chat.completion.chunk", + "created": int(time.time()), + "model": req.model, + "choices": [], + "usage": { + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens_count, + "total_tokens": prompt_tokens + completion_tokens_count, + }, + } + await stream_response.write(f"data: {json.dumps(usage_chunk)}\n\n") + + # End stream + await stream_response.write("data: [DONE]\n\n") + + return ResponseStream( # type: ignore[return-value] + generate_stream, + content_type="text/event-stream", + headers={ + "Cache-Control": "no-cache", + "Connection": "keep-alive", + "X-Accel-Buffering": "no", + }, + ) diff --git a/src/guidellm/mock_server/handlers/completions.py b/src/guidellm/mock_server/handlers/completions.py new file mode 100644 index 00000000..5a4fe27d --- /dev/null +++ b/src/guidellm/mock_server/handlers/completions.py @@ -0,0 +1,280 @@ +""" +Legacy OpenAI Completions API handler for the mock server. + +This module provides the CompletionsHandler class that implements the /v1/completions +endpoint for the guidellm mock server. It supports both streaming and non-streaming +completions with configurable timing parameters (TTFT, ITL) and token generation to +simulate realistic LLM behavior for benchmarking and testing purposes. 
+""" + +from __future__ import annotations + +import asyncio +import json +import math +import time +import uuid + +from pydantic import ValidationError +from sanic import response +from sanic.request import Request +from sanic.response import HTTPResponse, ResponseStream +from transformers import PreTrainedTokenizer + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.models import ( + CompletionChoice, + CompletionsRequest, + CompletionsResponse, + ErrorDetail, + ErrorResponse, + Usage, +) +from guidellm.mock_server.utils import ( + MockTokenizer, + create_fake_text, + create_fake_tokens_str, + sample_number, + times_generator, +) + +__all__ = ["CompletionsHandler"] + + +class CompletionsHandler: + """ + Handler for the OpenAI /v1/completions endpoint in the mock server. + + This handler simulates the legacy OpenAI completions API by processing incoming + requests and generating responses with configurable timing and token generation + patterns. It supports both streaming and non-streaming modes, applying realistic + timing delays (TTFT and ITL) to mimic actual LLM behavior for benchmarking. + + Example: + :: + config = MockServerConfig(ttft_ms=100, itl_ms=50) + handler = CompletionsHandler(config) + response = await handler.handle(sanic_request) + """ + + def __init__(self, config: MockServerConfig) -> None: + """ + Initialize the completions handler with configuration settings. + + :param config: Mock server configuration containing timing parameters + and tokenizer settings + """ + self.config = config + self.tokenizer = ( + MockTokenizer() + if config.processor is None + else PreTrainedTokenizer.from_pretrained(config.processor) + ) + + async def handle(self, request: Request) -> HTTPResponse: + """ + Process a completions request and return the appropriate response. + + Validates the incoming request, determines whether to use streaming or + non-streaming mode, and delegates to the appropriate handler method. + + :param request: Sanic request object containing the completions request data + :return: HTTP response with completion data or error information + :raises ValidationError: When request validation fails + :raises json.JSONDecodeError: When request JSON is malformed + """ + try: + # Parse and validate request + req_data = CompletionsRequest(**request.json) + except ValidationError as e: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(e)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (json.JSONDecodeError, TypeError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + # Handle streaming vs non-streaming + if req_data.stream: + return await self._handle_stream(req_data) + else: + return await self._handle_non_stream(req_data) + + async def _handle_non_stream(self, req: CompletionsRequest) -> HTTPResponse: + """ + Generate a non-streaming completion response. + + Simulates TTFT and ITL delays, generates appropriate token counts, and returns + a complete response with the generated text and usage statistics. 
+ + :param req: Validated completions request containing prompt and parameters + :return: JSON HTTP response with completion text and usage data + :raises NotImplementedError: When batch processing is requested + """ + if isinstance(req.prompt, list): + raise NotImplementedError("Batch processing is not supported.") + + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_tokens = len(self.tokenizer(req.prompt)) + max_tokens = req.max_tokens or math.inf + completion_tokens_count = int( + min( + sample_number(self.config.output_tokens, self.config.output_tokens_std), + max_tokens, + ) + if req.stop + else max_tokens + ) + + # ITL delay + itl_delay = 0.0 + delays_iter = iter(times_generator(self.config.itl_ms, self.config.itl_ms_std)) + for _ in range(int(completion_tokens_count) - 1): + itl_delay += next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + # Response + completion_response = CompletionsResponse( + id=f"cmpl-{uuid.uuid4().hex[:29]}", + model=req.model, + choices=[ + CompletionChoice( + text=create_fake_text(completion_tokens_count, self.tokenizer), + index=0, + finish_reason="stop", + ) + ], + usage=Usage( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens_count, + ), + system_fingerprint=f"fp_{uuid.uuid4().hex[:10]}", + ) + + return response.json(completion_response.model_dump()) + + async def _handle_stream(self, req: CompletionsRequest) -> HTTPResponse: + """ + Generate a streaming completion response. + + Creates a server-sent events stream that delivers tokens incrementally with + realistic timing delays between each token. Includes usage statistics if + requested and properly terminates the stream. + + :param req: Validated completions request containing prompt and streaming + options + :return: ResponseStream object that generates server-sent events + """ + + async def generate_stream(stream_response): + completion_id = f"cmpl-{uuid.uuid4().hex[:29]}" + + # TTFT delay + await asyncio.sleep( + sample_number(self.config.ttft_ms, self.config.ttft_ms_std) / 1000.0 + ) + + # Token counts + prompt_tokens = len(self.tokenizer(req.prompt)) + max_tokens = req.max_tokens or math.inf + completion_tokens_count = int( + min( + sample_number( + self.config.output_tokens, self.config.output_tokens_std + ), + max_tokens, + ) + if req.stop + else max_tokens + ) + + # Send tokens + tokens = create_fake_tokens_str(completion_tokens_count, self.tokenizer) + delays_iter = iter( + times_generator(self.config.itl_ms, self.config.itl_ms_std) + ) + + for index, token in enumerate(tokens): + if index > 0: + itl_delay = next(delays_iter) + await asyncio.sleep(itl_delay / 1000.0) + + chunk_data = { + "id": completion_id, + "object": "text_completion", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "text": token, + "index": index, + "finish_reason": None, + } + ], + } + await stream_response.write(f"data: {json.dumps(chunk_data)}\n\n") + + # Send final chunk with finish reason + final_chunk = { + "id": completion_id, + "object": "text_completion", + "created": int(time.time()), + "model": req.model, + "choices": [ + { + "text": "", + "index": index, + "finish_reason": "stop", + } + ], + } + await stream_response.write(f"data: {json.dumps(final_chunk)}\n\n") + + # Send usage if requested + if req.stream_options and req.stream_options.include_usage: + usage_chunk = { + "id": completion_id, + "object": "text_completion", + "created": int(time.time()), + "model": 
req.model, + "choices": [], + "usage": { + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens_count, + "total_tokens": prompt_tokens + completion_tokens_count, + }, + } + await stream_response.write(f"data: {json.dumps(usage_chunk)}\n\n") + + # End stream + await stream_response.write("data: [DONE]\n\n") + + return ResponseStream( # type: ignore[return-value] + generate_stream, + content_type="text/event-stream", + headers={ + "Cache-Control": "no-cache", + "Connection": "keep-alive", + "X-Accel-Buffering": "no", + }, + ) diff --git a/src/guidellm/mock_server/handlers/tokenizer.py b/src/guidellm/mock_server/handlers/tokenizer.py new file mode 100644 index 00000000..430ac0ef --- /dev/null +++ b/src/guidellm/mock_server/handlers/tokenizer.py @@ -0,0 +1,142 @@ +""" +HTTP request handler for vLLM tokenization API endpoints in the mock server. + +This module provides the TokenizerHandler class that implements vLLM-compatible +tokenization and detokenization endpoints for testing and development purposes. +It handles text-to-token conversion, token-to-text reconstruction, request +validation, and error responses with proper HTTP status codes and JSON formatting. +""" + +from __future__ import annotations + +from pydantic import ValidationError +from sanic import response +from sanic.request import Request +from sanic.response import HTTPResponse +from transformers.tokenization_utils import PreTrainedTokenizer + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.models import ( + DetokenizeRequest, + DetokenizeResponse, + ErrorDetail, + ErrorResponse, + TokenizeRequest, + TokenizeResponse, +) +from guidellm.mock_server.utils import MockTokenizer + +__all__ = ["TokenizerHandler"] + + +class TokenizerHandler: + """ + HTTP request handler for vLLM tokenization and detokenization endpoints. + + Provides mock implementations of vLLM's tokenization API endpoints including + /tokenize for converting text to tokens and /detokenize for reconstructing + text from token sequences. Handles request validation, error responses, and + JSON serialization with proper HTTP status codes. + + Example: + :: + handler = TokenizerHandler(config) + response = await handler.tokenize(request) + response = await handler.detokenize(request) + """ + + def __init__(self, config: MockServerConfig) -> None: + """ + Initialize the tokenizer handler with configuration. + + :param config: Server configuration object containing tokenizer settings + """ + self.config = config + self.tokenizer = ( + MockTokenizer() + if config.processor is None + else PreTrainedTokenizer.from_pretrained(config.processor) + ) + + async def tokenize(self, request: Request) -> HTTPResponse: + """ + Convert input text to token IDs via the /tokenize endpoint. + + Validates the request payload, extracts text content, and returns a JSON + response containing the token sequence and count. Handles validation errors + and malformed JSON with appropriate HTTP error responses. 
+ + :param request: Sanic HTTP request containing JSON payload with text field + :return: JSON response with tokens list and count, or error response + """ + try: + req_data = TokenizeRequest(**request.json) + except ValidationError as exc: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(exc)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (ValueError, TypeError, KeyError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + tokens = self.tokenizer.tokenize(req_data.text) + token_ids = self.tokenizer.convert_tokens_to_ids(tokens) + + return response.json( + TokenizeResponse(tokens=token_ids, count=len(token_ids)).model_dump() + ) + + async def detokenize(self, request: Request) -> HTTPResponse: + """ + Convert token IDs back to text via the /detokenize endpoint. + + Validates the request payload, extracts token sequences, and returns a JSON + response containing the reconstructed text. Handles validation errors and + malformed JSON with appropriate HTTP error responses. + + :param request: Sanic HTTP request containing JSON payload with tokens field + :return: JSON response with reconstructed text, or error response + """ + try: + req_data = DetokenizeRequest(**request.json) + except ValidationError as exc: + return response.json( + ErrorResponse( + error=ErrorDetail( + message=f"Invalid request: {str(exc)}", + type="invalid_request_error", + code="invalid_request", + ) + ).model_dump(), + status=400, + ) + except (ValueError, TypeError, KeyError): + return response.json( + ErrorResponse( + error=ErrorDetail( + message="Invalid JSON in request body", + type="invalid_request_error", + code="invalid_json", + ) + ).model_dump(), + status=400, + ) + + text = self.tokenizer.decode(req_data.tokens, skip_special_tokens=False) + + return response.json(DetokenizeResponse(text=text).model_dump()) diff --git a/src/guidellm/mock_server/models.py b/src/guidellm/mock_server/models.py new file mode 100644 index 00000000..cd342f7a --- /dev/null +++ b/src/guidellm/mock_server/models.py @@ -0,0 +1,510 @@ +""" +Pydantic models for OpenAI API and vLLM API request/response validation. + +This module defines comprehensive data models for validating and serializing API +requests and responses compatible with both OpenAI's API specification and vLLM's +extended parameters. It includes models for chat completions, legacy text completions, +tokenization operations, and error handling, supporting both streaming and non-streaming +responses with full type safety and validation. +""" + +from __future__ import annotations + +import time +from typing import Any, Literal + +from pydantic import BaseModel, Field + +__all__ = [ + "ChatCompletionChoice", + "ChatCompletionChunk", + "ChatCompletionsRequest", + "ChatCompletionsResponse", + "ChatMessage", + "CompletionChoice", + "CompletionsRequest", + "CompletionsResponse", + "DetokenizeRequest", + "DetokenizeResponse", + "ErrorDetail", + "ErrorResponse", + "StreamOptions", + "TokenizeRequest", + "TokenizeResponse", + "Usage", +] + + +class Usage(BaseModel): + """Token usage statistics for API requests and responses. + + Tracks the number of tokens consumed in prompts, completions, and total + usage for billing and monitoring purposes. 
+ """ + + prompt_tokens: int = Field(description="Number of tokens in the input prompt") + completion_tokens: int = Field( + description="Number of tokens in the generated completion" + ) + total_tokens: int = Field(description="Total tokens used (prompt + completion)") + + def __init__(self, prompt_tokens: int = 0, completion_tokens: int = 0, **kwargs): + """Initialize usage statistics. + + :param prompt_tokens: Number of tokens in the input prompt + :param completion_tokens: Number of tokens in the generated completion + :param kwargs: Additional keyword arguments passed to BaseModel + """ + super().__init__( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + **kwargs, + ) + + +class StreamOptions(BaseModel): + """Configuration options for streaming API responses. + + Controls the behavior and content of streamed responses including + whether to include usage statistics in the final chunk. + """ + + include_usage: bool | None = Field( + default=None, + description="Whether to include usage statistics in streaming responses", + ) + + +class ChatMessage(BaseModel): + """A single message in a chat conversation. + + Represents one exchange in a conversational interface with role-based + content and optional metadata for advanced features. + """ + + role: Literal["system", "user", "assistant", "tool"] = Field( + description="Role of the message sender in the conversation" + ) + content: str = Field(description="Text content of the message") + name: str | None = Field( + default=None, description="Optional name identifier for the message sender" + ) + + +class ChatCompletionsRequest(BaseModel): + """Request parameters for chat completion API endpoints. + + Comprehensive model supporting both OpenAI standard parameters and vLLM + extensions for advanced generation control, guided decoding, and performance + optimization. 
+ """ + + model: str = Field(description="Model identifier to use for generation") + messages: list[ChatMessage] = Field( + description="List of messages in the conversation" + ) + max_tokens: int | None = Field( + default=None, description="Maximum number of tokens to generate" + ) + max_completion_tokens: int | None = Field( + default=None, description="Maximum tokens in completion (OpenAI naming)" + ) + temperature: float | None = Field( + default=1.0, description="Sampling temperature for randomness control" + ) + top_p: float | None = Field(default=1.0, description="Nucleus sampling parameter") + n: int | None = Field( + default=1, description="Number of completion choices to generate" + ) + stream: bool | None = Field( + default=False, description="Whether to stream response chunks" + ) + stream_options: StreamOptions | None = Field( + default=None, description="Configuration for streaming responses" + ) + stop: str | list[str] | None = Field( + default=None, description="Stop sequences to end generation" + ) + presence_penalty: float | None = Field( + default=0.0, description="Penalty for token presence to encourage diversity" + ) + frequency_penalty: float | None = Field( + default=0.0, description="Penalty for token frequency to reduce repetition" + ) + logit_bias: dict[str, float] | None = Field( + default=None, description="Bias values for specific tokens" + ) + seed: int | None = Field( + default=None, description="Random seed for reproducible outputs" + ) + user: str | None = Field( + default=None, description="User identifier for tracking and abuse monitoring" + ) + + # vLLM extensions + use_beam_search: bool | None = Field( + default=False, description="Enable beam search for better quality" + ) + top_k: int | None = Field(default=None, description="Top-k sampling parameter") + min_p: float | None = Field( + default=None, description="Minimum probability threshold for sampling" + ) + repetition_penalty: float | None = Field( + default=None, description="Penalty for repeated tokens" + ) + length_penalty: float | None = Field( + default=1.0, description="Length penalty for sequence scoring" + ) + stop_token_ids: list[int] | None = Field( + default=None, description="Token IDs that trigger generation stop" + ) + include_stop_str_in_output: bool | None = Field( + default=False, description="Include stop sequence in output" + ) + ignore_eos: bool | None = Field( + default=False, description="Ignore end-of-sequence tokens" + ) + min_tokens: int | None = Field( + default=0, description="Minimum number of tokens to generate" + ) + skip_special_tokens: bool | None = Field( + default=True, description="Skip special tokens in output" + ) + spaces_between_special_tokens: bool | None = Field( + default=True, description="Add spaces between special tokens" + ) + truncate_prompt_tokens: int | None = Field( + default=None, description="Maximum prompt tokens before truncation" + ) + allowed_token_ids: list[int] | None = Field( + default=None, description="Restrict generation to specific token IDs" + ) + prompt_logprobs: int | None = Field( + default=None, description="Number of logprobs to return for prompt tokens" + ) + add_special_tokens: bool | None = Field( + default=True, description="Add special tokens during processing" + ) + guided_json: str | dict[str, Any] | None = Field( + default=None, description="JSON schema for guided generation" + ) + guided_regex: str | None = Field( + default=None, description="Regex pattern for guided generation" + ) + guided_choice: list[str] | None = 
Field( + default=None, description="List of choices for guided generation" + ) + guided_grammar: str | None = Field( + default=None, description="Grammar specification for guided generation" + ) + guided_decoding_backend: str | None = Field( + default=None, description="Backend to use for guided decoding" + ) + guided_whitespace_pattern: str | None = Field( + default=None, description="Whitespace pattern for guided generation" + ) + priority: int | None = Field( + default=0, description="Request priority for scheduling" + ) + + +class ChatCompletionChoice(BaseModel): + """A single completion choice from a chat completion response. + + Contains the generated message and metadata about why generation + stopped and the choice's position in the response. + """ + + index: int = Field(description="Index of this choice in the response") + message: ChatMessage = Field(description="Generated message content") + finish_reason: Literal["stop", "length", "content_filter", "tool_calls"] | None = ( + Field(description="Reason why generation finished") + ) + + +class ChatCompletionsResponse(BaseModel): + """Response from chat completion API endpoints. + + Contains generated choices, usage statistics, and metadata for + non-streaming chat completion requests. + """ + + id: str = Field(description="Unique identifier for this completion") + object: Literal["chat.completion"] = Field( + default="chat.completion", description="Object type identifier" + ) + created: int = Field( + default_factory=lambda: int(time.time()), + description="Unix timestamp of creation", + ) + model: str = Field(description="Model used for generation") + choices: list[ChatCompletionChoice] = Field( + description="Generated completion choices" + ) + usage: Usage | None = Field(default=None, description="Token usage statistics") + system_fingerprint: str | None = Field( + default=None, description="System configuration fingerprint" + ) + + +class ChatCompletionChunk(BaseModel): + """A single chunk in a streamed chat completion response. + + Represents one piece of a streaming response with delta content + and optional usage statistics in the final chunk. + """ + + id: str = Field(description="Unique identifier for this completion") + object: Literal["chat.completion.chunk"] = Field( + default="chat.completion.chunk", + description="Object type identifier for streaming chunks", + ) + created: int = Field( + default_factory=lambda: int(time.time()), + description="Unix timestamp of creation", + ) + model: str = Field(description="Model used for generation") + choices: list[dict[str, Any]] = Field(description="Delta choices for streaming") + usage: Usage | None = Field( + default=None, description="Token usage statistics (typically in final chunk)" + ) + + +class CompletionsRequest(BaseModel): + """Request parameters for legacy text completion API endpoints. + + Supports the older text completion format with prompt-based input + and the same extensive parameter set as chat completions for + backward compatibility. 
+ """ + + model: str = Field(description="Model identifier to use for generation") + prompt: str | list[str] = Field(description="Input prompt(s) for completion") + max_tokens: int | None = Field( + default=16, description="Maximum number of tokens to generate" + ) + temperature: float | None = Field( + default=1.0, description="Sampling temperature for randomness control" + ) + top_p: float | None = Field(default=1.0, description="Nucleus sampling parameter") + n: int | None = Field( + default=1, description="Number of completion choices to generate" + ) + stream: bool | None = Field( + default=False, description="Whether to stream response chunks" + ) + stream_options: StreamOptions | None = Field( + default=None, description="Configuration for streaming responses" + ) + logprobs: int | None = Field( + default=None, description="Number of logprobs to return" + ) + echo: bool | None = Field( + default=False, description="Whether to echo the prompt in output" + ) + stop: str | list[str] | None = Field( + default_factory=lambda: ["<|endoftext|>"], + description="Stop sequences to end generation", + ) + presence_penalty: float | None = Field( + default=0.0, description="Penalty for token presence to encourage diversity" + ) + frequency_penalty: float | None = Field( + default=0.0, description="Penalty for token frequency to reduce repetition" + ) + best_of: int | None = Field( + default=1, description="Number of candidates to generate and return the best" + ) + logit_bias: dict[str, float] | None = Field( + default=None, description="Bias values for specific tokens" + ) + seed: int | None = Field( + default=None, description="Random seed for reproducible outputs" + ) + suffix: str | None = Field( + default=None, description="Suffix to append after completion" + ) + user: str | None = Field( + default=None, description="User identifier for tracking and abuse monitoring" + ) + + # vLLM extensions (same as chat completions) + use_beam_search: bool | None = Field( + default=False, description="Enable beam search for better quality" + ) + top_k: int | None = Field(default=None, description="Top-k sampling parameter") + min_p: float | None = Field( + default=None, description="Minimum probability threshold for sampling" + ) + repetition_penalty: float | None = Field( + default=None, description="Penalty for repeated tokens" + ) + length_penalty: float | None = Field( + default=1.0, description="Length penalty for sequence scoring" + ) + stop_token_ids: list[int] | None = Field( + default=None, description="Token IDs that trigger generation stop" + ) + include_stop_str_in_output: bool | None = Field( + default=False, description="Include stop sequence in output" + ) + ignore_eos: bool | None = Field( + default=False, description="Ignore end-of-sequence tokens" + ) + min_tokens: int | None = Field( + default=0, description="Minimum number of tokens to generate" + ) + skip_special_tokens: bool | None = Field( + default=True, description="Skip special tokens in output" + ) + spaces_between_special_tokens: bool | None = Field( + default=True, description="Add spaces between special tokens" + ) + truncate_prompt_tokens: int | None = Field( + default=None, description="Maximum prompt tokens before truncation" + ) + allowed_token_ids: list[int] | None = Field( + default=None, description="Restrict generation to specific token IDs" + ) + prompt_logprobs: int | None = Field( + default=None, description="Number of logprobs to return for prompt tokens" + ) + add_special_tokens: bool | None = Field( + 
default=True, description="Add special tokens during processing" + ) + guided_json: str | dict[str, Any] | None = Field( + default=None, description="JSON schema for guided generation" + ) + guided_regex: str | None = Field( + default=None, description="Regex pattern for guided generation" + ) + guided_choice: list[str] | None = Field( + default=None, description="List of choices for guided generation" + ) + guided_grammar: str | None = Field( + default=None, description="Grammar specification for guided generation" + ) + guided_decoding_backend: str | None = Field( + default=None, description="Backend to use for guided decoding" + ) + guided_whitespace_pattern: str | None = Field( + default=None, description="Whitespace pattern for guided generation" + ) + priority: int | None = Field( + default=0, description="Request priority for scheduling" + ) + + +class CompletionChoice(BaseModel): + """A single completion choice from a text completion response. + + Contains the generated text and metadata about completion + quality and stopping conditions. + """ + + text: str = Field(description="Generated text content") + index: int = Field(description="Index of this choice in the response") + logprobs: dict[str, Any] | None = Field( + default=None, description="Log probabilities for generated tokens" + ) + finish_reason: Literal["stop", "length", "content_filter"] | None = Field( + description="Reason why generation finished" + ) + + +class CompletionsResponse(BaseModel): + """Response from legacy text completion API endpoints. + + Contains generated text choices, usage statistics, and metadata + for non-streaming text completion requests. + """ + + id: str = Field(description="Unique identifier for this completion") + object: Literal["text_completion"] = Field( + default="text_completion", description="Object type identifier" + ) + created: int = Field( + default_factory=lambda: int(time.time()), + description="Unix timestamp of creation", + ) + model: str = Field(description="Model used for generation") + choices: list[CompletionChoice] = Field(description="Generated completion choices") + usage: Usage | None = Field(default=None, description="Token usage statistics") + system_fingerprint: str | None = Field( + default=None, description="System configuration fingerprint" + ) + + +class TokenizeRequest(BaseModel): + """Request for tokenizing text into token sequences. + + Converts input text into model-specific token representations + with optional special token handling. + """ + + text: str = Field(description="Text to tokenize") + add_special_tokens: bool | None = Field( + default=True, description="Whether to add model-specific special tokens" + ) + + +class TokenizeResponse(BaseModel): + """Response containing tokenized representation of input text. + + Provides both the token sequence and count for analysis + and token budget planning. + """ + + tokens: list[int] = Field(description="List of token IDs") + count: int = Field(description="Total number of tokens") + + +class DetokenizeRequest(BaseModel): + """Request for converting token sequences back to text. + + Reconstructs human-readable text from model token representations + with configurable special token handling. 
+ """ + + tokens: list[int] = Field(description="List of token IDs to convert") + skip_special_tokens: bool | None = Field( + default=True, description="Whether to skip special tokens in output" + ) + spaces_between_special_tokens: bool | None = Field( + default=True, description="Whether to add spaces between special tokens" + ) + + +class DetokenizeResponse(BaseModel): + """Response containing text reconstructed from tokens. + + Provides the human-readable text representation of the + input token sequence. + """ + + text: str = Field(description="Reconstructed text from tokens") + + +class ErrorDetail(BaseModel): + """Detailed error information for API failures. + + Provides structured error data including message, type classification, + and optional error codes for debugging and error handling. + """ + + message: str = Field(description="Human-readable error description") + type: str = Field(description="Error type classification") + code: str | None = Field( + default=None, description="Optional error code for programmatic handling" + ) + + +class ErrorResponse(BaseModel): + """Standardized error response structure for API failures. + + Wraps error details in a consistent format compatible with + OpenAI API error response conventions. + """ + + error: ErrorDetail = Field(description="Detailed error information") diff --git a/src/guidellm/mock_server/server.py b/src/guidellm/mock_server/server.py new file mode 100644 index 00000000..ff9d5fcd --- /dev/null +++ b/src/guidellm/mock_server/server.py @@ -0,0 +1,168 @@ +""" +High-performance mock server for OpenAI and vLLM API compatibility testing. + +This module provides a Sanic-based mock server that simulates OpenAI and vLLM APIs +with configurable latency, token generation patterns, and response characteristics. +The server supports both streaming and non-streaming endpoints, enabling realistic +performance testing and validation of GuideLLM benchmarking workflows without +requiring actual model deployments. +""" + +from __future__ import annotations + +import time + +from sanic import Sanic, response +from sanic.exceptions import NotFound +from sanic.log import logger +from sanic.request import Request +from sanic.response import HTTPResponse + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.handlers import ( + ChatCompletionsHandler, + CompletionsHandler, + TokenizerHandler, +) + +__all__ = ["MockServer"] + + +class MockServer: + """ + High-performance mock server implementing OpenAI and vLLM API endpoints. + + Provides a Sanic-based web server that simulates API responses with configurable + timing characteristics for testing and benchmarking purposes. Supports chat + completions, text completions, tokenization endpoints, and model listing with + realistic latency patterns to enable comprehensive performance validation. + + Example: + :: + config = ServerConfig(model="test-model", port=8080) + server = MockServer(config) + server.run() + """ + + def __init__(self, config: MockServerConfig) -> None: + """ + Initialize the mock server with configuration. 
+ + :param config: Server configuration containing network settings and response + timing parameters + """ + self.config = config + self.app = Sanic("guidellm-mock-server") + self.chat_handler = ChatCompletionsHandler(config) + self.completions_handler = CompletionsHandler(config) + self.tokenizer_handler = TokenizerHandler(config) + + self._setup_middleware() + self._setup_routes() + self._setup_error_handlers() + + def _setup_middleware(self): + """Setup middleware for CORS, logging, etc.""" + + @self.app.middleware("request") + async def add_cors_headers(_request: Request): + """Add CORS headers to all requests.""" + + @self.app.middleware("response") + async def add_response_headers(_request: Request, resp: HTTPResponse): + """Add standard response headers.""" + resp.headers["Access-Control-Allow-Origin"] = "*" + resp.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS" + resp.headers["Access-Control-Allow-Headers"] = "Content-Type, Authorization" + resp.headers["Server"] = "guidellm-mock-server" + + def _setup_routes(self): # noqa: C901 + @self.app.get("/health") + async def health_check(_request: Request): + return response.json({"status": "healthy", "timestamp": time.time()}) + + @self.app.get("/v1/models") + async def list_models(_request: Request): + return response.json( + { + "object": "list", + "data": [ + { + "id": self.config.model, + "object": "model", + "created": int(time.time()), + "owned_by": "guidellm-mock", + } + ], + } + ) + + @self.app.route("/v1/chat/completions", methods=["POST", "OPTIONS"]) + async def chat_completions(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.chat_handler.handle(request) + + @self.app.route("/v1/completions", methods=["POST", "OPTIONS"]) + async def completions(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.completions_handler.handle(request) + + @self.app.route("/tokenize", methods=["POST", "OPTIONS"]) + async def tokenize(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.tokenizer_handler.tokenize(request) + + @self.app.route("/detokenize", methods=["POST", "OPTIONS"]) + async def detokenize(request: Request): + if request.method == "OPTIONS": + return response.text("", status=204) + return await self.tokenizer_handler.detokenize(request) + + def _setup_error_handlers(self): + """Setup error handlers.""" + + @self.app.exception(Exception) + async def generic_error_handler(_request: Request, exception: Exception): + logger.error(f"Unhandled exception: {exception}") + return response.json( + { + "error": { + "message": "Internal server error", + "type": type(exception).__name__, + "error": str(exception), + } + }, + status=500, + ) + + @self.app.exception(NotFound) + async def not_found_handler(_request: Request, _exception): + return response.json( + { + "error": { + "message": "Not Found", + "type": "not_found_error", + "code": "not_found", + } + }, + status=404, + ) + + def run(self) -> None: + """ + Start the mock server with configured settings. + + Runs the Sanic application in single-process mode with access logging enabled + for debugging and monitoring request patterns during testing. 
+ """ + self.app.run( + host=self.config.host, + port=self.config.port, + debug=False, + single_process=True, + access_log=True, + register_sys_signals=False, # Disable signal handlers for threading + ) diff --git a/src/guidellm/mock_server/utils.py b/src/guidellm/mock_server/utils.py new file mode 100644 index 00000000..8348d0a6 --- /dev/null +++ b/src/guidellm/mock_server/utils.py @@ -0,0 +1,307 @@ +""" +Mock server utilities for text generation and tokenization testing. + +This module provides mock tokenization and text generation utilities for testing +guidellm's mock server functionality. It includes a mock tokenizer that simulates +tokenization processes, functions to generate reproducible fake text with specific +token counts, and timing generators for realistic benchmarking scenarios. +""" + +from __future__ import annotations + +import random +import re +from collections.abc import Generator + +from faker import Faker +from transformers.tokenization_utils import AddedToken, PreTrainedTokenizer, TextInput + +__all__ = [ + "MockTokenizer", + "create_fake_text", + "create_fake_tokens_str", + "sample_number", + "times_generator", +] + + +class MockTokenizer(PreTrainedTokenizer): + """ + Mock tokenizer implementation for testing text processing workflows. + + Provides a simplified tokenizer that splits text using regex patterns and + generates deterministic token IDs based on string hashing. Used for testing + guidellm components without requiring actual model tokenizers. + + :cvar VocabSize: Fixed vocabulary size for the mock tokenizer + """ + + VocabSize = 100000007 + + def __len__(self) -> int: + """ + Get the vocabulary size of the tokenizer. + + :return: The total number of tokens in the vocabulary + """ + return self.VocabSize + + def __call__(self, text: str | list[str], **kwargs) -> list[int]: # noqa: ARG002 + """ + Tokenize text and return token IDs (callable interface). + + :param text: Input text to tokenize + :return: List of token IDs + """ + if isinstance(text, str): + tokens = self.tokenize(text) + return self.convert_tokens_to_ids(tokens) + elif isinstance(text, list): + # Handle batch processing + return [self.__call__(t) for t in text] + else: + msg = f"text input must be of type `str` or `list[str]`, got {type(text)}" + raise ValueError(msg) + + def tokenize(self, text: TextInput, **_kwargs) -> list[str]: + """ + Tokenize input text into a list of token strings. + + Splits text using regex to separate words, punctuation, and whitespace + into individual tokens for processing. + + :param text: Input text to tokenize + :return: List of token strings from the input text + """ + # Split text into tokens: words, spaces, and punctuation + return re.findall(r"\w+|[^\w\s]|\s+", text) + + def convert_tokens_to_ids(self, tokens: str | list[str]) -> int | list[int]: + """ + Convert token strings to numeric token IDs. + + Uses deterministic hashing to generate consistent token IDs for + reproducible testing scenarios. + + :param tokens: Single token string or list of token strings + :return: Single token ID or list of token IDs + """ + if isinstance(tokens, str): + return hash(tokens) % self.VocabSize + return [hash(token) % self.VocabSize for token in tokens] + + def convert_ids_to_tokens( + self, ids: int | list[int], _skip_special_tokens: bool = False + ) -> str | list[str]: + """ + Convert numeric token IDs back to token strings. + + Generates fake text tokens using Faker library seeded with token IDs + for deterministic and reproducible token generation. 
+ + :param ids: Single token ID or list of token IDs to convert + :return: Single token string or list of token strings + """ + if not ids and not isinstance(ids, list): + return "" + elif not ids: + return [""] + + if isinstance(ids, int): + fake = Faker() + fake.seed_instance(ids % self.VocabSize) + + return fake.word() + + fake = Faker() + fake.seed_instance(sum(ids) % self.VocabSize) + + target_count = len(ids) + current_count = 0 + tokens = [] + + while current_count < target_count: + text = fake.text( + max_nb_chars=(target_count - current_count) * 10 # oversample + ) + new_tokens = self.tokenize(text) + + if current_count > 0: + new_tokens = [".", " "] + new_tokens + + new_tokens = ( + new_tokens[: target_count - current_count] + if len(new_tokens) > (target_count - current_count) + else new_tokens + ) + tokens += new_tokens + current_count += len(new_tokens) + + return tokens + + def convert_tokens_to_string(self, tokens: list[str]) -> str: + """ + Convert a list of token strings back to a single text string. + + :param tokens: List of token strings to concatenate + :return: Concatenated string from all tokens + """ + return "".join(tokens) + + def _add_tokens( + self, + new_tokens: list[str] | list[AddedToken], # noqa: ARG002 + special_tokens: bool = False, # noqa: ARG002 + ) -> int: + """ + Add new tokens to the tokenizer vocabulary (mock implementation). + + :param new_tokens: List of tokens to add to the vocabulary + :param special_tokens: Whether the tokens are special tokens + :return: Number of tokens actually added (always 0 for mock) + """ + return 0 + + def apply_chat_template( + self, + conversation: list, + tokenize: bool = False, # Changed default to False to match transformers + add_generation_prompt: bool = False, # noqa: ARG002 + **kwargs, # noqa: ARG002 + ) -> str | list[int]: + """ + Apply a chat template to format conversation messages. + + Mock implementation that concatenates all message content for testing. + + :param conversation: List of chat messages + :param tokenize: Whether to return tokens or string + :param add_generation_prompt: Whether to add generation prompt + :return: Formatted text string or token IDs + """ + # Simple concatenation of all message content + texts = [] + for message in conversation: + if isinstance(message, dict) and "content" in message: + texts.append(message["content"]) + elif hasattr(message, "content"): + texts.append(message.content) + + formatted_text = " ".join(texts) + + if tokenize: + return self.convert_tokens_to_ids(self.tokenize(formatted_text)) + return formatted_text + + def decode( + self, + token_ids: list[int], + skip_special_tokens: bool = True, + **kwargs, # noqa: ARG002 + ) -> str: + """ + Decode token IDs back to text string. + + :param token_ids: List of token IDs to decode + :param skip_special_tokens: Whether to skip special tokens + :return: Decoded text string + """ + tokens = self.convert_ids_to_tokens(token_ids, skip_special_tokens) + return self.convert_tokens_to_string(tokens) + + +def create_fake_text( + num_tokens: int, + processor: PreTrainedTokenizer, + seed: int = 42, + fake: Faker | None = None, +) -> str: + """ + Generate fake text using a tokenizer processor with specified token count. + + Creates text by generating fake tokens and joining them into a string, + ensuring the result has the exact number of tokens when processed by + the given tokenizer. 
+ + :param num_tokens: Target number of tokens in the generated text + :param processor: Tokenizer to use for token generation and validation + :param seed: Random seed for reproducible text generation + :param fake: Optional Faker instance for text generation + :return: Generated text string with the specified token count + """ + return "".join(create_fake_tokens_str(num_tokens, processor, seed, fake)) + + +def create_fake_tokens_str( + num_tokens: int, + processor: PreTrainedTokenizer, + seed: int = 42, + fake: Faker | None = None, +) -> list[str]: + """ + Generate fake token strings using a tokenizer processor. + + Creates a list of token strings by generating fake text and tokenizing it + until the desired token count is reached. Uses the provided tokenizer + for accurate token boundary detection. + + :param num_tokens: Target number of tokens to generate + :param processor: Tokenizer to use for token generation and validation + :param seed: Random seed for reproducible token generation + :param fake: Optional Faker instance for text generation + :return: List of token strings with the specified count + """ + if not fake: + fake = Faker() + fake.seed_instance(seed) + + tokens = [] + + while len(tokens) < num_tokens: + text = fake.text( + max_nb_chars=(num_tokens - len(tokens)) * 30 # oversample + ) + new_tokens = processor.tokenize(text) + + if len(tokens) > 0: + new_tokens = [".", " "] + new_tokens + + new_tokens = ( + new_tokens[: num_tokens - len(tokens)] + if len(new_tokens) > (num_tokens - len(tokens)) + else new_tokens + ) + tokens += new_tokens + + return tokens + + +def times_generator(mean: float, standard_dev: float) -> Generator[float]: + """ + Generate infinite timing values from a normal distribution. + + Creates a generator that yields timing values sampled from a normal + distribution, useful for simulating realistic request timing patterns + in benchmarking scenarios. + + :param mean: Mean value for the normal distribution + :param standard_dev: Standard deviation for the normal distribution + :return: Generator yielding positive timing values from the distribution + """ + while True: + yield sample_number(mean, standard_dev) + + +def sample_number(mean: float, standard_dev: float) -> float: + """ + Generate a single timing value from a normal distribution. + + Samples one timing value from a normal distribution with the specified + parameters, ensuring the result is non-negative for realistic timing + simulation in benchmarking scenarios. 
+ + :param mean: Mean value for the normal distribution + :param standard_dev: Standard deviation for the normal distribution + :return: Non-negative timing value from the distribution + """ + return max(0.0, random.gauss(mean, standard_dev)) diff --git a/tests/unit/mock_server/__init__.py b/tests/unit/mock_server/__init__.py new file mode 100644 index 00000000..e02d60bd --- /dev/null +++ b/tests/unit/mock_server/__init__.py @@ -0,0 +1 @@ +"""Unit tests for the GuideLLM mock server package.""" diff --git a/tests/unit/mock_server/test_server.py b/tests/unit/mock_server/test_server.py new file mode 100644 index 00000000..008103c3 --- /dev/null +++ b/tests/unit/mock_server/test_server.py @@ -0,0 +1,518 @@ +from __future__ import annotations + +import asyncio +import json +import multiprocessing + +import httpx +import pytest +import pytest_asyncio +from pydantic import ValidationError + +from guidellm.mock_server.config import MockServerConfig +from guidellm.mock_server.server import MockServer + + +# Start server in a separate process +def _start_server_process(config: MockServerConfig): + server = MockServer(config) + server.run() + + +@pytest_asyncio.fixture(scope="class") +async def mock_server_instance(): + """Instance-level fixture that provides a running server for HTTP testing.""" + + config = MockServerConfig( + host="127.0.0.1", + port=8012, + model="test-model", + ttft_ms=10.0, + itl_ms=1.0, + request_latency=0.1, + ) + base_url = f"http://{config.host}:{config.port}" + server_process = multiprocessing.Process( + target=_start_server_process, args=(config,) + ) + server_process.start() + + # Wait for server to start up and be ready + async def wait_for_startup(): + poll_frequency = 1.0 + async with httpx.AsyncClient() as client: + while True: + try: + response = await client.get(f"{base_url}/health", timeout=1.0) + if response.status_code == 200: + break + except (httpx.RequestError, httpx.TimeoutException): + pass + await asyncio.sleep(poll_frequency) + poll_frequency = min(poll_frequency * 1.5, 2.0) + + timeout = 30.0 + try: + await asyncio.wait_for(wait_for_startup(), timeout) + except TimeoutError: + # Server failed to start within timeout + server_process.terminate() + server_process.kill() + server_process.join(timeout=5) + pytest.fail(f"Server failed to start within {timeout} seconds") + + yield base_url, config + + # Cleanup: terminate the server process + server_process.terminate() + server_process.kill() + server_process.join(timeout=5) + + +class TestMockServerConfig: + """Test suite for MockServerConfig class.""" + + @pytest.mark.smoke + def test_default_initialization(self): + """Test MockServerConfig initialization with default values.""" + config = MockServerConfig() + assert config.host == "127.0.0.1" + assert config.port == 8000 + assert config.workers == 1 + assert config.model == "llama-3.1-8b-instruct" + assert config.processor is None + assert config.request_latency == 3.0 + assert config.request_latency_std == 0.0 + assert config.ttft_ms == 150.0 + assert config.ttft_ms_std == 0.0 + assert config.itl_ms == 10.0 + assert config.itl_ms_std == 0.0 + assert config.output_tokens == 128 + assert config.output_tokens_std == 0.0 + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("kwargs", "expected_values"), + [ + ( + {"host": "127.0.0.1", "port": 9000, "model": "custom-model"}, + {"host": "127.0.0.1", "port": 9000, "model": "custom-model"}, + ), + ( + {"request_latency": 1.5, "ttft_ms": 100.0, "output_tokens": 256}, + {"request_latency": 1.5, "ttft_ms": 100.0, 
"output_tokens": 256}, + ), + ], + ) + def test_custom_initialization(self, kwargs, expected_values): + """Test MockServerConfig initialization with custom values.""" + config = MockServerConfig(**kwargs) + for key, expected_value in expected_values.items(): + assert getattr(config, key) == expected_value + + @pytest.mark.sanity + @pytest.mark.parametrize( + ("field", "value"), + [ + ("port", "not_int"), + ("request_latency", "not_float"), + ("output_tokens", "not_int"), + ], + ) + def test_invalid_initialization_values(self, field, value): + """Test MockServerConfig with invalid field values.""" + kwargs = {field: value} + with pytest.raises(ValidationError): + MockServerConfig(**kwargs) + + +class TestMockServer: + """Test suite for MockServer class.""" + + @pytest.mark.smoke + def test_class_signatures(self): + """Test MockServer class signatures and attributes.""" + assert hasattr(MockServer, "__init__") + assert hasattr(MockServer, "run") + assert hasattr(MockServer, "_setup_middleware") + assert hasattr(MockServer, "_setup_routes") + assert hasattr(MockServer, "_setup_error_handlers") + + @pytest.mark.sanity + def test_invalid_initialization_missing(self): + """Test MockServer initialization without required config.""" + with pytest.raises(TypeError): + MockServer() + + +class TestMockServerEndpoints: + """Test suite for MockServer HTTP endpoints with real server instances.""" + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_health_endpoint(self, mock_server_instance): + """Test the health check endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/health", timeout=5.0) + assert response.status_code == 200 + + data = response.json() + assert "status" in data + assert data["status"] == "healthy" + assert "timestamp" in data + assert isinstance(data["timestamp"], (int, float)) + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_models_endpoint(self, mock_server_instance): + """Test the models listing endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/v1/models", timeout=5.0) + assert response.status_code == 200 + + data = response.json() + assert "object" in data + assert data["object"] == "list" + assert "data" in data + assert isinstance(data["data"], list) + assert len(data["data"]) > 0 + + model = data["data"][0] + assert "id" in model + assert "object" in model + assert "created" in model + assert "owned_by" in model + assert model["object"] == "model" + assert model["owned_by"] == "guidellm-mock" + assert model["id"] == "test-model" + + @pytest.mark.smoke + @pytest.mark.asyncio + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + { + "model": "test-model", + "messages": [{"role": "user", "content": "Hello!"}], + "max_tokens": 10, + }, + ["choices", "usage", "model", "object"], + ), + ( + { + "model": "test-model", + "messages": [{"role": "user", "content": "Test"}], + "max_tokens": 5, + "temperature": 0.7, + }, + ["choices", "usage", "model", "object"], + ), + ], + ) + async def test_chat_completions_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the chat completions endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/v1/chat/completions", json=payload, timeout=10.0 + ) + assert response.status_code == 200 + + data = response.json() 
+ for field in expected_fields: + assert field in data + + assert len(data["choices"]) > 0 + choice = data["choices"][0] + assert "message" in choice + assert "content" in choice["message"] + assert "role" in choice["message"] + assert choice["message"]["role"] == "assistant" + assert isinstance(choice["message"]["content"], str) + assert len(choice["message"]["content"]) > 0 + + # Verify usage information + assert "prompt_tokens" in data["usage"] + assert "completion_tokens" in data["usage"] + assert "total_tokens" in data["usage"] + assert data["usage"]["total_tokens"] == ( + data["usage"]["prompt_tokens"] + data["usage"]["completion_tokens"] + ) + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_streaming_chat_completions(self, mock_server_instance): + """Test streaming chat completions endpoint.""" + server_url, _ = mock_server_instance + + payload = { + "model": "test-model", + "messages": [{"role": "user", "content": "Hi!"}], + "max_tokens": 5, + "stream": True, + } + + async with ( + httpx.AsyncClient() as client, + client.stream( + "POST", + f"{server_url}/v1/chat/completions", + json=payload, + timeout=10.0, + ) as response, + ): + assert response.status_code == 200 + assert "text/event-stream" in response.headers.get("content-type", "") + + chunks = [] + async for line in response.aiter_lines(): + if line and line.startswith("data: "): + data_str = line[6:] + if data_str.strip() == "[DONE]": + break + try: + chunk_data = json.loads(data_str) + chunks.append(chunk_data) + except json.JSONDecodeError: + continue + + assert len(chunks) > 0 + # Verify chunk structure + for chunk in chunks: + assert "choices" in chunk + assert len(chunk["choices"]) > 0 + assert "delta" in chunk["choices"][0] + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + { + "model": "test-model", + "prompt": "Hello", + "max_tokens": 10, + }, + ["choices", "usage", "model", "object"], + ), + ( + { + "model": "test-model", + "prompt": "Test prompt", + "max_tokens": 5, + "temperature": 0.8, + }, + ["choices", "usage", "model", "object"], + ), + ], + ) + @pytest.mark.asyncio + async def test_completions_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the legacy completions endpoint.""" + server_url, _ = mock_server_instance + + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/v1/completions", json=payload, timeout=10.0 + ) + assert response.status_code == 200 + + data = response.json() + for field in expected_fields: + assert field in data + + assert len(data["choices"]) > 0 + choice = data["choices"][0] + assert "text" in choice + assert isinstance(choice["text"], str) + assert len(choice["text"]) > 0 + + # Verify usage information + assert "prompt_tokens" in data["usage"] + assert "completion_tokens" in data["usage"] + assert "total_tokens" in data["usage"] + + @pytest.mark.smoke + @pytest.mark.asyncio + async def test_streaming_completions(self, mock_server_instance): + """Test streaming completions endpoint.""" + server_url, _ = mock_server_instance + payload = { + "model": "test-model", + "prompt": "Hello", + "max_tokens": 5, + "stream": True, + } + + async with ( + httpx.AsyncClient() as client, + client.stream( + "POST", + f"{server_url}/v1/completions", + json=payload, + timeout=10.0, + ) as response, + ): + assert response.status_code == 200 + assert "text/event-stream" in response.headers.get("content-type", "") + + chunks = [] + async for line in response.aiter_lines(): + if 
line and line.startswith("data: "): + data_str = line[6:] + if data_str.strip() == "[DONE]": + break + try: + chunk_data = json.loads(data_str) + chunks.append(chunk_data) + except json.JSONDecodeError: + continue + + assert len(chunks) > 0 + # Verify chunk structure + for chunk in chunks: + assert "choices" in chunk + assert len(chunk["choices"]) > 0 + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + {"text": "Hello world!"}, + ["tokens", "count"], + ), + ( + {"text": "This is a test sentence."}, + ["tokens", "count"], + ), + ], + ) + @pytest.mark.asyncio + async def test_tokenize_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the tokenize endpoint.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/tokenize", json=payload, timeout=5.0 + ) + assert response.status_code == 200 + + data = response.json() + for field in expected_fields: + assert field in data + + assert isinstance(data["tokens"], list) + assert isinstance(data["count"], int) + assert data["count"] == len(data["tokens"]) + assert len(data["tokens"]) > 0 + + @pytest.mark.smoke + @pytest.mark.parametrize( + ("payload", "expected_fields"), + [ + ( + {"tokens": [123, 456, 789]}, + ["text"], + ), + ( + {"tokens": [100, 200]}, + ["text"], + ), + ], + ) + @pytest.mark.asyncio + async def test_detokenize_endpoint( + self, mock_server_instance, payload, expected_fields + ): + """Test the detokenize endpoint.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.post( + f"{server_url}/detokenize", json=payload, timeout=5.0 + ) + assert response.status_code == 200 + + data = response.json() + for field in expected_fields: + assert field in data + + assert isinstance(data["text"], str) + assert len(data["text"]) > 0 + + @pytest.mark.sanity + @pytest.mark.asyncio + async def test_options_endpoint(self, mock_server_instance): + """Test the OPTIONS endpoint for CORS support.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.options( + f"{server_url}/v1/chat/completions", timeout=5.0 + ) + assert response.status_code == 204 + assert response.text == "" + + @pytest.mark.sanity + @pytest.mark.asyncio + async def test_cors_headers(self, mock_server_instance): + """Test CORS headers are properly set.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/health", timeout=5.0) + assert response.status_code == 200 + + # Check for CORS headers + assert response.headers.get("Access-Control-Allow-Origin") == "*" + methods_header = response.headers.get("Access-Control-Allow-Methods", "") + assert "GET, POST, OPTIONS" in methods_header + headers_header = response.headers.get("Access-Control-Allow-Headers", "") + assert "Content-Type, Authorization" in headers_header + assert response.headers.get("Server") == "guidellm-mock-server" + + @pytest.mark.sanity + @pytest.mark.asyncio + @pytest.mark.parametrize( + ("endpoint", "method", "payload"), + [ + ("/v1/chat/completions", "POST", {"invalid": "payload"}), + ("/v1/completions", "POST", {"invalid": "payload"}), + ("/tokenize", "POST", {"invalid": "payload"}), + ("/detokenize", "POST", {"invalid": "payload"}), + ], + ) + async def test_invalid_request_handling( + self, mock_server_instance, endpoint, method, payload + ): + """Test handling of invalid 
requests.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + if method == "POST": + response = await client.post( + f"{server_url}{endpoint}", json=payload, timeout=5.0 + ) + else: + response = await client.get(f"{server_url}{endpoint}", timeout=5.0) + + # Should return an error response, not crash + assert response.status_code in [400, 422, 500] + + @pytest.mark.sanity + @pytest.mark.asyncio + async def test_nonexistent_endpoint(self, mock_server_instance): + """Test handling of requests to nonexistent endpoints.""" + server_url, _ = mock_server_instance + async with httpx.AsyncClient() as client: + response = await client.get(f"{server_url}/nonexistent", timeout=5.0) + assert response.status_code == 404 From 46a2c1ed14f93793adae4aa5b7bf41b70720291f Mon Sep 17 00:00:00 2001 From: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> Date: Mon, 29 Sep 2025 12:15:17 -0400 Subject: [PATCH 32/90] [GuideLLM Refactor] Edge case errors (#376) ## Summary This PR handles errors that occur when there are no successful requests. There will obviously still be an error, but it will be one that the user can get useful information from, rather than one that is the inner workings breaking. ## Details - Adds default value for an inner data type to allow it to work in this edge case. - Adds an error check that creates a runtime error with an explanation for the failure. The error message can be changed if you would like the wording changed. - Fixes a type literal mismatch. ## Test Plan - Run GuideLLM against a mock server in a way that results in all requests failing. Like setting the max token value way too small. --- - [x] "I certify that all code in this PR is my own, except as noted below." ## Use of AI - [ ] Includes AI-assisted code completion - [ ] Includes code generated by an AI application - [ ] Includes AI-generated tests (NOTE: AI written tests should have a docstring that includes `## WRITTEN BY AI ##`) --------- Signed-off-by: Jared O'Connell --- src/guidellm/benchmark/aggregator.py | 4 ++-- src/guidellm/benchmark/profile.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index 9db93a12..e965c482 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -441,7 +441,7 @@ def __call__( def compile( self, state: AggregatorState, scheduler_state: SchedulerState - ) -> dict[Literal["scheduler_stats"], BenchmarkSchedulerStats]: + ) -> dict[Literal["run_stats"], BenchmarkSchedulerStats]: """ Compile scheduler timing metrics into benchmark statistics. @@ -473,7 +473,7 @@ def compile( key="worker_resolve_time", type_="avg", default=0.0 ), worker_resolve_end_delay_avg=state.get_metric( - key="worker_resolve_end_delay", type_="avg" + key="worker_resolve_end_delay", type_="avg", default=0.0 ), finalized_delay_avg=state.get_metric( key="finalized_delay", type_="avg", default=0.0 diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index 042179ba..d2f9d70c 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -680,6 +680,8 @@ def next_strategy( self.throughput_rate = ( prev_benchmark.metrics.requests_per_second.successful.mean ) + if self.synchronous_rate <= 0 and self.throughput_rate <= 0: + raise RuntimeError("Invalid rates in sweep; aborting. 
Were there any successful requests?") self.measured_rates = list( np.linspace( self.synchronous_rate, @@ -698,7 +700,6 @@ def next_strategy( if strat.type_ == self.strategy_type ] ) - if self.strategy_type == "constant": return AsyncConstantStrategy( rate=self.measured_rates[next_rate_index], From 2c6637dcb58b771a1abeecc4295737dbc898eca8 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Mon, 29 Sep 2025 17:27:35 -0400 Subject: [PATCH 33/90] Share type alises between entrypoints and scenario Signed-off-by: Jared O'Connell --- src/guidellm/benchmark/entrypoints.py | 46 ++++----------------------- src/guidellm/benchmark/scenario.py | 22 +++---------- src/guidellm/benchmark/type.py | 45 ++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 57 deletions(-) create mode 100644 src/guidellm/benchmark/type.py diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 167bc3b7..f7f41a98 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -1,14 +1,8 @@ from __future__ import annotations -from collections.abc import Iterable from pathlib import Path from typing import Any, Literal -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict -from transformers import ( # type: ignore[import] - PreTrainedTokenizerBase, -) - from guidellm.backends import ( Backend, BackendType, @@ -16,8 +10,6 @@ GenerationResponse, ) from guidellm.benchmark.aggregator import ( - Aggregator, - CompilableAggregator, GenerativeRequestsAggregator, GenerativeStatsProgressAggregator, SchedulerStatsAggregator, @@ -29,11 +21,10 @@ GenerativeBenchmarkerOutput, ) from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.progress import ( - BenchmarkerProgress, - BenchmarkerProgressGroup, -) +from guidellm.benchmark.progress import BenchmarkerProgressGroup from guidellm.benchmark.scenario import enable_scenarios +from guidellm.benchmark.type import OutputFormatType, DataInputType, ProcessorInputType, ProgressInputType, \ + AggregatorInputType from guidellm.request import GenerativeRequestLoader from guidellm.scheduler import ( ConstraintInitializer, @@ -51,27 +42,6 @@ _CURRENT_WORKING_DIR = Path.cwd() -# Data types - -DataType = ( - Iterable[str] - | Iterable[dict[str, Any]] - | Dataset - | DatasetDict - | IterableDataset - | IterableDatasetDict - | str - | Path -) - -OutputFormatType = ( - tuple[str, ...] - | list[str] - | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] - | None -) - - # Helper functions async def initialize_backend( @@ -147,7 +117,7 @@ async def finalize_outputs( @enable_scenarios async def benchmark_generative_text( # noqa: C901 target: str, - data: DataType, + data: DataInputType, profile: StrategyType | ProfileType | Profile, rate: list[float] | None = None, random_seed: int = 42, @@ -156,7 +126,7 @@ async def benchmark_generative_text( # noqa: C901 backend_kwargs: dict[str, Any] | None = None, model: str | None = None, # Data configuration - processor: str | Path | PreTrainedTokenizerBase | None = None, + processor: ProcessorInputType | None = None, processor_args: dict[str, Any] | None = None, data_args: dict[str, Any] | None = None, data_sampler: Literal["random"] | None = None, @@ -164,12 +134,10 @@ async def benchmark_generative_text( # noqa: C901 output_path: str | Path | None = _CURRENT_WORKING_DIR, output_formats: OutputFormatType = ("console", "json", "html", "csv"), # Updates configuration - progress: tuple[str, ...] 
| list[str] | list[BenchmarkerProgress] | None = None, + progress: ProgressInputType | None = None, print_updates: bool = False, # Aggregators configuration - add_aggregators: ( - dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] | None - ) = None, + add_aggregators: AggregatorInputType | None = None, warmup: float | None = None, cooldown: float | None = None, request_samples: int | None = 20, diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index ff5ada26..0e46c094 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -1,14 +1,12 @@ from __future__ import annotations import json -from collections.abc import Iterable from functools import cache, wraps from inspect import Parameter, signature from pathlib import Path from typing import Annotated, Any, Callable, Literal, TypeVar import yaml -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from loguru import logger from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt, SkipValidation from transformers.tokenization_utils_base import ( # type: ignore[import] @@ -16,11 +14,8 @@ ) from guidellm.backends import Backend, BackendType -from guidellm.benchmark.aggregator import ( - Aggregator, - CompilableAggregator, -) from guidellm.benchmark.profile import Profile, ProfileType +from guidellm.benchmark.type import DataInputType, ProcessorInputType, AggregatorInputType from guidellm.scheduler import StrategyType from guidellm.utils import StandardBaseModel @@ -116,14 +111,7 @@ class Config: arbitrary_types_allowed = True data: Annotated[ - Iterable[str] - | Iterable[dict[str, Any]] - | Dataset - | DatasetDict - | IterableDataset - | IterableDatasetDict - | str - | Path, + DataInputType, # BUG: See https://github.com/pydantic/pydantic/issues/9541 SkipValidation, ] @@ -137,14 +125,12 @@ class Config: backend_kwargs: dict[str, Any] | None = None model: str | None = None # Data configuration - processor: str | Path | PreTrainedTokenizerBase | None = None + processor: ProcessorInputType | None = None processor_args: dict[str, Any] | None = None data_args: dict[str, Any] | None = None data_sampler: Literal["random"] | None = None # Aggregators configuration - add_aggregators: ( - dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] | None - ) = None + add_aggregators: AggregatorInputType | None = None warmup: Annotated[float | None, Field(gt=0, le=1)] = None cooldown: Annotated[float | None, Field(gt=0, le=1)] = None request_samples: PositiveInt | None = 20 diff --git a/src/guidellm/benchmark/type.py b/src/guidellm/benchmark/type.py new file mode 100644 index 00000000..7018fe61 --- /dev/null +++ b/src/guidellm/benchmark/type.py @@ -0,0 +1,45 @@ +from __future__ import annotations +from collections.abc import Iterable +from typing import Any +from pathlib import Path +from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict + +from guidellm.benchmark.output import ( + GenerativeBenchmarkerOutput, +) + +from transformers import ( # type: ignore[import] + PreTrainedTokenizerBase, +) + +from guidellm.benchmark.progress import BenchmarkerProgress + +from guidellm.benchmark.aggregator import ( + Aggregator, + CompilableAggregator, +) + + +DataInputType = ( + Iterable[str] + | Iterable[dict[str, Any]] + | Dataset + | DatasetDict + | IterableDataset + | IterableDatasetDict + | str + | Path +) + +OutputFormatType = ( + tuple[str, ...] 
+ | list[str] + | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] + | None +) + +ProcessorInputType = str | Path | PreTrainedTokenizerBase + +ProgressInputType = tuple[str, ...] | list[str] | list[BenchmarkerProgress] + +AggregatorInputType = dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] \ No newline at end of file From 2c70eddea74ab24fd3be78f3879b573aeb78be97 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Tue, 30 Sep 2025 10:41:21 -0400 Subject: [PATCH 34/90] Pluralize type.py > types.py --- src/guidellm/benchmark/entrypoints.py | 7 ++++++- src/guidellm/benchmark/scenario.py | 2 +- src/guidellm/benchmark/{type.py => types.py} | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) rename src/guidellm/benchmark/{type.py => types.py} (97%) diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index f7f41a98..a3e92d07 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -23,8 +23,13 @@ from guidellm.benchmark.profile import Profile, ProfileType from guidellm.benchmark.progress import BenchmarkerProgressGroup from guidellm.benchmark.scenario import enable_scenarios -from guidellm.benchmark.type import OutputFormatType, DataInputType, ProcessorInputType, ProgressInputType, \ +from guidellm.benchmark.types import ( + OutputFormatType, + DataInputType, + ProcessorInputType, + ProgressInputType, AggregatorInputType +) from guidellm.request import GenerativeRequestLoader from guidellm.scheduler import ( ConstraintInitializer, diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 0e46c094..c8b5801e 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -15,7 +15,7 @@ from guidellm.backends import Backend, BackendType from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.type import DataInputType, ProcessorInputType, AggregatorInputType +from guidellm.benchmark.types import DataInputType, ProcessorInputType, AggregatorInputType from guidellm.scheduler import StrategyType from guidellm.utils import StandardBaseModel diff --git a/src/guidellm/benchmark/type.py b/src/guidellm/benchmark/types.py similarity index 97% rename from src/guidellm/benchmark/type.py rename to src/guidellm/benchmark/types.py index 7018fe61..9ec475b8 100644 --- a/src/guidellm/benchmark/type.py +++ b/src/guidellm/benchmark/types.py @@ -42,4 +42,4 @@ ProgressInputType = tuple[str, ...] 
| list[str] | list[BenchmarkerProgress] -AggregatorInputType = dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] \ No newline at end of file +AggregatorInputType = dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] From 4bff34a8eae25d5c3c8f02a6e5f745e3746aba4f Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Tue, 30 Sep 2025 10:56:56 -0400 Subject: [PATCH 35/90] Convert new types to TypeAliasTypes --- src/guidellm/benchmark/__init__.py | 12 +++++++ src/guidellm/benchmark/entrypoints.py | 49 +++++++++++++++------------ src/guidellm/benchmark/scenario.py | 17 ++++------ src/guidellm/benchmark/types.py | 44 ++++++++++++++++-------- 4 files changed, 76 insertions(+), 46 deletions(-) diff --git a/src/guidellm/benchmark/__init__.py b/src/guidellm/benchmark/__init__.py index 8350f161..9fdb231d 100644 --- a/src/guidellm/benchmark/__init__.py +++ b/src/guidellm/benchmark/__init__.py @@ -46,9 +46,17 @@ enable_scenarios, get_builtin_scenarios, ) +from .types import ( + AggregatorInputT, + DataInputT, + OutputFormatT, + ProcessorInputT, + ProgressInputT, +) __all__ = [ "Aggregator", + "AggregatorInputT", "AggregatorState", "AsyncProfile", "Benchmark", @@ -60,6 +68,7 @@ "BenchmarkerProgressGroup", "CompilableAggregator", "ConcurrentProfile", + "DataInputT", "GenerativeBenchmark", "GenerativeBenchmarkerCSV", "GenerativeBenchmarkerConsole", @@ -73,8 +82,11 @@ "GenerativeStatsProgressAggregator", "GenerativeTextScenario", "InjectExtrasAggregator", + "OutputFormatT", + "ProcessorInputT", "Profile", "ProfileType", + "ProgressInputT", "Scenario", "SchedulerStatsAggregator", "SerializableAggregator", diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index a3e92d07..b926394f 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -24,11 +24,11 @@ from guidellm.benchmark.progress import BenchmarkerProgressGroup from guidellm.benchmark.scenario import enable_scenarios from guidellm.benchmark.types import ( - OutputFormatType, - DataInputType, - ProcessorInputType, - ProgressInputType, - AggregatorInputType + AggregatorInputT, + DataInputT, + OutputFormatT, + ProcessorInputT, + ProgressInputT, ) from guidellm.request import GenerativeRequestLoader from guidellm.scheduler import ( @@ -49,6 +49,7 @@ # Helper functions + async def initialize_backend( backend: BackendType | Backend, target: str, @@ -56,9 +57,7 @@ async def initialize_backend( backend_kwargs: dict[str, Any] | None, ) -> Backend: backend = ( - Backend.create( - backend, target=target, model=model, **(backend_kwargs or {}) - ) + Backend.create(backend, target=target, model=model, **(backend_kwargs or {})) if not isinstance(backend, Backend) else backend ) @@ -95,18 +94,19 @@ async def resolve_profile( ) return profile + async def resolve_output_formats( - output_formats: OutputFormatType, + output_formats: OutputFormatT, output_path: str | Path | None, ) -> dict[str, GenerativeBenchmarkerOutput]: - output_formats = GenerativeBenchmarkerOutput.resolve( + return GenerativeBenchmarkerOutput.resolve( output_formats=(output_formats or {}), output_path=output_path ) - return output_formats + async def finalize_outputs( report: GenerativeBenchmarksReport, - resolved_output_formats: dict[str, GenerativeBenchmarkerOutput] + resolved_output_formats: dict[str, GenerativeBenchmarkerOutput], ): output_format_results = {} for key, output in resolved_output_formats.items(): @@ -122,7 +122,7 @@ async def finalize_outputs( @enable_scenarios async def 
benchmark_generative_text( # noqa: C901 target: str, - data: DataInputType, + data: DataInputT, profile: StrategyType | ProfileType | Profile, rate: list[float] | None = None, random_seed: int = 42, @@ -131,18 +131,18 @@ async def benchmark_generative_text( # noqa: C901 backend_kwargs: dict[str, Any] | None = None, model: str | None = None, # Data configuration - processor: ProcessorInputType | None = None, + processor: ProcessorInputT | None = None, processor_args: dict[str, Any] | None = None, data_args: dict[str, Any] | None = None, data_sampler: Literal["random"] | None = None, # Output configuration output_path: str | Path | None = _CURRENT_WORKING_DIR, - output_formats: OutputFormatType = ("console", "json", "html", "csv"), + output_formats: OutputFormatT = ("console", "json", "html", "csv"), # Updates configuration - progress: ProgressInputType | None = None, + progress: ProgressInputT | None = None, print_updates: bool = False, # Aggregators configuration - add_aggregators: AggregatorInputType | None = None, + add_aggregators: AggregatorInputT | None = None, warmup: float | None = None, cooldown: float | None = None, request_samples: int | None = 20, @@ -259,7 +259,9 @@ async def benchmark_generative_text( # noqa: C901 ) with console.print_update_step(title="Resolving output formats") as console_step: - resolved_output_formats = await resolve_output_formats(output_formats, output_path) + resolved_output_formats = await resolve_output_formats( + output_formats, output_path + ) console_step.finish( title="Output formats resolved", details={key: str(val) for key, val in resolved_output_formats.items()}, @@ -314,7 +316,7 @@ async def benchmark_generative_text( # noqa: C901 async def reimport_benchmarks_report( file: Path, output_path: Path | None, - output_formats: OutputFormatType = ("console", "json", "html", "csv"), + output_formats: OutputFormatT = ("console", "json", "html", "csv"), ) -> tuple[GenerativeBenchmarksReport, dict[str, Any]]: """ The command-line entry point for re-importing and displaying an @@ -326,10 +328,15 @@ async def reimport_benchmarks_report( title=f"Loading benchmarks from {file}" ) as console_step: report = GenerativeBenchmarksReport.load_file(file) - console_step.finish(f"Import of old benchmarks complete; loaded {len(report.benchmarks)} benchmark(s)") + console_step.finish( + "Import of old benchmarks complete;" + f" loaded {len(report.benchmarks)} benchmark(s)" + ) with console.print_update_step(title="Resolving output formats") as console_step: - resolved_output_formats = await resolve_output_formats(output_formats, output_path) + resolved_output_formats = await resolve_output_formats( + output_formats, output_path + ) console_step.finish( title="Output formats resolved", details={key: str(val) for key, val in resolved_output_formats.items()}, diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index c8b5801e..d882ecae 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -9,21 +9,18 @@ import yaml from loguru import logger from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt, SkipValidation -from transformers.tokenization_utils_base import ( # type: ignore[import] - PreTrainedTokenizerBase, -) from guidellm.backends import Backend, BackendType from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.types import DataInputType, ProcessorInputType, AggregatorInputType +from guidellm.benchmark.types import AggregatorInputT, DataInputT, 
ProcessorInputT from guidellm.scheduler import StrategyType from guidellm.utils import StandardBaseModel -__ALL__ = [ - "Scenario", +__all__ = [ "GenerativeTextScenario", - "get_builtin_scenarios", + "Scenario", "enable_scenarios", + "get_builtin_scenarios", ] SCENARIO_DIR = Path(__file__).parent / "scenarios/" @@ -111,7 +108,7 @@ class Config: arbitrary_types_allowed = True data: Annotated[ - DataInputType, + DataInputT, # BUG: See https://github.com/pydantic/pydantic/issues/9541 SkipValidation, ] @@ -125,12 +122,12 @@ class Config: backend_kwargs: dict[str, Any] | None = None model: str | None = None # Data configuration - processor: ProcessorInputType | None = None + processor: ProcessorInputT | None = None processor_args: dict[str, Any] | None = None data_args: dict[str, Any] | None = None data_sampler: Literal["random"] | None = None # Aggregators configuration - add_aggregators: AggregatorInputType | None = None + add_aggregators: AggregatorInputT | None = None warmup: Annotated[float | None, Field(gt=0, le=1)] = None cooldown: Annotated[float | None, Field(gt=0, le=1)] = None request_samples: PositiveInt | None = 20 diff --git a/src/guidellm/benchmark/types.py b/src/guidellm/benchmark/types.py index 9ec475b8..04ad4061 100644 --- a/src/guidellm/benchmark/types.py +++ b/src/guidellm/benchmark/types.py @@ -1,26 +1,34 @@ from __future__ import annotations + from collections.abc import Iterable -from typing import Any from pathlib import Path -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict - -from guidellm.benchmark.output import ( - GenerativeBenchmarkerOutput, -) +from typing import Any, TypeAliasType +from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from transformers import ( # type: ignore[import] PreTrainedTokenizerBase, ) -from guidellm.benchmark.progress import BenchmarkerProgress - from guidellm.benchmark.aggregator import ( Aggregator, CompilableAggregator, ) +from guidellm.benchmark.output import ( + GenerativeBenchmarkerOutput, +) +from guidellm.benchmark.progress import BenchmarkerProgress + +__all__ = [ + "AggregatorInputT", + "DataInputT", + "OutputFormatT", + "ProcessorInputT", + "ProgressInputT", +] -DataInputType = ( +DataInputT = TypeAliasType( + "DataInputT", Iterable[str] | Iterable[dict[str, Any]] | Dataset @@ -28,18 +36,24 @@ | IterableDataset | IterableDatasetDict | str - | Path + | Path, ) -OutputFormatType = ( +OutputFormatT = TypeAliasType( + "OutputFormatT", tuple[str, ...] | list[str] | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] - | None + | None, ) -ProcessorInputType = str | Path | PreTrainedTokenizerBase +ProcessorInputT = TypeAliasType("ProcessorInputT", str | Path | PreTrainedTokenizerBase) -ProgressInputType = tuple[str, ...] | list[str] | list[BenchmarkerProgress] +ProgressInputT = TypeAliasType( + "ProgressInputT", tuple[str, ...] 
| list[str] | list[BenchmarkerProgress] +) -AggregatorInputType = dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] +AggregatorInputT = TypeAliasType( + "AggregatorInputT", + dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator], +) From 3057229530906e8503a1a431c72afe1ad079894c Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Tue, 30 Sep 2025 11:07:01 -0400 Subject: [PATCH 36/90] Address Copilot review --- src/guidellm/benchmark/profile.py | 1 + src/guidellm/benchmark/scenario.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index b5ce7c24..3d4e7287 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -398,6 +398,7 @@ class ThroughputProfile(Profile): description="Maximum number of concurrent requests to schedule", ) startup_duration: NonNegativeFloat = Field( + default=0.0, description=( "Duration in seconds for distributing startup requests " "before full throughput scheduling" diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index d882ecae..b53ef424 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -84,7 +84,7 @@ def from_file(cls: type[T], filename: Path, overrides: dict | None = None) -> T: logger.error(f"Failed to parse {filename} as type {cls.__name__}") raise ValueError(f"Error when parsing file: {filename}") from e - data.update(overrides) + data.update(overrides or {}) return cls.model_validate(data) @classmethod @@ -144,7 +144,7 @@ def enable_scenarios(func: Callable) -> Any: @wraps(func) async def decorator(*args, scenario: Scenario | None = None, **kwargs) -> Any: if scenario is not None: - kwargs.update(**scenario.model_dump()) + kwargs.update(scenario.model_dump()) return await func(*args, **kwargs) # Modify the signature of the decorator to include the `scenario` argument From 33effe655b8853eb2c0628f3af19e404aa94953a Mon Sep 17 00:00:00 2001 From: Benjamin Blue Date: Thu, 21 Aug 2025 10:13:07 -0400 Subject: [PATCH 37/90] add custom dict camelize logic (#246) This removes reliance on the library pyhumps converting dicts keys from snake_case to camelCase for use in the UI --------- Signed-off-by: dalthecow Signed-off-by: Samuel Monson Signed-off-by: Jared O'Connell Co-authored-by: Samuel Monson Co-authored-by: Jared O'Connell --- src/guidellm/benchmark/output.py | 13 +++--- src/guidellm/utils/__init__.py | 4 ++ src/guidellm/utils/dict.py | 23 +++++++++++ src/guidellm/utils/text.py | 7 ++++ tests/unit/utils/dict.py | 71 ++++++++++++++++++++++++++++++++ tests/unit/utils/text.py | 13 ++++++ 6 files changed, 125 insertions(+), 6 deletions(-) create mode 100644 src/guidellm/utils/dict.py create mode 100644 tests/unit/utils/dict.py create mode 100644 tests/unit/utils/text.py diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index 53e0f7dd..e8efb431 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -5,6 +5,7 @@ import math from abc import ABC, abstractmethod from collections import OrderedDict +from copy import deepcopy from datetime import datetime from pathlib import Path from typing import Any, ClassVar @@ -36,6 +37,8 @@ safe_format_timestamp, split_text_list_by_length, ) +from guidellm.utils.dict import recursive_key_update +from guidellm.utils.text import camelize_str __all__ = [ "GenerativeBenchmarkerCSV", @@ -711,8 +714,6 @@ async def finalize(self, report: 
GenerativeBenchmarksReport) -> Path: :param report: The completed benchmark report. :return: Path to the saved HTML file. """ - import humps - output_path = self.output_path if output_path.is_dir(): output_path = output_path / GenerativeBenchmarkerHTML.DEFAULT_FILE @@ -720,13 +721,13 @@ async def finalize(self, report: GenerativeBenchmarksReport) -> Path: data_builder = UIDataBuilder(report.benchmarks) data = data_builder.to_dict() - camel_data = humps.camelize(data) + camel_data = recursive_key_update(deepcopy(data), camelize_str) ui_api_data = {} - for key, value in camel_data.items(): - placeholder_key = f"window.{key} = {{}};" + for k, v in camel_data.items(): + placeholder_key = f"window.{k} = {{}};" replacement_value = ( - f"window.{key} = {json.dumps(value, indent=2)};\n" + f"window.{k} = {json.dumps(v, indent=2)};\n" ) ui_api_data[placeholder_key] = replacement_value diff --git a/src/guidellm/utils/__init__.py b/src/guidellm/utils/__init__.py index 20daeea4..bd6b5a90 100644 --- a/src/guidellm/utils/__init__.py +++ b/src/guidellm/utils/__init__.py @@ -1,6 +1,7 @@ from .auto_importer import AutoImporterMixin from .console import Colors, Console, ConsoleUpdateStep, StatusIcons, StatusStyles from .default_group import DefaultGroupHandler +from .dict import recursive_key_update from .encoding import ( Encoder, EncodingTypesAlias, @@ -55,6 +56,7 @@ ) from .text import ( EndlessTextCreator, + camelize_str, clean_text, filter_text, format_value_display, @@ -79,6 +81,7 @@ "EndlessTextCreator", "InfoMixin", "IntegerRangeSampler", + "camelize_str", "InterProcessMessaging", "InterProcessMessagingManagerQueue", "InterProcessMessagingPipe", @@ -110,6 +113,7 @@ "format_value_display", "get_literal_vals", "is_punctuation", + "recursive_key_update", "load_text", "safe_add", "safe_divide", diff --git a/src/guidellm/utils/dict.py b/src/guidellm/utils/dict.py new file mode 100644 index 00000000..5b4579c9 --- /dev/null +++ b/src/guidellm/utils/dict.py @@ -0,0 +1,23 @@ +def recursive_key_update(d, key_update_func): + if not isinstance(d, dict) and not isinstance(d, list): + return d + + if isinstance(d, list): + for item in d: + recursive_key_update(item, key_update_func) + return d + + updated_key_pairs = [] + for key, _ in d.items(): + updated_key = key_update_func(key) + if key != updated_key: + updated_key_pairs.append((key, updated_key)) + + for key_pair in updated_key_pairs: + old_key, updated_key = key_pair + d[updated_key] = d[old_key] + del d[old_key] + + for _, value in d.items(): + recursive_key_update(value, key_update_func) + return d diff --git a/src/guidellm/utils/text.py b/src/guidellm/utils/text.py index 8385ec7b..a659ac6a 100644 --- a/src/guidellm/utils/text.py +++ b/src/guidellm/utils/text.py @@ -28,6 +28,7 @@ __all__ = [ "MAX_PATH_LENGTH", "EndlessTextCreator", + "camelize_str", "clean_text", "filter_text", "format_value_display", @@ -281,6 +282,12 @@ def is_punctuation(text: str) -> bool: return len(text) == 1 and not text.isalnum() and not text.isspace() +def camelize_str(snake_case_string: str) -> str: + return (words := snake_case_string.split("_"))[0].lower() + "".join( + word.capitalize() for word in words[1:] + ) + + class EndlessTextCreator: """ Infinite text generator for load testing and content creation operations. 
diff --git a/tests/unit/utils/dict.py b/tests/unit/utils/dict.py new file mode 100644 index 00000000..09d93df6 --- /dev/null +++ b/tests/unit/utils/dict.py @@ -0,0 +1,71 @@ +import pytest + +from guidellm.utils.dict import recursive_key_update + + +def update_str(string): + return string + "_updated" + + +@pytest.mark.smoke +def test_recursive_key_update_updates_keys(): + my_dict = { + "my_key": { + "my_nested_key": {"my_double_nested_key": "someValue"}, + "my_other_nested_key": "someValue", + }, + "my_other_key": "value", + } + my_updated_dict = { + "my_key_updated": { + "my_nested_key_updated": {"my_double_nested_key_updated": "someValue"}, + "my_other_nested_key_updated": "someValue", + }, + "my_other_key_updated": "value", + } + recursive_key_update(my_dict, update_str) + assert my_dict == my_updated_dict + + +def truncate_str_to_ten(string): + return string[:10] + + +@pytest.mark.smoke +def test_recursive_key_update_leaves_unchanged_keys(): + my_dict = { + "my_key": { + "my_nested_key": {"my_double_nested_key": "someValue"}, + "my_other_nested_key": "someValue", + }, + "my_other_key": "value", + } + my_updated_dict = { + "my_key": { + "my_nested_": {"my_double_": "someValue"}, + "my_other_n": "someValue", + }, + "my_other_k": "value", + } + recursive_key_update(my_dict, truncate_str_to_ten) + assert my_dict == my_updated_dict + + +@pytest.mark.smoke +def test_recursive_key_update_updates_dicts_in_list(): + my_dict = { + "my_key": [ + {"my_list_item_key_1": "someValue"}, + {"my_list_item_key_2": "someValue"}, + {"my_list_item_key_3": "someValue"}, + ] + } + my_updated_dict = { + "my_key_updated": [ + {"my_list_item_key_1_updated": "someValue"}, + {"my_list_item_key_2_updated": "someValue"}, + {"my_list_item_key_3_updated": "someValue"}, + ] + } + recursive_key_update(my_dict, update_str) + assert my_dict == my_updated_dict diff --git a/tests/unit/utils/text.py b/tests/unit/utils/text.py new file mode 100644 index 00000000..ae0fa52f --- /dev/null +++ b/tests/unit/utils/text.py @@ -0,0 +1,13 @@ +import pytest + +from guidellm.utils.text import camelize_str + + +@pytest.mark.smoke +def test_camelize_str_camelizes_string(): + assert camelize_str("no_longer_snake_case") == "noLongerSnakeCase" + + +@pytest.mark.smoke +def test_camelize_str_leaves_non_snake_case_text_untouched(): + assert camelize_str("notsnakecase") == "notsnakecase" From edd6317f977736042b41e5b58b91b63fb37ee8f4 Mon Sep 17 00:00:00 2001 From: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> Date: Tue, 30 Sep 2025 14:23:05 -0400 Subject: [PATCH 38/90] Update imports in src/guidellm/benchmark/output.py Co-authored-by: Samuel Monson Signed-off-by: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> --- src/guidellm/benchmark/output.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index e8efb431..28b983fb 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -37,8 +37,7 @@ safe_format_timestamp, split_text_list_by_length, ) -from guidellm.utils.dict import recursive_key_update -from guidellm.utils.text import camelize_str +from guidellm.utils import recursive_key_update, camelize_str __all__ = [ "GenerativeBenchmarkerCSV", From 730eeb17fc7d0128227de01d31c3681b63118529 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Wed, 1 Oct 2025 08:05:07 -0400 Subject: [PATCH 39/90] Initial state for datasets rework to enable multimodal and more complicated combinations --- 
src/guidellm/__init__.py | 3 + src/guidellm/__main__.py | 97 ++- src/guidellm/backends/__init__.py | 2 + src/guidellm/backends/backend.py | 1 + src/guidellm/backends/objects.py | 152 ++-- src/guidellm/backends/openai.py | 652 ++++++------------ src/guidellm/benchmark/aggregator.py | 227 +++--- src/guidellm/benchmark/entrypoints.py | 268 +++---- src/guidellm/benchmark/objects.py | 8 +- src/guidellm/benchmark/profile.py | 5 +- src/guidellm/data/__init__.py | 52 +- src/guidellm/data/datasets.py | 88 +++ src/guidellm/data/deserializers/__init__.py | 51 ++ .../data/deserializers/deserializer.py | 81 +++ src/guidellm/data/deserializers/file.py | 221 ++++++ .../data/deserializers/huggingface.py | 75 ++ src/guidellm/data/deserializers/memory.py | 191 +++++ src/guidellm/data/deserializers/synthetic.py | 255 +++++++ src/guidellm/data/formatters/__init__.py | 47 ++ src/guidellm/data/formatters/environment.py | 63 ++ src/guidellm/data/formatters/filters.py | 324 +++++++++ src/guidellm/data/formatters/globals.py | 9 + src/guidellm/data/formatters/objects.py | 92 +++ src/guidellm/data/formatters/templates.py | 182 +++++ src/guidellm/data/loaders.py | 93 +++ src/guidellm/data/objects.py | 230 ++++++ src/guidellm/data/preprocessors/__init__.py | 7 + src/guidellm/data/preprocessors/mappers.py | 115 +++ src/guidellm/data/preprocessors/objects.py | 20 + src/guidellm/data/prideandprejudice.txt.gz | Bin 241795 -> 0 bytes src/guidellm/data/utils.py | 161 +++++ src/guidellm/dataset/__init__.py | 22 - src/guidellm/dataset/creator.py | 213 ------ src/guidellm/dataset/entrypoints.py | 42 -- src/guidellm/dataset/file.py | 92 --- src/guidellm/dataset/hf_datasets.py | 62 -- src/guidellm/dataset/in_memory.py | 132 ---- src/guidellm/dataset/synthetic.py | 287 -------- src/guidellm/logger.py | 2 +- src/guidellm/preprocess/dataset.py | 7 +- src/guidellm/request/__init__.py | 18 - src/guidellm/request/loader.py | 284 -------- src/guidellm/request/request.py | 79 --- src/guidellm/request/types.py | 10 - src/guidellm/scheduler/scheduler.py | 2 +- src/guidellm/scheduler/worker_group.py | 9 +- 46 files changed, 2949 insertions(+), 2084 deletions(-) create mode 100644 src/guidellm/data/datasets.py create mode 100644 src/guidellm/data/deserializers/__init__.py create mode 100644 src/guidellm/data/deserializers/deserializer.py create mode 100644 src/guidellm/data/deserializers/file.py create mode 100644 src/guidellm/data/deserializers/huggingface.py create mode 100644 src/guidellm/data/deserializers/memory.py create mode 100644 src/guidellm/data/deserializers/synthetic.py create mode 100644 src/guidellm/data/formatters/__init__.py create mode 100644 src/guidellm/data/formatters/environment.py create mode 100644 src/guidellm/data/formatters/filters.py create mode 100644 src/guidellm/data/formatters/globals.py create mode 100644 src/guidellm/data/formatters/objects.py create mode 100644 src/guidellm/data/formatters/templates.py create mode 100644 src/guidellm/data/loaders.py create mode 100644 src/guidellm/data/objects.py create mode 100644 src/guidellm/data/preprocessors/__init__.py create mode 100644 src/guidellm/data/preprocessors/mappers.py create mode 100644 src/guidellm/data/preprocessors/objects.py delete mode 100644 src/guidellm/data/prideandprejudice.txt.gz create mode 100644 src/guidellm/data/utils.py delete mode 100644 src/guidellm/dataset/__init__.py delete mode 100644 src/guidellm/dataset/creator.py delete mode 100644 src/guidellm/dataset/entrypoints.py delete mode 100644 src/guidellm/dataset/file.py delete mode 100644 
src/guidellm/dataset/hf_datasets.py delete mode 100644 src/guidellm/dataset/in_memory.py delete mode 100644 src/guidellm/dataset/synthetic.py delete mode 100644 src/guidellm/request/__init__.py delete mode 100644 src/guidellm/request/loader.py delete mode 100644 src/guidellm/request/request.py delete mode 100644 src/guidellm/request/types.py diff --git a/src/guidellm/__init__.py b/src/guidellm/__init__.py index f2206e94..dde6e937 100644 --- a/src/guidellm/__init__.py +++ b/src/guidellm/__init__.py @@ -7,6 +7,8 @@ import logging import os +from datasets.utils.logging import disable_progress_bar + with ( open(os.devnull, "w") as devnull, # noqa: PTH123 contextlib.redirect_stderr(devnull), @@ -19,6 +21,7 @@ os.environ["TOKENIZERS_PARALLELISM"] = "false" # Silence warnings for tokenizers hf_logging.set_verbosity_error() logging.getLogger("transformers").setLevel(logging.ERROR) + disable_progress_bar() from .logger import configure_logger, logger from .settings import ( diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 13a748d5..82632bc8 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -56,6 +56,11 @@ from guidellm.benchmark.scenario import ( GenerativeTextScenario, ) +from guidellm.data import ( + GenerativeDatasetArgs, + GenerativeRequestFormatter, + GenerativeRequestType, +) from guidellm.mock_server import MockServer, MockServerConfig from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset from guidellm.scheduler import StrategyType @@ -143,6 +148,7 @@ def benchmark(): @click.option( "--data", type=str, + multiple=True, help=( "The HuggingFace dataset ID, a path to a HuggingFace dataset, " "a path to a data file csv, json, jsonl, or txt, " @@ -197,9 +203,7 @@ def benchmark(): default=None, help=( "A JSON string containing any arguments to pass to the backend as a " - "dict with **kwargs. Headers can be removed by setting their value to " - "null. For example: " - """'{"headers": {"Authorization": null, "Custom-Header": "Custom-Value"}}'""" + "dict with **kwargs." ), ) @click.option( @@ -234,19 +238,72 @@ def benchmark(): @click.option( "--data-args", default=None, - callback=cli_tools.parse_json, + callback=( + lambda _ctx, _param, value: [ + GenerativeDatasetArgs.model_validate_json(val) + if val + else GenerativeDatasetArgs() + for val in value + ] + if value + else None + ), help=( "A JSON string containing any arguments to pass to the dataset creation " "as a dict with **kwargs." ), ) +@click.option( + "--data-samples", + default=-1, + type=int, + help=( + "The number of samples to use from the dataset. If -1 (default), will use all " + "samples in the dataset." + ), +) @click.option( "--data-sampler", default=None, - type=click.Choice(["random"]), + type=click.Choice(["shuffle"]), + help="The data sampler type to use.", +) +@click.option( + "--data-request-type", + default="text_completions", + type=str, + help=( + "The type of request to create for each data sample. " + f"For example, {list(get_literal_vals(GenerativeRequestType))}." + ), +) +@click.option( + "--data-request-template", + default=None, + help=( + "A Jinja2 template string or path to a Jinja2 template file to use for " + "creating requests from the data samples. If not provided, will use a " + "default template based on the request type." 
+ ), +) +@click.option( + "--data-request-extras", + default=None, + callback=cli_tools.parse_json, + help=("A JSON string of extra data to include with each data request."), +) +@click.option( + "--data-request-nonstreaming", + is_flag=True, + help="Set this flag to disable streaming for the data requests.", +) +@click.option( + "--dataloader_kwargs", + default=None, + callback=cli_tools.parse_json, help=( - "The data sampler type to use. 'random' will add a random shuffle on the data. " - "Defaults to None" + "A JSON string containing any arguments to pass to the dataloader constructor " + "as a dict with **kwargs." ), ) # Output configuration @@ -387,7 +444,13 @@ def run( processor, processor_args, data_args, + data_samples, data_sampler, + data_request_type, + data_request_template, + data_request_extras, + data_request_nonstreaming, + dataloader_kwargs, # Output configuration output_path, output_formats, @@ -420,7 +483,8 @@ def run( asyncio.run( benchmark_generative_text( target=target, - data=data, + data=list(data), + # Benchmark configuration profile=profile, rate=rate, random_seed=random_seed, @@ -432,7 +496,22 @@ def run( processor=processor, processor_args=processor_args, data_args=data_args, - data_sampler=data_sampler, + data_samples=data_samples, + data_column_mapper=None, # use default + data_request_formatter=GenerativeRequestFormatter( + request_type=data_request_type, + request_template=data_request_template, + request_extras=data_request_extras, + request_defaults=( + {} # disable defaults if non-streaming + if data_request_nonstreaming + else None + ), + ), + data_preprocessors=None, # no preprocessors through CLI for now + dataloader_sampler=data_sampler, + dataloader_collate_fn=None, # use default + dataloader_kwargs=dataloader_kwargs, # Output configuration output_path=output_path, output_formats=[ diff --git a/src/guidellm/backends/__init__.py b/src/guidellm/backends/__init__.py index 064722ac..4bcf5683 100644 --- a/src/guidellm/backends/__init__.py +++ b/src/guidellm/backends/__init__.py @@ -13,6 +13,7 @@ GenerationRequest, GenerationRequestTimings, GenerationResponse, + GenerationTokenStats, ) from .openai import OpenAIHTTPBackend @@ -22,5 +23,6 @@ "GenerationRequest", "GenerationRequestTimings", "GenerationResponse", + "GenerationTokenStats", "OpenAIHTTPBackend", ] diff --git a/src/guidellm/backends/backend.py b/src/guidellm/backends/backend.py index 8f91d5e7..a7d82979 100644 --- a/src/guidellm/backends/backend.py +++ b/src/guidellm/backends/backend.py @@ -115,5 +115,6 @@ def requests_limit(self) -> int | None: async def default_model(self) -> str | None: """ :return: The default model name or identifier for generation requests. + None if no default model is available. """ ... diff --git a/src/guidellm/backends/objects.py b/src/guidellm/backends/objects.py index 05280940..88d25949 100644 --- a/src/guidellm/backends/objects.py +++ b/src/guidellm/backends/objects.py @@ -6,62 +6,51 @@ implementations. 
""" -import uuid -from typing import Any, Literal, Optional +from __future__ import annotations + +from typing import Literal from pydantic import Field +from guidellm.data import ( + GenerationRequest, + GenerationRequestArguments, + GenerationRequestTimings, +) from guidellm.scheduler import ( - MeasuredRequestTimings, SchedulerMessagingPydanticRegistry, ) from guidellm.utils import StandardBaseModel __all__ = [ "GenerationRequest", + "GenerationRequestArguments", "GenerationRequestTimings", "GenerationResponse", + "GenerationTokenStats", ] @SchedulerMessagingPydanticRegistry.register() -class GenerationRequest(StandardBaseModel): - """Request model for backend generation operations.""" +class GenerationTokenStats(StandardBaseModel): + """Token statistics for generation requests and responses.""" - request_id: str = Field( - default_factory=lambda: str(uuid.uuid4()), - description="Unique identifier for the request.", - ) - request_type: Literal["text_completions", "chat_completions"] = Field( - default="text_completions", - description=( - "Type of request. 'text_completions' uses backend.text_completions(), " - "'chat_completions' uses backend.chat_completions()." - ), - ) - content: Any = Field( - description=( - "Request content. For text_completions: string or list of strings. " - "For chat_completions: string, list of messages, or raw content " - "(set raw_content=True in params)." - ) - ) - params: dict[str, Any] = Field( - default_factory=dict, - description=( - "Additional parameters passed to backend methods. " - "Common: max_tokens, temperature, stream." - ), + request: int | None = Field( + default=None, description="Number of tokens in the original request." ) - stats: dict[Literal["prompt_tokens"], int] = Field( - default_factory=dict, - description="Request statistics including prompt token count.", - ) - constraints: dict[Literal["output_tokens"], int] = Field( - default_factory=dict, - description="Request constraints such as maximum output tokens.", + response: int | None = Field( + default=None, description="Number of tokens in the generated response." ) + def value( + self, preference: Literal["request", "response"] | None = None + ) -> int | None: + if preference == "request": + return self.request + if preference == "response": + return self.response + return self.response if self.response is not None else self.request + @SchedulerMessagingPydanticRegistry.register() class GenerationResponse(StandardBaseModel): @@ -70,87 +59,32 @@ class GenerationResponse(StandardBaseModel): request_id: str = Field( description="Unique identifier matching the original GenerationRequest." ) - request_args: dict[str, Any] = Field( + request_args: GenerationRequestArguments = Field( description="Arguments passed to the backend for this request." ) - value: Optional[str] = Field( + text: str | None = Field( default=None, - description="Complete generated text content. None for streaming responses.", - ) - delta: Optional[str] = Field( - default=None, description="Incremental text content for streaming responses." + description="The generated response text.", ) iterations: int = Field( default=0, description="Number of generation iterations completed." ) - request_prompt_tokens: Optional[int] = Field( - default=None, description="Token count from the original request prompt." 
- ) - request_output_tokens: Optional[int] = Field( - default=None, - description="Expected output token count from the original request.", - ) - response_prompt_tokens: Optional[int] = Field( - default=None, description="Actual prompt token count reported by the backend." + + prompt_stats: GenerationTokenStats = Field( + default_factory=GenerationTokenStats, + description="Token statistics from the prompt.", ) - response_output_tokens: Optional[int] = Field( - default=None, description="Actual output token count reported by the backend." + output_stats: GenerationTokenStats = Field( + default_factory=GenerationTokenStats, + description="Token statistics from the generated output.", ) - @property - def prompt_tokens(self) -> Optional[int]: - """ - :return: The number of prompt tokens used in the request - (response_prompt_tokens if available, otherwise request_prompt_tokens). - """ - return self.response_prompt_tokens or self.request_prompt_tokens - - @property - def output_tokens(self) -> Optional[int]: - """ - :return: The number of output tokens generated in the response - (response_output_tokens if available, otherwise request_output_tokens). - """ - return self.response_output_tokens or self.request_output_tokens - - @property - def total_tokens(self) -> Optional[int]: - """ - :return: The total number of tokens used in the request and response. - Sum of prompt_tokens and output_tokens. - """ - if self.prompt_tokens is None or self.output_tokens is None: - return None - return self.prompt_tokens + self.output_tokens - - def preferred_prompt_tokens( - self, preferred_source: Literal["request", "response"] - ) -> Optional[int]: - if preferred_source == "request": - return self.request_prompt_tokens or self.response_prompt_tokens - else: - return self.response_prompt_tokens or self.request_prompt_tokens - - def preferred_output_tokens( - self, preferred_source: Literal["request", "response"] - ) -> Optional[int]: - if preferred_source == "request": - return self.request_output_tokens or self.response_output_tokens - else: - return self.response_output_tokens or self.request_output_tokens - - -@SchedulerMessagingPydanticRegistry.register() -@MeasuredRequestTimings.register("generation_request_timings") -class GenerationRequestTimings(MeasuredRequestTimings): - """Timing model for tracking generation request lifecycle events.""" + def total_tokens( + self, preference: Literal["request", "response"] | None = None + ) -> int | None: + prompt_tokens = self.prompt_stats.value(preference=preference) + output_tokens = self.output_stats.value(preference=preference) - timings_type: Literal["generation_request_timings"] = "generation_request_timings" - first_iteration: Optional[float] = Field( - default=None, - description="Unix timestamp when the first generation iteration began.", - ) - last_iteration: Optional[float] = Field( - default=None, - description="Unix timestamp when the last generation iteration completed.", - ) + if prompt_tokens is None and output_tokens is None: + return None + return (prompt_tokens or 0) + (output_tokens or 0) diff --git a/src/guidellm/backends/openai.py b/src/guidellm/backends/openai.py index ce83076f..22394afe 100644 --- a/src/guidellm/backends/openai.py +++ b/src/guidellm/backends/openai.py @@ -10,17 +10,15 @@ OpenAIHTTPBackend: HTTP backend for OpenAI-compatible API servers. 
""" -import base64 -import contextlib -import copy +from __future__ import annotations + +import asyncio import json import time from collections.abc import AsyncIterator -from pathlib import Path -from typing import Any, ClassVar, Optional, Union +from typing import Any, cast import httpx -from PIL import Image from pydantic import dataclasses from guidellm.backends.backend import Backend @@ -28,9 +26,18 @@ GenerationRequest, GenerationRequestTimings, GenerationResponse, + GenerationTokenStats, ) from guidellm.scheduler import ScheduledRequestInfo +try: + import orjson + + HAS_ORJSON = True +except ImportError: + orjson = None + HAS_ORJSON = False + __all__ = ["OpenAIHTTPBackend", "UsageStats"] @@ -38,8 +45,18 @@ class UsageStats: """Token usage statistics for generation requests.""" - prompt_tokens: Optional[int] = None - output_tokens: Optional[int] = None + prompt_tokens: int | None = None + output_tokens: int | None = None + + +open_ai_paths: dict[str, str] = { + "health": "health", + "models": "v1/models", + "text_completions": "v1/completions", + "chat_completions": "v1/chat/completions", + "audio_transcriptions": "v1/audio/transcriptions", + "audio_translations": "v1/audio/translations", +} @Backend.register("openai_http") @@ -66,78 +83,34 @@ class OpenAIHTTPBackend(Backend): await backend.process_shutdown() """ - HEALTH_PATH: ClassVar[str] = "/health" - MODELS_PATH: ClassVar[str] = "/v1/models" - TEXT_COMPLETIONS_PATH: ClassVar[str] = "/v1/completions" - CHAT_COMPLETIONS_PATH: ClassVar[str] = "/v1/chat/completions" - - MODELS_KEY: ClassVar[str] = "models" - TEXT_COMPLETIONS_KEY: ClassVar[str] = "text_completions" - CHAT_COMPLETIONS_KEY: ClassVar[str] = "chat_completions" - def __init__( self, target: str, - model: Optional[str] = None, - api_key: Optional[str] = None, - organization: Optional[str] = None, - project: Optional[str] = None, + model: str | None = None, timeout: float = 60.0, http2: bool = True, follow_redirects: bool = True, - max_output_tokens: Optional[int] = None, - stream_response: bool = True, - extra_query: Optional[dict] = None, - extra_body: Optional[dict] = None, - remove_from_body: Optional[list[str]] = None, - headers: Optional[dict] = None, verify: bool = False, + validate_backend: bool | str | dict[str, Any] = True, ): - """ - Initialize OpenAI HTTP backend. - - :param target: Target URL for the OpenAI server (e.g., "http://localhost:8000"). - :param model: Model to use for requests. If None, uses first available model. - :param api_key: API key for authentication. Adds Authorization header - if provided. - :param organization: Organization ID. Adds OpenAI-Organization header - if provided. - :param project: Project ID. Adds OpenAI-Project header if provided. - :param timeout: Request timeout in seconds. Defaults to 60 seconds. - :param http2: Whether to use HTTP/2. Defaults to True. - :param follow_redirects: Whether to follow redirects. Default True. - :param max_output_tokens: Maximum tokens for completions. If None, none is set. - :param stream_response: Whether to stream responses by default. Can be - overridden per request. Defaults to True. - :param extra_query: Additional query parameters. Both general and - endpoint-specific with type keys supported. - :param extra_body: Additional body parameters. Both general and - endpoint-specific with type keys supported. - :param remove_from_body: Parameter names to remove from request bodies. - :param headers: Additional HTTP headers. - :param verify: Whether to verify SSL certificates. Default False. 
- """ super().__init__(type_="openai_http") # Request Values self.target = target.rstrip("/").removesuffix("/v1") self.model = model - self.headers = self._build_headers(api_key, organization, project, headers) # Store configuration self.timeout = timeout self.http2 = http2 self.follow_redirects = follow_redirects self.verify = verify - self.max_output_tokens = max_output_tokens - self.stream_response = stream_response - self.extra_query = extra_query or {} - self.extra_body = extra_body or {} - self.remove_from_body = remove_from_body or [] + self.validate_backend: dict[str, Any] | None = self._resolve_validate_kwargs( + validate_backend + ) # Runtime state self._in_process = False - self._async_client: Optional[httpx.AsyncClient] = None + self._async_client: httpx.AsyncClient | None = None @property def info(self) -> dict[str, Any]: @@ -147,20 +120,12 @@ def info(self) -> dict[str, Any]: return { "target": self.target, "model": self.model, - "headers": self.headers, "timeout": self.timeout, "http2": self.http2, "follow_redirects": self.follow_redirects, "verify": self.verify, - "max_output_tokens": self.max_output_tokens, - "stream_response": self.stream_response, - "extra_query": self.extra_query, - "extra_body": self.extra_body, - "remove_from_body": self.remove_from_body, - "health_path": self.HEALTH_PATH, - "models_path": self.MODELS_PATH, - "text_completions_path": self.TEXT_COMPLETIONS_PATH, - "chat_completions_path": self.CHAT_COMPLETIONS_PATH, + "openai_paths": open_ai_paths, + "validate_backend": self.validate_backend, } async def process_startup(self): @@ -206,45 +171,17 @@ async def validate(self): """ self._check_in_process() - if self.model: - with contextlib.suppress(httpx.TimeoutException, httpx.HTTPStatusError): - # Model is set, use /health endpoint as first check - target = f"{self.target}{self.HEALTH_PATH}" - headers = self._get_headers() - response = await self._async_client.get(target, headers=headers) # type: ignore [union-attr] - response.raise_for_status() - - return - - with contextlib.suppress(httpx.TimeoutException, httpx.HTTPStatusError): - # Check if models endpoint is available next - models = await self.available_models() - if models and not self.model: - self.model = models[0] - elif not self.model: - raise RuntimeError( - "No model available and could not set a default model " - "from the server's available models." - ) - + if not self.validate_backend: return - with contextlib.suppress(httpx.TimeoutException, httpx.HTTPStatusError): - # Last check, fall back on dummy request to text completions - async for _, __ in self.text_completions( - prompt="Validate backend", - request_id="validate", - output_token_count=1, - stream_response=False, - ): - pass - - return - - raise RuntimeError( - "Backend validation failed. Could not connect to the server or " - "validate the backend configuration." - ) + try: + response = await self._async_client.request(**self.validate_backend) + response.raise_for_status() + except Exception as exc: + raise RuntimeError( + "Backend validation request failed. Could not connect to the server " + "or validate the backend configuration." 
+ ) from exc async def available_models(self) -> list[str]: """ @@ -256,15 +193,13 @@ async def available_models(self) -> list[str]: """ self._check_in_process() - target = f"{self.target}{self.MODELS_PATH}" - headers = self._get_headers() - params = self._get_params(self.MODELS_KEY) - response = await self._async_client.get(target, headers=headers, params=params) # type: ignore [union-attr] + target = f"{self.target}/{open_ai_paths['models']}" + response = await self._async_client.get(target) response.raise_for_status() return [item["id"] for item in response.json()["data"]] - async def default_model(self) -> Optional[str]: + async def default_model(self) -> str | None: """ Get the default model for this backend. @@ -276,11 +211,11 @@ async def default_model(self) -> Optional[str]: models = await self.available_models() return models[0] if models else None - async def resolve( + async def resolve( # noqa: C901 self, request: GenerationRequest, request_info: ScheduledRequestInfo, - history: Optional[list[tuple[GenerationRequest, GenerationResponse]]] = None, + history: list[tuple[GenerationRequest, GenerationResponse]] | None = None, ) -> AsyncIterator[tuple[GenerationResponse, ScheduledRequestInfo]]: """ Process a generation request and yield progressive responses. @@ -300,350 +235,207 @@ async def resolve( "Multi-turn requests with conversation history are not yet supported" ) - response = GenerationResponse( - request_id=request.request_id, - request_args={ - "request_type": request.request_type, - "output_token_count": request.constraints.get("output_tokens"), - **request.params, - }, - value="", - request_prompt_tokens=request.stats.get("prompt_tokens"), - request_output_tokens=request.constraints.get("output_tokens"), - ) request_info.request_timings = GenerationRequestTimings() - request_info.request_timings.request_start = time.time() - - completion_method = ( - self.text_completions - if request.request_type == "text_completions" - else self.chat_completions - ) - completion_kwargs = ( - { - "prompt": request.content, - "request_id": request.request_id, - "output_token_count": request.constraints.get("output_tokens"), - "stream_response": request.params.get("stream", self.stream_response), - **request.params, - } - if request.request_type == "text_completions" - else { - "content": request.content, - "request_id": request.request_id, - "output_token_count": request.constraints.get("output_tokens"), - "stream_response": request.params.get("stream", self.stream_response), - **request.params, - } + request.arguments.url = ( + request.arguments.url or f"{self.target}/{request.arguments.path}" + if request.arguments.path is not None + else f"{self.target}/{open_ai_paths[request.request_type]}" ) + request_info.request_timings.request_start = time.time() - async for delta, usage_stats in completion_method(**completion_kwargs): - if request_info.request_timings.request_start is None: - request_info.request_timings.request_start = time.time() - - if delta is not None: - if request_info.request_timings.first_iteration is None: - request_info.request_timings.first_iteration = time.time() - response.value += delta # type: ignore [operator] - response.delta = delta - request_info.request_timings.last_iteration = time.time() - response.iterations += 1 - - if usage_stats is not None: - request_info.request_timings.request_end = time.time() - response.response_output_tokens = usage_stats.output_tokens - response.response_prompt_tokens = usage_stats.prompt_tokens - - yield response, request_info 
- - if request_info.request_timings.request_end is None: - request_info.request_timings.request_end = time.time() - response.delta = None - yield response, request_info - - async def text_completions( - self, - prompt: Union[str, list[str]], - request_id: Optional[str], # noqa: ARG002 - output_token_count: Optional[int] = None, - stream_response: bool = True, - **kwargs, - ) -> AsyncIterator[tuple[Optional[str], Optional[UsageStats]]]: - """ - Generate text completions using the /v1/completions endpoint. - - :param prompt: Text prompt(s) for completion. Single string or list. - :param request_id: Request identifier for tracking. - :param output_token_count: Maximum tokens to generate. Overrides default - if specified. - :param stream_response: Whether to stream response progressively. - :param kwargs: Additional request parameters (temperature, top_p, etc.). - :yields: Tuples of (generated_text, usage_stats). First yield is (None, None). - :raises RuntimeError: If backend is not initialized. - :raises HTTPError: If API request fails. - """ - self._check_in_process() - target = f"{self.target}{self.TEXT_COMPLETIONS_PATH}" - headers = self._get_headers() - params = self._get_params(self.TEXT_COMPLETIONS_KEY) - body = self._get_body( - endpoint_type=self.TEXT_COMPLETIONS_KEY, - request_kwargs=kwargs, - max_output_tokens=output_token_count, - prompt=prompt, - ) - yield None, None # Initial yield for async iterator to signal start - - if not stream_response: - response = await self._async_client.post( # type: ignore [union-attr] - target, - headers=headers, - params=params, - json=body, + if not request.arguments.stream: + response = await self._async_client.request( + request.arguments.method or "POST", + request.arguments.url, + content=request.arguments.content_body, + files=request.arguments.files, + json=request.arguments.json_body, + params=request.arguments.params, + headers=request.arguments.headers, ) response.raise_for_status() data = response.json() + prompt_stats, output_stats = self._extract_response_stats(data, request) + request_info.request_timings.request_end = time.time() + yield ( - self._get_completions_text_content(data), - self._get_completions_usage_stats(data), + GenerationResponse( + request_id=request.request_id, + request_args=request.arguments, + text=self._extract_response_text(data), + iterations=0, + prompt_stats=prompt_stats, + output_stats=output_stats, + ), + request_info, ) return - body.update({"stream": True, "stream_options": {"include_usage": True}}) - async with self._async_client.stream( # type: ignore [union-attr] - "POST", - target, - headers=headers, - params=params, - json=body, - ) as stream: - stream.raise_for_status() - async for line in stream.aiter_lines(): - if not line or not line.strip().startswith("data:"): - continue - if line.strip() == "data: [DONE]": - break - data = json.loads(line.strip()[len("data: ") :]) - yield ( - self._get_completions_text_content(data), - self._get_completions_usage_stats(data), - ) - - async def chat_completions( - self, - content: Union[ - str, - list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], - Any, - ], - request_id: Optional[str] = None, # noqa: ARG002 - output_token_count: Optional[int] = None, - raw_content: bool = False, - stream_response: bool = True, - **kwargs, - ) -> AsyncIterator[tuple[Optional[str], Optional[UsageStats]]]: - """ - Generate chat completions using the /v1/chat/completions endpoint. 
- - Supports multimodal inputs including text and images with message formatting. - - :param content: Chat content - string, list of mixed content, or raw content - when raw_content=True. - :param request_id: Request identifier (currently unused). - :param output_token_count: Maximum tokens to generate. Overrides default - if specified. - :param raw_content: If True, passes content directly without formatting. - :param stream_response: Whether to stream response progressively. - :param kwargs: Additional request parameters (temperature, top_p, tools, etc.). - :yields: Tuples of (generated_text, usage_stats). First yield is (None, None). - :raises RuntimeError: If backend is not initialized. - :raises HTTPError: If API request fails. - """ - self._check_in_process() - target = f"{self.target}{self.CHAT_COMPLETIONS_PATH}" - headers = self._get_headers() - params = self._get_params(self.CHAT_COMPLETIONS_KEY) - body = self._get_body( - endpoint_type=self.CHAT_COMPLETIONS_KEY, - request_kwargs=kwargs, - max_output_tokens=output_token_count, - messages=self._get_chat_messages(content) if not raw_content else content, - **kwargs, - ) - yield None, None # Initial yield for async iterator to signal start + deltas = [] + prompt_stats = None + output_stats = None + end_reached = False + + try: + async with self._async_client.stream( + request.arguments.method or "POST", + request.arguments.url, + content=request.arguments.content_body, + files=request.arguments.files, + json=request.arguments.json_body, + params=request.arguments.params, + headers=request.arguments.headers, + ) as stream: + stream.raise_for_status() + buffer = bytearray() + + async for chunk in stream.aiter_bytes(): + if not chunk or end_reached: + continue + buffer.extend(chunk) + + while (start := buffer.find(b"data:")) != -1 and ( + end := buffer.find(b"\n", start) + ) != -1: + line = buffer[start + len(b"data:") : end].strip() + buffer = buffer[end + 1 :] + + if not line: + continue + + if line == b"[DONE]": + if request_info.request_timings.request_end is None: + request_info.request_timings.request_end = time.time() + end_reached = True + break + + data = ( + json.loads(line) if not HAS_ORJSON else orjson.loads(line) + ) + + if "usage" in data and data["usage"] is not None: + request_info.request_timings.request_end = time.time() + prompt_stats, output_stats = self._extract_response_stats( + data, request + ) + else: + if request_info.request_timings.first_iteration is None: + request_info.request_timings.first_iteration = ( + time.time() + ) + request_info.request_timings.last_iteration = time.time() + deltas.append(self._extract_response_text(data)) - if not stream_response: - response = await self._async_client.post( # type: ignore [union-attr] - target, headers=headers, params=params, json=body - ) - response.raise_for_status() - data = response.json() yield ( - self._get_completions_text_content(data), - self._get_completions_usage_stats(data), + GenerationResponse( + request_id=request.request_id, + request_args=request.arguments, + text="".join(deltas) if deltas else None, + iterations=len(deltas), + prompt_stats=prompt_stats or GenerationTokenStats(), + output_stats=output_stats or GenerationTokenStats(), + ), + request_info, ) - return - - body.update({"stream": True, "stream_options": {"include_usage": True}}) - async with self._async_client.stream( # type: ignore [union-attr] - "POST", target, headers=headers, params=params, json=body - ) as stream: - stream.raise_for_status() - async for line in 
stream.aiter_lines(): - if not line or not line.strip().startswith("data:"): - continue - if line.strip() == "data: [DONE]": - break - data = json.loads(line.strip()[len("data: ") :]) - yield ( - self._get_completions_text_content(data), - self._get_completions_usage_stats(data), - ) - - def _build_headers( - self, - api_key: Optional[str], - organization: Optional[str], - project: Optional[str], - user_headers: Optional[dict], - ) -> dict[str, str]: - headers = {} - - if api_key: - headers["Authorization"] = ( - f"Bearer {api_key}" if not api_key.startswith("Bearer") else api_key + except asyncio.CancelledError as err: + yield ( # Ensure we yield what we have so far before stopping + GenerationResponse( + request_id=request.request_id, + request_args=request.arguments, + text="".join(deltas) if deltas else None, + iterations=len(deltas), + prompt_stats=prompt_stats or GenerationTokenStats(), + output_stats=output_stats or GenerationTokenStats(), + ), + request_info, ) - if organization: - headers["OpenAI-Organization"] = organization - if project: - headers["OpenAI-Project"] = project - if user_headers: - headers.update(user_headers) + raise err - return {key: val for key, val in headers.items() if val is not None} + def _extract_response_text(self, data: dict) -> str: + if not data: + return None - def _check_in_process(self): - if not self._in_process or self._async_client is None: - raise RuntimeError( - "Backend not started up for process, cannot process requests." - ) + object_type = data.get("object") or data.get("type") - def _get_headers(self) -> dict[str, str]: - return { - "Content-Type": "application/json", - **self.headers, - } + if object_type == "text_completion": + return data.get("choices", [{}])[0].get("text", "") - def _get_params(self, endpoint_type: str) -> dict[str, str]: - if endpoint_type in self.extra_query: - return copy.deepcopy(self.extra_query[endpoint_type]) - return copy.deepcopy(self.extra_query) + if object_type == "chat.completion": + return data.get("choices", [{}])[0].get("message", {}).get("content", "") - def _get_chat_messages( - self, - content: Union[ - str, - list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], - Any, - ], - ) -> list[dict[str, Any]]: - if isinstance(content, str): - return [{"role": "user", "content": content}] - - if not isinstance(content, list): - raise ValueError(f"Unsupported content type: {type(content)}") - - resolved_content = [] - for item in content: - if isinstance(item, dict): - resolved_content.append(item) - elif isinstance(item, str): - resolved_content.append({"type": "text", "text": item}) - elif isinstance(item, (Image.Image, Path)): - resolved_content.append(self._get_chat_message_media_item(item)) - else: - raise ValueError(f"Unsupported content item type: {type(item)}") - - return [{"role": "user", "content": resolved_content}] - - def _get_chat_message_media_item( - self, item: Union[Path, Image.Image] - ) -> dict[str, Any]: - if isinstance(item, Image.Image): - encoded = base64.b64encode(item.tobytes()).decode("utf-8") - return { - "type": "image", - "image": {"url": f"data:image/jpeg;base64,{encoded}"}, - } + if object_type == "chat.completion.chunk": + return data.get("choices", [{}])[0].get("delta", {}).get("content", "") - # Handle file paths - suffix = item.suffix.lower() - if suffix in [".jpg", ".jpeg"]: - image = Image.open(item) - encoded = base64.b64encode(image.tobytes()).decode("utf-8") - return { - "type": "image", - "image": {"url": f"data:image/jpeg;base64,{encoded}"}, - 
} - elif suffix == ".wav": - encoded = base64.b64encode(item.read_bytes()).decode("utf-8") - return { - "type": "input_audio", - "input_audio": {"data": encoded, "format": "wav"}, - } - else: - raise ValueError(f"Unsupported file type: {suffix}") + if "text" in data: + return data.get("text", "") - def _get_body( - self, - endpoint_type: str, - request_kwargs: Optional[dict[str, Any]], - max_output_tokens: Optional[int] = None, - **kwargs, - ) -> dict[str, Any]: - # Start with endpoint-specific extra body parameters - extra_body: dict = self.extra_body.get(endpoint_type, self.extra_body) - - body = copy.deepcopy(extra_body) - body.update(request_kwargs or {}) - body.update(kwargs) - body["model"] = self.model - - # Handle token limits - max_tokens = max_output_tokens or self.max_output_tokens - if max_tokens is not None: - body.update( - { - "max_tokens": max_tokens, - "max_completion_tokens": max_tokens, - } - ) - # Set stop conditions only for request-level limits - if max_output_tokens: - body.update({"stop": None, "ignore_eos": True}) + if "delta" in data: + return data.get("delta", "") - if self.remove_from_body: - for key in self.remove_from_body: - body.pop(key, None) + raise ValueError(f"Unsupported response format: {data}") - return {key: val for key, val in body.items() if val is not None} + def _extract_response_stats( + self, data: dict, request: GenerationRequest + ) -> tuple[GenerationTokenStats, GenerationTokenStats]: + prompt_stats = GenerationTokenStats() + output_stats = GenerationTokenStats() - def _get_completions_text_content(self, data: dict) -> Optional[str]: - if not data.get("choices"): - return None + if not data or not (usage := cast("dict", data.get("usage"))): + return prompt_stats, output_stats - choice: dict = data["choices"][0] - return ( - choice.get("text") - or choice.get("delta", {}).get("content") - or choice.get("message", {}).get("content") + prompt_stats.request = request.stats.get("prompt_tokens") + prompt_stats.response = usage.get("prompt_tokens", usage.get("input_tokens")) + prompt_token_details = usage.get( + "prompt_tokens_details", usage.get("input_tokens_details") + ) + if prompt_token_details: + for key, val in prompt_token_details.items(): + setattr(prompt_stats, key, val) + + output_stats.request = request.stats.get("output_tokens") + output_stats.response = usage.get( + "completion_tokens", usage.get("output_tokens") + ) + output_token_details = usage.get( + "completion_tokens_details", usage.get("output_tokens_details") ) + if output_token_details: + for key, val in output_token_details.items(): + setattr(output_stats, key, val) - def _get_completions_usage_stats(self, data: dict) -> Optional[UsageStats]: - if not data.get("usage"): + return prompt_stats, output_stats + + def _resolve_validate_kwargs( + self, validate_backend: bool | str | dict[str, Any] + ) -> dict[str, Any] | None: + if not (validate_kwargs := validate_backend): return None - return UsageStats( - prompt_tokens=data["usage"].get("prompt_tokens"), - output_tokens=data["usage"].get("completion_tokens"), - ) + if validate_kwargs is True: + validate_kwargs = "health" + + if isinstance(validate_kwargs, str) and validate_kwargs in open_ai_paths: + validate_kwargs = f"{self.target}/{open_ai_paths[validate_kwargs]}" + + if isinstance(validate_kwargs, str): + validate_kwargs = { + "method": "GET", + "url": validate_kwargs, + } + + if not isinstance(validate_kwargs, dict) or "url" not in validate_kwargs: + raise ValueError( + "validate_backend must be a boolean, string, or 
dictionary and contain " + f"a target URL. Got: {validate_kwargs}" + ) + + if "method" not in validate_kwargs: + validate_kwargs["method"] = "GET" + + return validate_kwargs + + def _check_in_process(self): + if not self._in_process or self._async_client is None: + raise RuntimeError( + "Backend not started up for process, cannot process requests." + ) diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index e965c482..3040ad36 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -532,116 +532,117 @@ def __call__( :return: Updated aggregation state for progress reporting. """ _ = (request,) # unused - if request_info.status not in {"completed", "errored", "cancelled"}: - # Only compile progress stats for processed requests - return None - state["updated_generative_stats"] = True - start_time = scheduler_state.start_time - end_time = ( - safe_getattr(request_info.request_timings, "request_end") - or request_info.scheduler_timings.resolve_end + # Request Concurrency + state.set_metric( + key="requests", + value=scheduler_state.processing_requests, + type_="avg", ) - duration = end_time - start_time if end_time else None - - for prefix in (request_info.status, None): - requests_count = ( - scheduler_state.processed_requests - if prefix is None - else scheduler_state.successful_requests - if request_info.status == "completed" - else scheduler_state.cancelled_requests - if request_info.status == "cancelled" - else scheduler_state.errored_requests + + if request_info.status in {"completed", "errored", "cancelled"}: + # Only compile progress stats for processed requests + state["updated_generative_stats"] = True + start_time = scheduler_state.start_time + end_time = ( + safe_getattr(request_info.request_timings, "request_end") + or request_info.scheduler_timings.resolve_end ) + duration = end_time - start_time if end_time else None + + for prefix in (request_info.status, None): + requests_count = ( + scheduler_state.processed_requests + if prefix is None + else scheduler_state.successful_requests + if request_info.status == "completed" + else scheduler_state.cancelled_requests + if request_info.status == "cancelled" + else scheduler_state.errored_requests + ) - # Requests per Second - if duration is not None: - state.set_metric( - key="requests", - value=safe_divide(requests_count, duration), - type_="rate", + # Requests per Second + if duration is not None: + state.set_metric( + key="requests", + value=safe_divide(requests_count, duration), + type_="rate", + prefix=prefix, + ) + + # Request Latency + state.add_metric( + key="request_latency", + value=safe_getattr(request_info.request_timings, "request_end"), + start_val=safe_getattr( + request_info.request_timings, "request_start" + ), prefix=prefix, ) - # Request Concurrency - state.set_metric( - key="requests", - value=scheduler_state.processing_requests, - type_="avg", - prefix=prefix, - ) - - # Request Latency - state.add_metric( - key="request_latency", - value=safe_getattr(request_info.request_timings, "request_end"), - start_val=safe_getattr(request_info.request_timings, "request_start"), - prefix=prefix, - ) - - # Time to First Token - state.add_metric( - key="time_to_first_token", - value=safe_getattr(request_info.request_timings, "first_iteration"), - start_val=safe_getattr(request_info.request_timings, "request_start"), - prefix=prefix, - ) + # Time to First Token + state.add_metric( + key="time_to_first_token", + 
value=safe_getattr(request_info.request_timings, "first_iteration"), + start_val=safe_getattr( + request_info.request_timings, "request_start" + ), + prefix=prefix, + ) - output_tokens = safe_getattr(response, "output_tokens") - prompt_tokens = safe_getattr(response, "prompt_tokens") + output_tokens = response.output_stats.value() if response else None + prompt_tokens = response.prompt_stats.value() if response else None + total_tokens = response.total_tokens() if response else None - # Inter Token Latency - state.add_metric( - key="inter_token_latency", - value=safe_getattr(request_info.request_timings, "last_iteration"), - start_val=safe_getattr(request_info.request_timings, "first_iteration"), - count=( - output_tokens - 1 if output_tokens and output_tokens > 1 else None - ), - prefix=prefix, - ) + # Inter Token Latency + state.add_metric( + key="inter_token_latency", + value=safe_getattr(request_info.request_timings, "last_iteration"), + start_val=safe_getattr( + request_info.request_timings, "first_iteration" + ), + count=( + output_tokens - 1 + if output_tokens and output_tokens > 1 + else None + ), + prefix=prefix, + ) - # Time per Output Token - state.add_metric( - key="time_per_output_token", - value=safe_getattr(request_info.request_timings, "request_start"), - start_val=safe_getattr(request_info.request_timings, "last_iteration"), - count=output_tokens, - prefix=prefix, - ) + # Time per Output Token + state.add_metric( + key="time_per_output_token", + value=safe_getattr(request_info.request_timings, "request_start"), + start_val=safe_getattr( + request_info.request_timings, "last_iteration" + ), + count=output_tokens, + prefix=prefix, + ) - # Prompt Tokens - state.add_metric( - key="prompt_tokens", - value=prompt_tokens, - duration=duration, - prefix=prefix, - ) + # Prompt Tokens + state.add_metric( + key="prompt_tokens", + value=prompt_tokens, + duration=duration, + prefix=prefix, + ) - # Output Tokens - state.add_metric( - key="output_tokens", - value=output_tokens, - duration=duration, - prefix=prefix, - ) + # Output Tokens + state.add_metric( + key="output_tokens", + value=output_tokens, + duration=duration, + prefix=prefix, + ) - # Total Tokens - state.add_metric( - key="total_tokens", - value=( - prompt_tokens + output_tokens - if all_defined(prompt_tokens, output_tokens) - else prompt_tokens - if all_defined(prompt_tokens) - else output_tokens - if all_defined(output_tokens) - else None - ), - duration=duration, - prefix=prefix, - ) + # Total Tokens + state.add_metric( + key="total_tokens", + value=total_tokens, + duration=duration, + prefix=prefix, + ) return state @@ -929,29 +930,29 @@ def _is_in_cooldown( @classmethod def _create_generative_request_stats( cls, - response: GenerationResponse, + response: GenerationResponse | None, request: GenerationRequest, request_info: ScheduledRequestInfo, ) -> GenerativeRequestStats: - prompt_tokens = response.preferred_prompt_tokens( - settings.preferred_prompt_tokens_source - ) - output_tokens = response.preferred_output_tokens( - settings.preferred_output_tokens_source - ) - return GenerativeRequestStats( request_id=request.request_id, request_type=request.request_type, - prompt=str(request.content), - request_args=response.request_args, - output=response.value, - iterations=response.iterations, - prompt_tokens=prompt_tokens, - output_tokens=output_tokens, + request_args=request.arguments, + output=response.text if response else None, + iterations=response.iterations if response else 0, + prompt_tokens=( + 
response.prompt_stats.value(settings.preferred_prompt_tokens_source) + if response + else None + ), + output_tokens=( + response.output_stats.value(settings.preferred_output_tokens_source) + if response + else None + ), total_tokens=( - prompt_tokens + output_tokens - if prompt_tokens is not None and output_tokens is not None + response.total_tokens(settings.preferred_output_tokens_source) + if response else None ), scheduler_info=request_info, diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 828402d8..23bc985a 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -1,11 +1,11 @@ from __future__ import annotations -from collections.abc import Iterable from pathlib import Path from typing import Any, Literal -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict +from torch.utils.data import Sampler from transformers import ( # type: ignore[import] + AutoTokenizer, PreTrainedTokenizerBase, ) @@ -26,6 +26,7 @@ from guidellm.benchmark.benchmarker import Benchmarker from guidellm.benchmark.objects import GenerativeBenchmark, GenerativeBenchmarksReport from guidellm.benchmark.output import ( + GenerativeBenchmarkerConsole, GenerativeBenchmarkerOutput, ) from guidellm.benchmark.profile import Profile, ProfileType @@ -33,8 +34,14 @@ BenchmarkerProgress, BenchmarkerProgressGroup, ) -from guidellm.benchmark.scenario import GenerativeTextScenario, Scenario -from guidellm.request import GenerativeRequestLoader +from guidellm.data import ( + DatasetPreprocessor, + GenerativeColumnMapper, + GenerativeDataLoader, + GenerativeRequestCollator, + GenerativeRequestFormatter, +) +from guidellm.data.objects import GenerativeDatasetArgs from guidellm.scheduler import ( ConstraintInitializer, NonDistributedEnvironment, @@ -44,7 +51,6 @@ __all__ = [ "benchmark_generative_text", - "benchmark_with_scenario", "reimport_benchmarks_report", ] @@ -52,113 +58,13 @@ _CURRENT_WORKING_DIR = Path.cwd() -# Data types - -DataType = ( - Iterable[str] - | Iterable[dict[str, Any]] - | Dataset - | DatasetDict - | IterableDataset - | IterableDatasetDict - | str - | Path -) - -OutputFormatType = ( - tuple[str, ...] - | list[str] - | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] - | None -) - - -# Helper functions - -async def initialize_backend( - backend: BackendType | Backend, - target: str, - model: str | None, - backend_kwargs: dict[str, Any] | None, -) -> Backend: - backend = ( - Backend.create( - backend, target=target, model=model, **(backend_kwargs or {}) - ) - if not isinstance(backend, Backend) - else backend - ) - await backend.process_startup() - await backend.validate() - return backend - - -async def resolve_profile( - constraint_inputs: dict[str, int | float], - profile: Profile | str | None, - rate: list[float] | None, - random_seed: int, - constraints: dict[str, ConstraintInitializer | Any], -): - for key, val in constraint_inputs.items(): - if val is not None: - constraints[key] = val - if not isinstance(profile, Profile): - if isinstance(profile, str): - profile = Profile.create( - rate_type=profile, - rate=rate, - random_seed=random_seed, - constraints={**constraints}, - ) - else: - raise ValueError(f"Expected string for profile; got {type(profile)}") - - elif constraints: - raise ValueError( - "Constraints must be empty when providing a Profile instance. 
" - f"Provided constraints: {constraints} ; provided profile: {profile}" - ) - return profile - -async def resolve_output_formats( - output_formats: OutputFormatType, - output_path: str | Path | None, -) -> dict[str, GenerativeBenchmarkerOutput]: - output_formats = GenerativeBenchmarkerOutput.resolve( - output_formats=(output_formats or {}), output_path=output_path - ) - return output_formats - -async def finalize_outputs( - report: GenerativeBenchmarksReport, - resolved_output_formats: dict[str, GenerativeBenchmarkerOutput] -): - output_format_results = {} - for key, output in resolved_output_formats.items(): - output_result = await output.finalize(report) - output_format_results[key] = output_result - return output_format_results - - -# Complete entrypoints - -async def benchmark_with_scenario(scenario: Scenario, **kwargs): - """ - Run a benchmark using a scenario and specify any extra arguments - """ - - if isinstance(scenario, GenerativeTextScenario): - return await benchmark_generative_text(**vars(scenario), **kwargs) - else: - raise ValueError(f"Unsupported Scenario type {type(scenario)}") - - # @validate_call(config={"arbitrary_types_allowed": True}) -async def benchmark_generative_text( # noqa: C901 +async def benchmark_generative_text( # noqa: C901, PLR0915 + # Required target: str, - data: DataType, - profile: StrategyType | ProfileType | Profile, + data: list[Any], + # Benchmark configuration + profile: StrategyType | ProfileType | Profile = "sweep", rate: float | list[float] | None = None, random_seed: int = 42, # Backend configuration @@ -168,11 +74,22 @@ async def benchmark_generative_text( # noqa: C901 # Data configuration processor: str | Path | PreTrainedTokenizerBase | None = None, processor_args: dict[str, Any] | None = None, - data_args: dict[str, Any] | None = None, - data_sampler: Literal["random"] | None = None, + data_args: list[GenerativeDatasetArgs] | None = None, + data_samples: int = -1, + data_column_mapper: GenerativeColumnMapper | None = None, + data_preprocessors: list[DatasetPreprocessor] | None = None, + data_request_formatter: GenerativeRequestFormatter | None = None, + dataloader_sampler: Sampler[int] | Literal["shuffle"] | None = None, + dataloader_collate_fn: GenerativeRequestCollator | None = None, + dataloader_kwargs: dict[str, Any] | None = None, # Output configuration output_path: str | Path | None = _CURRENT_WORKING_DIR, - output_formats: OutputFormatType = ("console", "json", "html", "csv"), + output_formats: ( + tuple[str, ...] + | list[str] + | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] + | None + ) = ("console", "json", "html", "csv"), # Updates configuration progress: tuple[str, ...] 
| list[str] | list[BenchmarkerProgress] | None = None, print_updates: bool = False, @@ -196,7 +113,16 @@ async def benchmark_generative_text( # noqa: C901 with console.print_update_step( title=f"Initializing backend {backend}" ) as console_step: - backend = await initialize_backend(backend, target, model, backend_kwargs) + backend = ( + Backend.create( + backend, target=target, model=model, **(backend_kwargs or {}) + ) + if not isinstance(backend, Backend) + else backend + ) + console_step.update(f"{backend.__class__.__name__} backend initialized") + await backend.process_startup() + await backend.validate() console_step.finish( title=f"{backend.__class__.__name__} backend initialized", details=backend.info, @@ -236,19 +162,36 @@ async def benchmark_generative_text( # noqa: C901 with console.print_update_step( title=f"Initializing request loader from {data}" ) as console_step: - request_loader = GenerativeRequestLoader( + + def processor_factory() -> PreTrainedTokenizerBase: + nonlocal processor + if isinstance(processor, PreTrainedTokenizerBase): + return processor + else: + processor = AutoTokenizer.from_pretrained( + processor, + **(processor_args or {}), + ) + return processor + + request_loader = GenerativeDataLoader( data=data, data_args=data_args, - processor=processor, - processor_args=processor_args, - shuffle=data_sampler == "random", + data_samples=data_samples, + processor_factory=processor_factory, + column_mapper=data_column_mapper or GenerativeColumnMapper(), + preprocessors=data_preprocessors or [], + request_formatter=data_request_formatter or GenerativeRequestFormatter(), + sampler=dataloader_sampler, + collate_fn=dataloader_collate_fn, random_seed=random_seed, + **(dataloader_kwargs or {}), ) - unique_requests = request_loader.num_unique_items(raise_err=False) console_step.finish( title=( - f"Request loader initialized with {unique_requests} unique requests " - f"from {data}" + f"Request loader initialized with " + f"{data_samples if data_samples > 0 else 'inf'} " + f"unique requests from {data}" ), details=InfoMixin.extract_from_obj(request_loader), status_level="success", @@ -257,19 +200,27 @@ async def benchmark_generative_text( # noqa: C901 with console.print_update_step( title=f"Resolving profile {profile}" ) as console_step: - profile = await resolve_profile( - { - "max_seconds": max_seconds, - "max_requests": max_requests, - "max_errors": max_errors, - "max_error_rate": max_error_rate, - "max_global_error_rate": max_global_error_rate, - }, - profile, - rate, - random_seed, - constraints, - ) + for key, val in { + "max_seconds": max_seconds, + "max_requests": max_requests, + "max_errors": max_errors, + "max_error_rate": max_error_rate, + "max_global_error_rate": max_global_error_rate, + }.items(): + if val is not None: + constraints[key] = val + if not isinstance(profile, Profile): + profile = Profile.create( + rate_type=profile, + rate=rate, + random_seed=random_seed, + constraints={**constraints}, + ) + elif constraints: + raise ValueError( + "Constraints must be empty when providing a Profile instance. 
" + f"Provided constraints: {constraints} ; provided profile: {profile}" + ) console_step.finish( title=f"{profile.__class__.__name__} profile resolved", details=InfoMixin.extract_from_obj(profile), @@ -296,10 +247,12 @@ async def benchmark_generative_text( # noqa: C901 ) with console.print_update_step(title="Resolving output formats") as console_step: - resolved_output_formats = await resolve_output_formats(output_formats, output_path) + output_formats = GenerativeBenchmarkerOutput.resolve( + output_formats=(output_formats or {}), output_path=output_path + ) console_step.finish( title="Output formats resolved", - details={key: str(val) for key, val in resolved_output_formats.items()}, + details={key: str(val) for key, val in output_formats.items()}, status_level="success", ) @@ -335,11 +288,14 @@ async def benchmark_generative_text( # noqa: C901 if benchmark: report.benchmarks.append(benchmark) - output_format_results = await finalize_outputs(report, resolved_output_formats) + output_format_results = {} + for key, output in output_formats.items(): + output_result = await output.finalize(report) + output_format_results[key] = output_result console.print("\n\n") console.print_update( - title=f"Benchmarking complete; generated {len(report.benchmarks)} benchmark(s)", + title=f"Benchmarking complete, generated {len(report.benchmarks)} benchmark(s)", status="success", ) for key, value in output_format_results.items(): @@ -348,34 +304,20 @@ async def benchmark_generative_text( # noqa: C901 return report, output_format_results -async def reimport_benchmarks_report( - file: Path, - output_path: Path | None, - output_formats: OutputFormatType = ("console", "json", "html", "csv"), -) -> tuple[GenerativeBenchmarksReport, dict[str, Any]]: +def reimport_benchmarks_report(file: Path, output_path: Path | None) -> None: """ The command-line entry point for re-importing and displaying an - existing benchmarks report. Can also specify an output format. + existing benchmarks report. Can also specify Assumes the file provided exists. """ + report = GenerativeBenchmarksReport.load_file(file) + console_output = GenerativeBenchmarkerConsole() + console_output.finalize(report) console = Console() - with console.print_update_step( - title=f"Loading benchmarks from {file}" - ) as console_step: - report = GenerativeBenchmarksReport.load_file(file) - console_step.finish(f"Import of old benchmarks complete; loaded {len(report.benchmarks)} benchmark(s)") - - with console.print_update_step(title="Resolving output formats") as console_step: - resolved_output_formats = await resolve_output_formats(output_formats, output_path) - console_step.finish( - title="Output formats resolved", - details={key: str(val) for key, val in resolved_output_formats.items()}, - status_level="success", - ) - - output_format_results = await finalize_outputs(report, resolved_output_formats) - for key, value in output_format_results.items(): - console.print_update(title=f" {key:<8}: {value}", status="debug") - - return report, output_format_results + if output_path: + with console.print_update_step( + title=f"Saving benchmarks report to {output_path}..." 
+ ) as console_step: + saved_path = report.save_file(output_path) + console_step.finish(title=f"Benchmarks report saved to {saved_path}") diff --git a/src/guidellm/benchmark/objects.py b/src/guidellm/benchmark/objects.py index 8afabba9..a9b5ff79 100644 --- a/src/guidellm/benchmark/objects.py +++ b/src/guidellm/benchmark/objects.py @@ -34,6 +34,9 @@ from guidellm.benchmark.profile import ( Profile, ) +from guidellm.data import ( + GenerationRequestArguments, +) from guidellm.scheduler import ( ScheduledRequestInfo, SchedulerState, @@ -214,9 +217,8 @@ class GenerativeRequestStats(BenchmarkRequestStats): request_type: Literal["text_completions", "chat_completions"] = Field( description="Type of generative request: text or chat completion" ) - prompt: str = Field(description="Input text prompt for generation") - request_args: dict[str, Any] = Field( - description="Generation parameters and configuration options" + request_args: GenerationRequestArguments | None = Field( + default=None, description="Arguments passed to the backend for this request" ) output: str | None = Field( description="Generated text output, if request completed successfully" diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index d2f9d70c..fd2a3850 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -681,7 +681,10 @@ def next_strategy( prev_benchmark.metrics.requests_per_second.successful.mean ) if self.synchronous_rate <= 0 and self.throughput_rate <= 0: - raise RuntimeError("Invalid rates in sweep; aborting. Were there any successful requests?") + raise RuntimeError( + "Invalid rates in sweep; aborting. " + "Were there any successful requests?" + ) self.measured_rates = list( np.linspace( self.synchronous_rate, diff --git a/src/guidellm/data/__init__.py b/src/guidellm/data/__init__.py index 8a48204e..282c5b59 100644 --- a/src/guidellm/data/__init__.py +++ b/src/guidellm/data/__init__.py @@ -1,4 +1,48 @@ -""" -Required for python < 3.12 -https://docs.python.org/3/library/importlib.resources.html#importlib.resources.files -""" +from .datasets import GenerativeRequestsDataset +from .deserializers import ( + DataNotSupportedError, + DatasetDeserializer, + DatasetDeserializerFactory, +) +from .formatters import ( + GenerativeRequestFormatter, + JinjaEnvironmentMixin, + JinjaFiltersRegistry, + JinjaGlobalsRegistry, + JinjaTemplatesRegistry, +) +from .loaders import GenerativeDataLoader, GenerativeRequestCollator +from .objects import ( + GenerationRequest, + GenerationRequestArguments, + GenerationRequestTimings, + GenerativeDatasetArgs, + GenerativeDatasetColumnType, + GenerativeRequestType, +) +from .preprocessors import ( + DatasetPreprocessor, + GenerativeColumnMapper, +) + +__all__ = [ + "DataNotSupportedError", + "DatasetDeserializer", + "DatasetDeserializerFactory", + "DatasetPreprocessor", + "GenerationRequest", + "GenerationRequestArguments", + "GenerationRequestTimings", + "GenerativeColumnMapper", + "GenerativeDataLoader", + "GenerativeDatasetArgs", + "GenerativeDatasetColumnType", + "GenerativeRequestCollator", + "GenerativeRequestFormatter", + "GenerativeRequestType", + "GenerativeRequestsDataset", + "JinjaEnvironmentMixin", + "JinjaFiltersRegistry", + "JinjaGlobalsRegistry", + "JinjaTemplatesRegistry", +] diff --git a/src/guidellm/data/datasets.py b/src/guidellm/data/datasets.py new file mode 100644 index 00000000..8c24683c --- /dev/null +++ b/src/guidellm/data/datasets.py @@ -0,0 +1,88 @@ +from __future__ import annotations + +from 
collections.abc import Callable +from typing import Any + +from datasets import Dataset, IterableDataset +from transformers import PreTrainedTokenizerBase + +from guidellm.data.deserializers import DatasetDeserializerFactory +from guidellm.data.formatters import GenerativeRequestFormatter +from guidellm.data.objects import GenerativeDatasetArgs +from guidellm.data.preprocessors import ( + DatasetPreprocessor, + GenerativeColumnMapper, +) +from guidellm.data.utils import datasets_item_iterator, resolve_dataset_split + +__all__ = ["GenerativeRequestsDataset"] + + +class GenerativeRequestsDataset: + @classmethod + def build( + cls, + data: list[Any], + data_args: list[GenerativeDatasetArgs] | None, + data_samples: int, + processor_factory: Callable[[], PreTrainedTokenizerBase], + column_mapper: GenerativeColumnMapper, + preprocessors: list[DatasetPreprocessor], + request_formatter: GenerativeRequestFormatter, + random_seed: int = 42, + ) -> Dataset | IterableDataset: + if not data or not isinstance(data, list): + raise ValueError(f"Data must be a non-empty list, got {data}.") + + if data_args is None: + data_args = [GenerativeDatasetArgs() for _ in data] + + if len(data) != len(data_args): + raise ValueError( + f"Length of data ({len(data)}) must match length of data_args " + f"({len(data_args)})." + ) + + datasets = [] + for datum, args in zip(data, data_args): + datasets.append( + resolve_dataset_split( + dataset=DatasetDeserializerFactory.deserialize( + data=datum, + data_kwargs=args.to_kwargs(), + processor_factory=processor_factory, + random_seed=random_seed, + type_=args.type_, + ), + split=args.split, + ) + ) + + column_mapper.init_data(datasets=datasets, data_args=data_args) + request_formatter.init_data(datasets=datasets, data_args=data_args) + for preprocessor in preprocessors: + preprocessor.init_data(datasets=datasets, data_args=data_args) + + if data_samples > 0: + dataset = Dataset.from_list( + list( + datasets_item_iterator( + datasets=datasets, + data_samples=data_samples, + ) + ) + ) + else: + dataset = IterableDataset.from_generator( + datasets_item_iterator, + gen_kwargs={ + "datasets": datasets, + "data_samples": data_samples, + }, + ) + + dataset = dataset.map(column_mapper) + for preprocessor in preprocessors: + dataset = dataset.map(preprocessor) + + return dataset.map(request_formatter) diff --git a/src/guidellm/data/deserializers/__init__.py b/src/guidellm/data/deserializers/__init__.py new file mode 100644 index 00000000..fdee12ce --- /dev/null +++ b/src/guidellm/data/deserializers/__init__.py @@ -0,0 +1,51 @@ +from .deserializer import ( + DataNotSupportedError, + DatasetDeserializer, + DatasetDeserializerFactory, +) +from .file import ( + ArrowFileDatasetDeserializer, + CSVFileDatasetDeserializer, + DBFileDatasetDeserializer, + HDF5FileDatasetDeserializer, + JSONFileDatasetDeserializer, + ParquetFileDatasetDeserializer, + TarFileDatasetDeserializer, + TextFileDatasetDeserializer, +) +from .huggingface import HuggingFaceDatasetDeserializer +from .memory import ( + InMemoryCsvDatasetDeserializer, + InMemoryDictDatasetDeserializer, + InMemoryDictListDatasetDeserializer, + InMemoryItemListDatasetDeserializer, + InMemoryJsonStrDatasetDeserializer, +) +from .synthetic import ( + SyntheticTextDatasetConfig, + SyntheticTextDatasetDeserializer, + SyntheticTextGenerator, +) + +__all__ = [ + "ArrowFileDatasetDeserializer", + "CSVFileDatasetDeserializer", + "DBFileDatasetDeserializer", + "DataNotSupportedError", + "DatasetDeserializer", + "DatasetDeserializerFactory", + 
"HDF5FileDatasetDeserializer", + "HuggingFaceDatasetDeserializer", + "InMemoryCsvDatasetDeserializer", + "InMemoryDictDatasetDeserializer", + "InMemoryDictListDatasetDeserializer", + "InMemoryItemListDatasetDeserializer", + "InMemoryJsonStrDatasetDeserializer", + "JSONFileDatasetDeserializer", + "ParquetFileDatasetDeserializer", + "SyntheticTextDatasetConfig", + "SyntheticTextDatasetDeserializer", + "SyntheticTextGenerator", + "TarFileDatasetDeserializer", + "TextFileDatasetDeserializer", +] diff --git a/src/guidellm/data/deserializers/deserializer.py b/src/guidellm/data/deserializers/deserializer.py new file mode 100644 index 00000000..ed9050a1 --- /dev/null +++ b/src/guidellm/data/deserializers/deserializer.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +import contextlib +from collections.abc import Callable +from typing import Any, Protocol, Union, runtime_checkable + +from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict +from transformers import PreTrainedTokenizerBase + +from guidellm.utils import RegistryMixin + +__all__ = [ + "DataNotSupportedError", + "DatasetDeserializer", + "DatasetDeserializerFactory", +] + + +class DataNotSupportedError(Exception): + """Exception raised when data format is not supported by deserializer.""" + + +@runtime_checkable +class DatasetDeserializer(Protocol): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: ... + + +class DatasetDeserializerFactory( + RegistryMixin[Union["type[DatasetDeserializer]", DatasetDeserializer]], +): + @classmethod + def deserialize( + cls, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int = 42, + type_: str | None = None, + ) -> Dataset | IterableDataset | DatasetDict | IterableDatasetDict: + if type_ is not None: + deserializer = cls.get_registered_object(type_) + + if deserializer is None: + raise DataNotSupportedError( + f"Deserializer type '{type_}' is not registered. " + f"Available types: {cls.registry}" + ) + elif isinstance(deserializer, type): + deserializer_fn = deserializer() + else: + deserializer_fn = deserializer + + return deserializer_fn( + data=data, + data_kwargs=data_kwargs, + processor_factory=processor_factory, + random_seed=random_seed, + ) + + for deserializer in cls.registered_objects(): + deserializer_fn: DatasetDeserializer = ( + deserializer() if isinstance(deserializer, type) else deserializer + ) + + with contextlib.suppress(DataNotSupportedError): + return deserializer_fn( + data=data, + data_kwargs=data_kwargs, + processor_factory=processor_factory, + random_seed=random_seed, + ) + + raise DataNotSupportedError( + f"No suitable deserializer found for data {data} with kwargs {data_kwargs}." 
+ ) diff --git a/src/guidellm/data/deserializers/file.py b/src/guidellm/data/deserializers/file.py new file mode 100644 index 00000000..53688cf0 --- /dev/null +++ b/src/guidellm/data/deserializers/file.py @@ -0,0 +1,221 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Any, Callable + +import pandas as pd +from datasets import Dataset, load_dataset +from transformers import PreTrainedTokenizerBase + +from guidellm.data.deserializers.deserializer import ( + DataNotSupportedError, + DatasetDeserializer, + DatasetDeserializerFactory, +) + +__all__ = [ + "ArrowFileDatasetDeserializer", + "CSVFileDatasetDeserializer", + "DBFileDatasetDeserializer", + "HDF5FileDatasetDeserializer", + "JSONFileDatasetDeserializer", + "ParquetFileDatasetDeserializer", + "TarFileDatasetDeserializer", + "TextFileDatasetDeserializer", +] + + +@DatasetDeserializerFactory.register("text_file") +class TextFileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) # Ignore unused args format errors + + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() not in {".txt", ".text"} + ): + raise DataNotSupportedError( + "Unsupported data for TextFileDatasetDeserializer, " + f"expected str or Path to a local .txt or .text file, got {data}" + ) + + with path.open() as file: + lines = file.readlines() + + return Dataset.from_dict({"text": lines}, **data_kwargs) + + +@DatasetDeserializerFactory.register("csv_file") +class CSVFileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() != ".csv" + ): + raise DataNotSupportedError( + "Unsupported data for CSVFileDatasetDeserializer, " + f"expected str or Path to a local .csv file, got {data}" + ) + + return load_dataset("csv", data_files=str(path), **data_kwargs) + + +@DatasetDeserializerFactory.register("json_file") +class JSONFileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() not in {".json", ".jsonl"} + ): + raise DataNotSupportedError( + f"Unsupported data for JSONFileDatasetDeserializer, " + f"expected str or Path to a local .json or .jsonl file, got {data}" + ) + + return load_dataset("json", data_files=str(path), **data_kwargs) + + +@DatasetDeserializerFactory.register("parquet_file") +class ParquetFileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() != ".parquet" + ): + raise 
DataNotSupportedError( + f"Unsupported data for ParquetFileDatasetDeserializer, " + f"expected str or Path to a local .parquet file, got {data}" + ) + + return load_dataset("parquet", data_files=str(path), **data_kwargs) + + +@DatasetDeserializerFactory.register("arrow_file") +class ArrowFileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() != ".arrow" + ): + raise DataNotSupportedError( + f"Unsupported data for ArrowFileDatasetDeserializer, " + f"expected str or Path to a local .arrow file, got {data}" + ) + + return load_dataset("arrow", data_files=str(path), **data_kwargs) + + +@DatasetDeserializerFactory.register("hdf5_file") +class HDF5FileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() not in {".hdf5", ".h5"} + ): + raise DataNotSupportedError( + f"Unsupported data for HDF5FileDatasetDeserializer, " + f"expected str or Path to a local .hdf5 or .h5 file, got {data}" + ) + + return Dataset.from_pandas(pd.read_hdf(str(path)), **data_kwargs) + + +@DatasetDeserializerFactory.register("db_file") +class DBFileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() != ".db" + ): + raise DataNotSupportedError( + f"Unsupported data for DBFileDatasetDeserializer, " + f"expected str or Path to a local .db file, got {data}" + ) + + return Dataset.from_sql(con=str(path), **data_kwargs) + + +@DatasetDeserializerFactory.register("tar_file") +class TarFileDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + if ( + not isinstance(data, (str, Path)) + or not (path := Path(data)).exists() + or not path.is_file() + or path.suffix.lower() != ".tar" + ): + raise DataNotSupportedError( + f"Unsupported data for TarFileDatasetDeserializer, " + f"expected str or Path to a local .tar file, got {data}" + ) + + return load_dataset("webdataset", data_files=str(path), **data_kwargs) diff --git a/src/guidellm/data/deserializers/huggingface.py b/src/guidellm/data/deserializers/huggingface.py new file mode 100644 index 00000000..275f7180 --- /dev/null +++ b/src/guidellm/data/deserializers/huggingface.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Any, Callable + +from datasets import ( + Dataset, + DatasetDict, + IterableDataset, + IterableDatasetDict, + load_dataset, + load_from_disk, +) +from transformers import PreTrainedTokenizerBase + +from guidellm.data.deserializers.deserializer 
import ( + DataNotSupportedError, + DatasetDeserializer, + DatasetDeserializerFactory, +) + +__all__ = ["HuggingFaceDatasetDeserializer"] + + +@DatasetDeserializerFactory.register("huggingface") +class HuggingFaceDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) + + if isinstance( + data, (Dataset, IterableDataset, DatasetDict, IterableDatasetDict) + ): + return data + + load_error = None + + if ( + isinstance(data, (str, Path)) + and (path := Path(data)).exists() + and ((path.is_file() and path.suffix == ".py") or path.is_dir()) + ): + # Handle python script or nested python script in a directory + try: + return load_dataset(str(data), **data_kwargs) + except Exception as err: # noqa: BLE001 + load_error = err + + if ( + isinstance(data, (str, Path)) + and (path := Path(data)).exists() + and path.is_dir() + ): + # Handle local dataset directory + try: + return load_from_disk(str(data), **data_kwargs) + except Exception as err: # noqa: BLE001 + load_error = err + + not_supported = DataNotSupportedError( + "Unsupported data for HuggingFaceDatasetDeserializer, " + "expected Dataset, IterableDataset, DatasetDict, IterableDatasetDict, " + "str or Path to a local dataset directory or a local .py dataset script, " + f"got {data} and HF load error: {load_error}" + ) + + if load_error is not None: + raise not_supported from load_error + else: + raise not_supported diff --git a/src/guidellm/data/deserializers/memory.py b/src/guidellm/data/deserializers/memory.py new file mode 100644 index 00000000..b04ea6bc --- /dev/null +++ b/src/guidellm/data/deserializers/memory.py @@ -0,0 +1,191 @@ +from __future__ import annotations + +import contextlib +import csv +import json +from io import StringIO +from typing import Any, Callable, cast + +from datasets import Dataset +from transformers import PreTrainedTokenizerBase + +from guidellm.data.deserializers.deserializer import ( + DataNotSupportedError, + DatasetDeserializer, + DatasetDeserializerFactory, +) + +__all__ = [ + "InMemoryCsvDatasetDeserializer", + "InMemoryDictDatasetDeserializer", + "InMemoryDictListDatasetDeserializer", + "InMemoryItemListDatasetDeserializer", + "InMemoryJsonStrDatasetDeserializer", +] + + +@DatasetDeserializerFactory.register("in_memory_dict") +class InMemoryDictDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) # Ignore unused args format errors + + if ( + not data + or not isinstance(data, dict) + or not all( + isinstance(key, str) and isinstance(val, list) + for key, val in data.items() + ) + ): + raise DataNotSupportedError( + f"Unsupported data for InMemoryDictDatasetDeserializer, " + f"expected dict[str, list], got {data}" + ) + + rows = len(list(data.values())[0]) + if not all(len(val) == rows for val in data.values()): + raise DataNotSupportedError( + "All lists in the data dictionary must have the same length, " + f"expected {rows} for all keys {list(data.keys())}" + ) + + return Dataset.from_dict(data, **data_kwargs) + + +@DatasetDeserializerFactory.register("in_memory_dict_list") +class InMemoryDictListDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + 
processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) # Ignore unused args format errors + + if ( + not data + or not isinstance(data, list) + or not all(isinstance(item, dict) for item in data) + or not all(isinstance(key, str) for item in data for key in item) + ): + raise DataNotSupportedError( + f"Unsupported data for InMemoryDictListDatasetDeserializer, " + f"expected list of dicts, got {data}" + ) + + data: list[dict[str, Any]] = cast("list[dict[str, Any]]", data) + first_keys = set(data[0].keys()) + for index, item in enumerate(data): + if set(item.keys()) != first_keys: + raise DataNotSupportedError( + f"All dictionaries must have the same keys. " + f"Expected keys: {first_keys}, " + f"got keys at index {index}: {set(item.keys())}" + ) + + # Convert list of dicts to dict of lists + result_dict = {key: [] for key in first_keys} + for item in data: + for key, value in item.items(): + result_dict[key].append(value) + + return Dataset.from_dict(result_dict, **data_kwargs) + + +@DatasetDeserializerFactory.register("in_memory_item_list") +class InMemoryItemListDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + _ = (processor_factory, random_seed) # Ignore unused args format errors + + primitive_types = (str, int, float, bool, type(None)) + if ( + not data + or not isinstance(data, list) + or not all(isinstance(item, primitive_types) for item in data) + ): + raise DataNotSupportedError( + f"Unsupported data for InMemoryItemListDatasetDeserializer, " + f"expected list of primitive items, got {data}" + ) + + column_name = data_kwargs.pop("column_name", "data") + + return Dataset.from_dict({column_name: data}, **data_kwargs) + + +@DatasetDeserializerFactory.register("in_memory_json_str") +class InMemoryJsonStrDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + if ( + isinstance(data, str) + and (json_str := data.strip()) + and ( + (json_str.startswith("{") and json_str.endswith("}")) + or (json_str.startswith("[") and json_str.endswith("]")) + ) + ): + with contextlib.suppress(Exception): + parsed = json.loads(data) + + for deserializer in [ + InMemoryDictDatasetDeserializer, + InMemoryDictListDatasetDeserializer, + InMemoryItemListDatasetDeserializer, + ]: + with contextlib.suppress(DataNotSupportedError): + return deserializer()( + parsed, data_kwargs, processor_factory, random_seed + ) + + raise DataNotSupportedError( + f"Unsupported data for InMemoryJsonStrDatasetDeserializer, " + f"expected JSON string with a list or dict of items, got {data}" + ) + + +@DatasetDeserializerFactory.register("in_memory_csv_str") +class InMemoryCsvDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> dict[str, list]: + if ( + isinstance(data, str) + and (csv_str := data.strip()) + and len(csv_str.split("\n")) > 0 + ): + with contextlib.suppress(Exception): + csv_buffer = StringIO(data) + reader = csv.DictReader(csv_buffer) + rows = list(reader) + + return InMemoryDictListDatasetDeserializer()( + rows, data_kwargs, processor_factory, random_seed + ) + + raise 
DataNotSupportedError( + f"Unsupported data for InMemoryCsvDatasetDeserializer, " + f"expected CSV string, got {type(data)}" + ) diff --git a/src/guidellm/data/deserializers/synthetic.py b/src/guidellm/data/deserializers/synthetic.py new file mode 100644 index 00000000..2335596d --- /dev/null +++ b/src/guidellm/data/deserializers/synthetic.py @@ -0,0 +1,255 @@ +from __future__ import annotations + +from collections.abc import Iterator +from pathlib import Path +from typing import Any, Callable + +import yaml +from datasets import Features, IterableDataset, Value +from faker import Faker +from pydantic import Field +from transformers import PreTrainedTokenizerBase + +from guidellm.data.deserializers.deserializer import ( + DataNotSupportedError, + DatasetDeserializer, + DatasetDeserializerFactory, +) +from guidellm.utils import IntegerRangeSampler, StandardBaseModel + +__all__ = [ + "SyntheticTextDatasetConfig", + "SyntheticTextDatasetDeserializer", + "SyntheticTextGenerator", +] + + +class SyntheticTextDatasetConfig(StandardBaseModel): + prompt_tokens: int = Field( + description="The average number of text tokens generated for prompts.", + gt=0, + ) + prompt_tokens_stdev: int | None = Field( + description="The standard deviation of the tokens generated for prompts.", + gt=0, + default=None, + ) + prompt_tokens_min: int | None = Field( + description="The minimum number of text tokens generated for prompts.", + gt=0, + default=None, + ) + prompt_tokens_max: int | None = Field( + description="The maximum number of text tokens generated for prompts.", + gt=0, + default=None, + ) + output_tokens: int = Field( + description="The average number of text tokens generated for outputs.", + gt=0, + ) + output_tokens_stdev: int | None = Field( + description="The standard deviation of the tokens generated for outputs.", + gt=0, + default=None, + ) + output_tokens_min: int | None = Field( + description="The minimum number of text tokens generated for outputs.", + gt=0, + default=None, + ) + output_tokens_max: int | None = Field( + description="The maximum number of text tokens generated for outputs.", + gt=0, + default=None, + ) + source: str = Field( + description="The source of the text data to be used for generation.", + default="data:prideandprejudice.txt.gz", + ) + + +class SyntheticTextGenerator: + def __init__( + self, + config: SyntheticTextDatasetConfig, + processor: PreTrainedTokenizerBase, + random_seed: int = 42, + ): + self.config = config + self.processor = processor + self.random_seed = random_seed + + def __iter__(self) -> Iterator[dict[str, Any]]: + samples_generated = 0 + + faker = Faker() + faker.seed_instance(self.random_seed) + prompt_tokens_sampler = iter( + IntegerRangeSampler( + average=self.config.prompt_tokens, + variance=self.config.prompt_tokens_stdev, + min_value=self.config.prompt_tokens_min, + max_value=self.config.prompt_tokens_max, + random_seed=self.random_seed, + ) + ) + output_tokens_sampler = iter( + IntegerRangeSampler( + average=self.config.output_tokens, + variance=self.config.output_tokens_stdev, + min_value=self.config.output_tokens_min, + max_value=self.config.output_tokens_max, + random_seed=self.random_seed + 1, # ensure diff dist from prompts + ) + ) + + while True: + prompt_tokens_count = next(prompt_tokens_sampler) + output_tokens_count = next(output_tokens_sampler) + + yield { + "prompt": self._create_prompt( + prompt_tokens_count, samples_generated, faker + ), + "prompt_tokens_count": prompt_tokens_count, + "output_tokens_count": output_tokens_count, + } 
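+            # The running sample index is incremented after each yield and embedded as a
+            # prefix in _create_prompt, so every synthetic prompt is unique (presumably to
+            # keep server-side prefix caching from skewing benchmark results).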
+ samples_generated += 1 + + def _create_prompt(self, prompt_tokens_count: int, index: int, faker: Faker) -> str: + prompt_token_ids = [] + avg_chars_per_token = 5 + margin_of_safety = 1.5 + attempts = 0 + + while len(prompt_token_ids) < prompt_tokens_count: + attempts += 1 + num_chars = ( + prompt_tokens_count * avg_chars_per_token * margin_of_safety * attempts + ) + text = f"{index} " + faker.text(max_nb_chars=num_chars) + prompt_token_ids = self.processor.encode(text) + + return self.processor.decode( + prompt_token_ids[:prompt_tokens_count], skip_special_tokens=True + ) + + +@DatasetDeserializerFactory.register("synthetic_text") +class SyntheticTextDatasetDeserializer(DatasetDeserializer): + def __call__( + self, + data: Any, + data_kwargs: dict[str, Any], + processor_factory: Callable[[], PreTrainedTokenizerBase], + random_seed: int, + ) -> IterableDataset: + # Config file pathways, deserialize and call self again + if (config := self._load_config_file(data)) is not None: + return self(config, data_kwargs, processor_factory, random_seed) + + # Config str pathways, deserialize and call self again + if (config := self._load_config_str(data)) is not None: + return self(config, data_kwargs, processor_factory, random_seed) + + if not isinstance(data, SyntheticTextDatasetConfig): + raise DataNotSupportedError( + "Unsupported data for SyntheticTextDatasetDeserializer, " + "expected SyntheticTextDatasetConfig, str or Path to a config file, " + f"got {data}" + ) + + return IterableDataset.from_generator( + lambda: SyntheticTextGenerator( + config=data, processor=processor_factory(), random_seed=random_seed + ), + features=Features( + { + "prompt": Value("string"), + "prompt_tokens_count": Value("int32"), + "output_tokens_count": Value("int32"), + } + ), + ) + + def _load_config_file(self, data: Any) -> SyntheticTextDatasetConfig | None: + if (not isinstance(data, str) and not isinstance(data, Path)) or ( + not Path(data).is_file() + ): + return None + + data_path = Path(data) if isinstance(data, str) else data + error = None + + if Path(data).is_file() and data_path.suffix.lower() == ".json": + try: + return SyntheticTextDatasetConfig.model_validate_json( + data_path.read_text() + ) + except Exception as err: # noqa: BLE001 + error = err + + if Path(data).is_file() and data_path.suffix.lower() in { + ".yaml", + ".yml", + ".config", + }: + try: + return SyntheticTextDatasetConfig.model_validate( + yaml.safe_load(data_path.read_text()) + ) + except Exception as err: # noqa: BLE001 + error = err + + err_message = ( + f"Unsupported file {data_path} for " + f"SyntheticTextDatasetDeserializer, expected .json, " + f".yaml, .yml, or .config" + ) + + if error is not None: + err_message += f" with error: {error}" + raise DataNotSupportedError(err_message) from error + raise DataNotSupportedError(err_message) + + def _load_config_str(self, data: str) -> SyntheticTextDatasetConfig | None: + if not isinstance(data, str): + return None + + data_str = data.strip() + error = None + + if (data_str.startswith("{") and data_str.endswith("}")) or ( + data_str.startswith("[") and data_str.endswith("]") + ): + try: + return SyntheticTextDatasetConfig.model_validate_json(data_str) + except Exception as err: # noqa: BLE001 + error = err + + if data_str.count("=") > 1: + # key=value pairs separated by commas + try: + config_dict = {} + items = data_str.split(",") + for item in items: + key, value = item.split("=") + config_dict[key.strip()] = ( + int(value.strip()) + if value.strip().isnumeric() + else value.strip() + 
) + + return SyntheticTextDatasetConfig.model_validate(config_dict) + except Exception as err: # noqa: BLE001 + error = err + + err_message = ( + "Unsupported string data for SyntheticTextDatasetDeserializer, " + f"expected JSON or key-value pairs, got {data}" + ) + if error is not None: + err_message += f" with error: {error}" + raise DataNotSupportedError(err_message) from error + raise DataNotSupportedError(err_message) diff --git a/src/guidellm/data/formatters/__init__.py b/src/guidellm/data/formatters/__init__.py new file mode 100644 index 00000000..0a5ccbc9 --- /dev/null +++ b/src/guidellm/data/formatters/__init__.py @@ -0,0 +1,47 @@ +from .environment import JinjaEnvironmentMixin +from .filters import ( + JinjaFiltersRegistry, + download_audio, + download_image, + download_video, + encode_audio, + encode_image, + encode_image_base64, + encode_video, + encode_video_base64, + get_file_format, + is_url, + resize_image, +) +from .globals import JinjaGlobalsRegistry +from .objects import GenerativeRequestFormatter +from .templates import ( + DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE, + DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE, + DEFAULT_CHAT_COMPLETIONS_TEMPLATE, + DEFAULT_TEXT_COMPLETIONS_TEMPLATE, + JinjaTemplatesRegistry, +) + +__all__ = [ + "DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE", + "DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE", + "DEFAULT_CHAT_COMPLETIONS_TEMPLATE", + "DEFAULT_TEXT_COMPLETIONS_TEMPLATE", + "GenerativeRequestFormatter", + "JinjaEnvironmentMixin", + "JinjaFiltersRegistry", + "JinjaGlobalsRegistry", + "JinjaTemplatesRegistry", + "download_audio", + "download_image", + "download_video", + "encode_audio", + "encode_image", + "encode_image_base64", + "encode_video", + "encode_video_base64", + "get_file_format", + "is_url", + "resize_image", +] diff --git a/src/guidellm/data/formatters/environment.py b/src/guidellm/data/formatters/environment.py new file mode 100644 index 00000000..bd37e26b --- /dev/null +++ b/src/guidellm/data/formatters/environment.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +from typing import Any, ClassVar + +from jinja2 import Template +from jinja2.nativetypes import NativeEnvironment, NativeTemplate + +from guidellm.data.formatters.filters import JinjaFiltersRegistry +from guidellm.data.formatters.globals import JinjaGlobalsRegistry +from guidellm.data.formatters.templates import JinjaTemplatesRegistry + +__all__ = ["JinjaEnvironmentMixin"] + + +class JinjaEnvironmentMixin: + jinja_environment: ClassVar[NativeEnvironment | None] = None + + @classmethod + def create_environment(cls, **env_kwargs: Any) -> NativeEnvironment: + if "autoescape" not in env_kwargs: + env_kwargs["autoescape"] = False + + extensions = env_kwargs.pop("extensions", []) + extensions = set(extensions) | {"jinja2.ext.do"} + + env = NativeEnvironment(extensions=list(extensions), **env_kwargs) # noqa: S701 + + # Attach registered filters + filters_registry = JinjaFiltersRegistry.registry # type: ignore[misc] + if filters_registry: + for name, func in filters_registry.items(): + env.filters[name] = func + + # Attach registered globals + globals_registry = JinjaGlobalsRegistry.registry # type: ignore[misc] + if globals_registry: + for name, value in globals_registry.items(): + env.globals[name] = value + + cls.jinja_environment = env + return env + + @classmethod + def get_environment(cls) -> NativeEnvironment: + if cls.jinja_environment is None: + raise ValueError( + "Jinja environment is not initialized. Call create_environment first." 
+ ) + return cls.jinja_environment + + @classmethod + def template_from_source(cls, source: str | Template) -> NativeTemplate: + if isinstance(source, Template): + return source + env = cls.get_environment() + return env.from_string(source) + + @classmethod + def template_from_registry(cls, name: str) -> NativeTemplate: + template = JinjaTemplatesRegistry.get_registered_object(name) + if template is None: + raise ValueError(f"Template '{name}' not found in registry.") + return cls.template_from_source(template) diff --git a/src/guidellm/data/formatters/filters.py b/src/guidellm/data/formatters/filters.py new file mode 100644 index 00000000..8dd4e445 --- /dev/null +++ b/src/guidellm/data/formatters/filters.py @@ -0,0 +1,324 @@ +from __future__ import annotations + +import base64 +import io +from pathlib import Path +from typing import Any, Callable, Literal + +import datasets +import httpx +import librosa +import numpy as np +import soundfile +from PIL import Image as PILImage + +from guidellm.utils import RegistryMixin + +__all__ = [ + "JinjaFiltersRegistry", + "download_audio", + "download_image", + "download_video", + "encode_audio", + "encode_image", + "encode_image_base64", + "encode_video", + "encode_video_base64", + "get_file_format", + "is_url", + "resize_image", +] + + +class JinjaFiltersRegistry(RegistryMixin[Callable[..., Any]]): + pass + + +@JinjaFiltersRegistry.register("is_url") +def is_url(text: Any) -> bool: + return isinstance(text, str) and text.startswith(("http://", "https://")) + + +@JinjaFiltersRegistry.register("encode_image") +def encode_image( + image: bytes | str | Path | np.ndarray | PILImage.Image | datasets.Image, + max_size: int | None = None, + max_width: int | None = None, + max_height: int | None = None, + encode_type: Literal["base64", "url"] | None = None, +) -> str: + """ + Input image types: + - bytes: raw image bytes, decoded with Pillow + - str: file path on disk, url, or already base64 encoded image string + - pathlib.Path: file path on disk + - np.ndarray: image array, decoded with Pillow + - PIL.Image.Image: Pillow image + - datasets.Image: HuggingFace datasets Image object + + max_size: maximum size of the longest edge of the image + max_width: maximum width of the image + max_height: maximum height of the image + + encode_type: None to return the supported format + (url for url, base64 string for others) + "base64" to return base64 encoded string (or download URL and encode) + "url" to return url (only if input is url, otherwise fails) + + Returns a str of either: + - image url + - "data:image/{type};base64, {data}" string + """ + url = is_url(image) + + if ( + url + and (encode_type is None or encode_type == "url") + and (max_size is not None or max_width is not None or max_height is not None) + ): + raise ValueError("Cannot resize image when encode_type is 'url'") + elif url and (encode_type is None or encode_type == "url"): + return image + elif url and encode_type == "base64": + raise ValueError(f"Cannot convert non-url image to URL {image}") + + return encode_image_base64( + image=image, + max_size=max_size, + max_width=max_width, + max_height=max_height, + ) + + +@JinjaFiltersRegistry.register("encode_image_base64") +def encode_image_base64( + image: bytes | str | Path | np.ndarray | PILImage.Image, + width: int | None = None, + height: int | None = None, + max_width: int | None = None, + max_height: int | None = None, + max_size: int | None = None, +) -> str: + if ( + isinstance(image, str) + and image.startswith("data:image/") + and 
";base64," in image + ): + return image + + if is_url(image): + image = download_image(image) + + if isinstance(image, bytes): + image = PILImage.open(io.BytesIO(image)) + elif isinstance(image, (str, Path)): + image = PILImage.open(image) + elif isinstance(image, np.ndarray): + image = PILImage.fromarray(image) + elif not isinstance(image, PILImage.Image): + raise ValueError(f"Unsupported image type: {type(image)}") + + image = resize_image( + image, + width=width, + height=height, + max_width=max_width, + max_height=max_height, + max_size=max_size, + ) + if image.mode != "RGB": + image = image.convert("RGB") + + buffer = io.BytesIO() + image.save(buffer, format="JPEG") + image_bytes = buffer.getvalue() + image_base64 = base64.b64encode(image_bytes).decode("utf-8") + + return f"data:image/jpeg;base64,{image_base64}" + + +@JinjaFiltersRegistry.register("resize_image") +def resize_image( + image: PILImage.Image, + width: int | None = None, + height: int | None = None, + max_width: int | None = None, + max_height: int | None = None, + max_size: int | None = None, +) -> PILImage.Image: + if not isinstance(image, PILImage.Image): + raise ValueError(f"Unsupported image type: {type(image)}") + + if width is not None and height is not None: + return image.resize((width, height), PILImage.Resampling.BILINEAR) + + orig_w, orig_h = image.size + aspect = orig_w / orig_h + + if width is not None: + target_w = width + target_h = round(width / aspect) + elif height is not None: + target_h = height + target_w = round(height * aspect) + else: + target_w, target_h = orig_w, orig_h + + # Normalize max_size → max_width/max_height + if max_size is not None: + max_width = max_width or max_size + max_height = max_height or max_size + + # Apply max constraints (preserve aspect ratio) + if max_width or max_height: + scale_w = max_width / target_w if max_width else 1.0 + scale_h = max_height / target_h if max_height else 1.0 + scale = min(scale_w, scale_h, 1.0) # never upscale + target_w = round(target_w * scale) + target_h = round(target_h * scale) + + if (target_w, target_h) != (orig_w, orig_h): + image = image.resize((target_w, target_h), PILImage.Resampling.BILINEAR) + + return image + + +@JinjaFiltersRegistry.register("download_image") +def download_image(url: str) -> bytes: + response = httpx.get(url) + response.raise_for_status() + return response.content + + +@JinjaFiltersRegistry.register("encode_video") +def encode_video( + video: bytes | str | Path | datasets.Video, + encode_type: Literal["base64", "url"] | None = None, +) -> str: + """ + Input video types: + - bytes: raw video bytes + - str: file path on disk, url, or already base64 encoded video string + - pathlib.Path: file path on disk + - datasets.Video: HuggingFace datasets Video object + + encode_type: None to return the supported format + (url for url, base64 string for others) + "base64" to return base64 encoded string (or download URL and encode) + "url" to return url (only if input is url, otherwise fails) + + Returns a str of either: + - video url + - "data:video/{type};base64, {data}" string + """ + url = is_url(video) + + if url and (encode_type is None or encode_type == "url"): + return video + elif url and encode_type == "base64": + raise ValueError(f"Cannot encode URL video {video}") + + return encode_video_base64(video=video) + + +@JinjaFiltersRegistry.register("encode_video_base64") +def encode_video_base64(video: bytes | str | Path) -> str: + if ( + isinstance(video, str) + and video.startswith("data:video/") + and ";base64," in 
video + ): + return video + + video_format = "unknown" + + if is_url(video): + video, video_format = download_video(video) + + if isinstance(video, (str, Path)): + path = Path(video) + video = path.read_bytes() + video_format = get_file_format(path) + elif not isinstance(video, bytes): + raise ValueError(f"Unsupported video type: {type(video)}") + + video_base64 = base64.b64encode(video).decode("utf-8") + return f"data:video/{video_format};base64,{video_base64}" + + +@JinjaFiltersRegistry.register("download_video") +def download_video(url: str) -> tuple[bytes, str]: + response = httpx.get(url) + response.raise_for_status() + return response.content, get_file_format(url) + + +@JinjaFiltersRegistry.register("encode_audio") +def encode_audio( + audio: bytes | str | Path | dict | np.ndarray, + sample_rate: int | None = None, + max_duration: float | None = None, +) -> dict[str, str]: + """ + Input audio types: + - bytes: raw audio bytes + - str: file path on disk or URL + - pathlib.Path: file path on disk + - dict: {"data": base64_string, "format": "wav"} format + - numpy.ndarray: audio array, assumed to be at sample_rate if provided + + sample_rate: sample rate of the input audio if input is np.ndarray + target_sample_rate: resample to this rate if provided + duration: limit audio to this duration in seconds if provided + + Returns dict with format: + { + "data": base64_encoded_audio_bytes, + "format": "wav" + } + """ + if is_url(audio): + audio, _ = download_audio(audio) + + if isinstance(audio, dict): + if "data" not in audio: + raise ValueError("Audio dict must contain 'data' key") + audio = base64.b64decode(audio["data"]) + + if isinstance(audio, bytes): + audio_data, sample_rate = librosa.load(io.BytesIO(audio), sr=sample_rate) + elif isinstance(audio, (str, Path)): + audio_data, sample_rate = librosa.load(str(audio), sr=sample_rate) + elif isinstance(audio, np.ndarray): + if sample_rate is None: + raise ValueError("sample_rate must be provided for numpy arrays") + audio_data = audio + else: + raise ValueError(f"Unsupported audio type: {type(audio)}") + + if max_duration is not None: + max_samples = int(max_duration * sample_rate) + if len(audio_data) > max_samples: + audio_data = audio_data[:max_samples] + + buffer = io.BytesIO() + soundfile.write(buffer, audio_data, sample_rate, format="WAV", subtype="PCM_16") + + return {"data": buffer.getvalue(), "format": "wav"} + + +@JinjaFiltersRegistry.register("download_audio") +def download_audio(url: str) -> tuple[bytes, str]: + """Download audio from URL and return bytes with format.""" + response = httpx.get(url) + response.raise_for_status() + content = response.content + audio_format = get_file_format(url) + return content, audio_format + + +@JinjaFiltersRegistry.register("get_file_format") +def get_file_format(path: Path | str) -> str: + """Get file format from path extension.""" + suffix = Path(path).suffix.lower() + return suffix[1:] if suffix.startswith(".") else "unknown" diff --git a/src/guidellm/data/formatters/globals.py b/src/guidellm/data/formatters/globals.py new file mode 100644 index 00000000..6c066191 --- /dev/null +++ b/src/guidellm/data/formatters/globals.py @@ -0,0 +1,9 @@ +from typing import Any + +from guidellm.utils import RegistryMixin + +__all__ = ["JinjaGlobalsRegistry"] + + +class JinjaGlobalsRegistry(RegistryMixin[Any]): + pass diff --git a/src/guidellm/data/formatters/objects.py b/src/guidellm/data/formatters/objects.py new file mode 100644 index 00000000..3e032089 --- /dev/null +++ 
b/src/guidellm/data/formatters/objects.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +from typing import Any, Literal + +from datasets import Dataset, IterableDataset +from jinja2 import Template + +from guidellm.data.formatters import JinjaEnvironmentMixin +from guidellm.data.objects import ( + GenerationRequest, + GenerationRequestArguments, + GenerativeDatasetArgs, + GenerativeRequestType, +) +from guidellm.data.preprocessors.objects import DatasetPreprocessor + +__all__ = ["GenerativeRequestFormatter"] + + +class GenerativeRequestFormatter(DatasetPreprocessor, JinjaEnvironmentMixin): + def __init__( + self, + request_type: GenerativeRequestType | str = "text_completions", + request_template: str | Template | None = None, + request_extras: dict[str, Any] | GenerationRequestArguments | None = None, + request_defaults: dict[str, Any] | GenerationRequestArguments | None = None, + environment_extras: dict[str, Any] | None = None, + ): + self.datasets: list[Dataset | IterableDataset] | None = None + self.data_args: list[GenerativeDatasetArgs] | None = None + + self.request_type = request_type + self.request_template = request_template + self.request_extras = request_extras or {} + self.request_defaults = request_defaults or { + "stream": True, + "json_body": { + "stream": True, + "stream_options": { + "include_usage": True, + }, + }, + } + self.environment_extras = environment_extras or {} + self.jinja_template: Template | None = None + + def init_data( + self, + datasets: list[Dataset | IterableDataset], + data_args: list[GenerativeDatasetArgs], + ): + self.datasets = datasets + self.data_args = data_args + + self.create_environment(**self.environment_extras) + self.jinja_template = ( + self.template_from_source(self.request_template) + if self.request_template + else self.template_from_registry(self.request_type) + ) + + def __call__( + self, item: dict[str, Any] + ) -> dict[Literal["request"], GenerationRequest]: + if self.jinja_template is None: + raise ValueError("GenerativeRequestCreator not initialized with data.") + + stats = {} + if "prompt_tokens_count" in item: + count = item["prompt_tokens_count"][0] + stats["prompt_tokens"] = count + item["prompt_tokens_count"] = count + if "output_tokens_count" in item: + count = item["output_tokens_count"][0] + stats["output_tokens"] = count + item["output_tokens_count"] = count + + return { + "request": { + "request_type": self.request_type, + "arguments": GenerationRequestArguments.model_combine_dict( + self.request_defaults, + self.request_extras, + self.jinja_template.render( + **item, + request_defaults=self.request_defaults, + request_extras=self.request_extras, + ), + ), + "stats": stats, + } + } diff --git a/src/guidellm/data/formatters/templates.py b/src/guidellm/data/formatters/templates.py new file mode 100644 index 00000000..2cf6e2f3 --- /dev/null +++ b/src/guidellm/data/formatters/templates.py @@ -0,0 +1,182 @@ +import textwrap +from typing import Union + +from jinja2 import Template + +from guidellm.utils import RegistryMixin + +__all__ = [ + "DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE", + "DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE", + "DEFAULT_CHAT_COMPLETIONS_TEMPLATE", + "DEFAULT_TEXT_COMPLETIONS_TEMPLATE", + "JinjaTemplatesRegistry", +] + + +class JinjaTemplatesRegistry(RegistryMixin[Union[Template, str]]): + pass + + +DEFAULT_TEXT_COMPLETIONS_TEMPLATE = JinjaTemplatesRegistry.register("text_completions")( + textwrap.dedent(""" + {% set obj = { + "json_body": { + "prompt": ( + text_column[0] + if text_column and 
text_column|length == 1 + else text_column + ) + } + } %} + + {% if output_tokens_count is defined and output_tokens_count is not none %} + {% do obj["json_body"].update({ + "max_tokens": output_tokens_count, + "max_completion_tokens": output_tokens_count, + "stop": None, + "ignore_eos": True + }) %} + {% elif max_tokens is defined and max_tokens is not none %} + {% do obj["json_body"].update({"max_tokens": max_tokens}) %} + {% elif max_completion_tokens is defined and max_completion_tokens is not none %} + {% do obj["json_body"].update({"max_completion_tokens": max_completion_tokens}) %} + {% endif %} + + {{ obj }} + """).strip() # noqa: E501 +) + +DEFAULT_CHAT_COMPLETIONS_TEMPLATE = JinjaTemplatesRegistry.register("chat_completions")( + textwrap.dedent(""" + {% set obj = { + "json_body": { + "messages": [ + { + "role": "user", + "content": [] + } + ] + } + } %} + + {%- for item in text_column or [] %} + {% do obj["json_body"].messages[0].content.append({"type": "text", "text": item}) %} + {%- endfor %} + + {%- for item in image_column or [] %} + {% do obj["json_body"].messages[0].content.append({ + "type": "image_url", + "image_url": encode_image( + item, + max_size=max_size|default(None), + max_width=max_width|default(None), + max_height=max_height|default(None), + encode_type=image_encode_type|default(encode_type|default(None)) + ) + }) %} + {%- endfor %} + + {%- for item in video_column or [] %} + {% do obj["json_body"].messages[0].content.append({ + "type": "video_url", + "video_url": encode_video( + item, + encode_type=video_encode_type|default(encode_type|default(None)) + ) + }) %} + {%- endfor %} + + {%- for item in audio_column or [] %} + {%- set audio_type, audio_val = encode_audio( + item, + sample_rate=sample_rate|default(None), + max_duration=max_duration|default(None), + encode_type=audio_encode_type|default(encode_type|default(None)) + ) -%} + {% do content_list.append({"type": audio_type, audio_type: audio_val}) %} + {%- endfor %} + + {% if output_tokens_count is defined and output_tokens_count is not none %} + {% do obj["json_body"].update({ + "max_completion_tokens": output_tokens_count, + "stop": None, + "ignore_eos": True + }) %} + {% elif max_tokens is defined and max_tokens is not none %} + {% do obj["json_body"].update({"max_completion_tokens": max_tokens}) %} + {% elif max_completion_tokens is defined and max_completion_tokens is not none %} + {% do obj["json_body"].update({"max_completion_tokens": max_completion_tokens}) %} + {% endif %} + + {{ obj }} + """).strip() # noqa: E501 +) + +DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE = JinjaTemplatesRegistry.register( + "audio_transcriptions" +)( + textwrap.dedent(""" + { + {%- if output_tokens_count_column is defined and output_tokens_count_column is not none -%} + "max_tokens": {{ output_tokens_count_column }}, + "max_completion_tokens": {{ output_tokens_count_column }}, + "stop": None, + "ignore_eos": True, + {%- else -%} + {%- if max_tokens is defined and max_tokens is not none -%} + "max_tokens": {{ max_tokens }}, + {%- endif -%} + {%- if max_completion_tokens is defined and max_completion_tokens is not none -%} + "max_completion_tokens": {{ max_completion_tokens }}, + {%- endif -%} + {%- endif -%} + "files": { + "file": {{ encode_audio_file( + audio_column[0], + encode_type=audio_encode_type|default(encode_type|default(None)) + ) }} + } + {%- if text_column and text_column|length > 0 -%} + , + "json": { + "prompt": {{ text_column[0] }} + } + {%- endif -%} + } + """).strip() # noqa: E501 +) + 
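+
+# Illustrative sketch: the registry above is extensible, so a custom request template
+# can be registered under a new name and then selected via
+# GenerativeRequestFormatter(request_type="echo_completions"). The "echo_completions"
+# name and the template body below are hypothetical examples under the APIs introduced
+# in this patch, not part of the shipped defaults.
+EXAMPLE_ECHO_COMPLETIONS_TEMPLATE = JinjaTemplatesRegistry.register("echo_completions")(
+    textwrap.dedent("""
+        {% set obj = {
+            "json_body": {
+                "prompt": text_column[0] if text_column else "",
+                "echo": True
+            }
+        } %}
+
+        {% if output_tokens_count is defined and output_tokens_count is not none %}
+        {% do obj["json_body"].update({"max_tokens": output_tokens_count, "ignore_eos": True}) %}
+        {% endif %}
+
+        {{ obj }}
+    """).strip()
+)
+# Usage note: GenerativeRequestFormatter(request_type="echo_completions") would then
+# render requests with this template (see formatters/objects.py in this patch).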
+DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE = JinjaTemplatesRegistry.register( + "audio_translations" +)( + textwrap.dedent(""" + { + {%- if output_tokens_count_column is defined and output_tokens_count_column is not none -%} + "max_tokens": {{ output_tokens_count_column }}, + "max_completion_tokens": {{ output_tokens_count_column }}, + "stop": None, + "ignore_eos": True, + {%- else -%} + {%- if max_tokens is defined and max_tokens is not none -%} + "max_tokens": {{ max_tokens }}, + {%- endif -%} + {%- if max_completion_tokens is defined and max_completion_tokens is not none -%} + "max_completion_tokens": {{ max_completion_tokens }}, + {%- endif -%} + {%- endif -%} + "files": { + "file": {{ encode_audio_file( + audio_column[0], + encode_type=audio_encode_type|default(encode_type|default(None)) + ) }} + } + {%- if text_column and text_column|length > 0 -%} + , + "json": { + "prompt": {{ text_column[0] }} + } + {%- endif -%} + } + """).strip() # noqa: E501 +) diff --git a/src/guidellm/data/loaders.py b/src/guidellm/data/loaders.py new file mode 100644 index 00000000..ebecdb6f --- /dev/null +++ b/src/guidellm/data/loaders.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import Any, Literal + +from datasets import Dataset, IterableDataset +from torch.utils.data import DataLoader, Sampler +from transformers import PreTrainedTokenizerBase + +from guidellm.data.datasets import GenerativeRequestsDataset +from guidellm.data.formatters import GenerativeRequestFormatter +from guidellm.data.objects import GenerationRequest, GenerativeDatasetArgs +from guidellm.data.preprocessors import ( + DatasetPreprocessor, + GenerativeColumnMapper, +) + +__all__ = ["GenerativeDataLoader", "GenerativeRequestCollator"] + + +class GenerativeRequestCollator: + def __call__( + self, batch: list[dict[Literal["request"], dict[str, Any]]] + ) -> GenerationRequest: + if len(batch) != 1: + raise NotImplementedError( + f"Batch size greater than 1 is not currently supported. " + f"Got batch size: {len(batch)}" + ) + + return GenerationRequest.model_validate(batch[0]["request"]) + + +class GenerativeDataLoader(DataLoader[GenerationRequest]): + def __init__( + self, + data: list[Any], + data_args: list[GenerativeDatasetArgs] | None, + data_samples: int, + processor_factory: Callable[[], PreTrainedTokenizerBase], + column_mapper: GenerativeColumnMapper, + preprocessors: list[DatasetPreprocessor], + request_formatter: GenerativeRequestFormatter, + sampler: Sampler[int] | Literal["shuffle"] | None = None, + collate_fn: GenerativeRequestCollator | None = None, + num_workers: int | None = None, + random_seed: int = 42, + **kwargs: Any, + ): + dataset = GenerativeRequestsDataset.build( + data=data, + data_args=data_args, + data_samples=data_samples, + processor_factory=processor_factory, + column_mapper=column_mapper, + request_formatter=request_formatter, + preprocessors=preprocessors, + random_seed=random_seed, + ) + + if collate_fn is None: + collate_fn = GenerativeRequestCollator() + + # Handle sampler/shuffle logic based on dataset type + if sampler == "shuffle": + shuffle = True + sampler = None + elif isinstance(sampler, str) and sampler != "shuffle": + raise ValueError( + f"Invalid string sampler: {sampler}. " + f"Only 'shuffle' is supported as a string value." + ) + else: + shuffle = False + + if isinstance(dataset, IterableDataset) and sampler is not None: + raise ValueError( + "Samplers are not supported with IterableDataset. 
" + "Use shuffle=True or apply shuffling to the dataset directly." + ) + elif isinstance(dataset, Dataset) and shuffle: + dataset = dataset.shuffle(seed=random_seed) + shuffle = False + + super().__init__( + dataset=dataset, + batch_size=1, + shuffle=shuffle, + sampler=sampler, + collate_fn=collate_fn, + num_workers=num_workers or 0, + **kwargs, + ) diff --git a/src/guidellm/data/objects.py b/src/guidellm/data/objects.py new file mode 100644 index 00000000..04c5407d --- /dev/null +++ b/src/guidellm/data/objects.py @@ -0,0 +1,230 @@ +from __future__ import annotations + +import uuid +from typing import Any, Literal, get_args + +from pydantic import Field + +from guidellm.scheduler import ( + MeasuredRequestTimings, + SchedulerMessagingPydanticRegistry, +) +from guidellm.utils import StandardBaseDict, StandardBaseModel + +__all__ = [ + "GenerationRequest", + "GenerationRequestArguments", + "GenerationRequestTimings", + "GenerativeDatasetArgs", + "GenerativeDatasetColumnType", + "GenerativeRequestType", +] + + +GenerativeRequestType = Literal[ + "text_completions", + "chat_completions", + "audio_transcriptions", + "audio_translations", +] + +GenerativeDatasetColumnType = Literal[ + "prompt_tokens_count_column", + "output_tokens_count_column", + "text_column", + "image_column", + "video_column", + "audio_column", +] + + +class GenerationRequestArguments(StandardBaseDict): + @classmethod + def model_combine_dict( # noqa: C901, PLR0912 + cls, *arguments: GenerationRequestArguments | dict[str, Any] + ) -> dict[str, Any]: + combined = {} + + for args in arguments: + if ( + url := args.get("url") if isinstance(args, dict) else args.url + ) is not None: + combined["url"] = url + + if ( + path := args.get("path") if isinstance(args, dict) else args.path + ) is not None: + combined["path"] = path + + if ( + method := args.get("method") if isinstance(args, dict) else args.method + ) is not None: + combined["method"] = method + + if ( + stream := args.get("stream") if isinstance(args, dict) else args.stream + ) is not None: + combined["stream"] = stream + + if ( + content_body := ( + args.get("content_body") + if isinstance(args, dict) + else args.content_body + ) + ) is not None: + combined["content_body"] = content_body + + if ( + json_body := ( + args.get("json_body") if isinstance(args, dict) else args.json_body + ) + ) is not None: + if "json_body" not in combined: + combined["json_body"] = {} + combined["json_body"].update(json_body) + + if ( + files := args.get("files") if isinstance(args, dict) else args.files + ) is not None: + if "files" not in combined: + combined["files"] = {} + combined["files"].update(files) + + if ( + params := args.get("params") if isinstance(args, dict) else args.params + ) is not None: + if "params" not in combined: + combined["params"] = {} + combined["params"].update(params) + + if ( + headers := ( + args.get("headers") if isinstance(args, dict) else args.headers + ) + ) is not None: + if "headers" not in combined: + combined["headers"] = {} + combined["headers"].update(headers) + + return combined + + url: str | None = Field( + default=None, + description="The URL endpoint to which the request will be sent.", + ) + path: str | None = Field( + default=None, + description="The path to append to the base URL for the request.", + ) + method: str | None = Field( + default=None, + description="The HTTP method to use for the request (e.g., 'POST', 'GET').", + ) + stream: bool | None = Field( + default=None, + description="Whether to stream the response, if applicable.", + 
) + content_body: Any | None = Field( + default=None, + description="Raw content to send in the request body, if applicable.", + ) + json_body: dict[str, Any] | None = Field( + default=None, + description="JSON content to include in the request body, if applicable.", + ) + files: dict[str, Any] | None = Field( + default=None, + description="Files to include in the request, if applicable.", + ) + params: dict[str, Any] | None = Field( + default=None, + description="Query parameters to include in the request URL, if applicable.", + ) + headers: dict[str, str] | None = Field( + default=None, + description="HTTP headers to include in the request, if applicable.", + ) + + +@SchedulerMessagingPydanticRegistry.register() +class GenerationRequest(StandardBaseModel): + """Request model for backend generation operations.""" + + request_id: str = Field( + default_factory=lambda: str(uuid.uuid4()), + description="Unique identifier for the request.", + ) + request_type: GenerativeRequestType | str = Field( + description=( + "Type of request. If url is not provided in arguments, " + "this will be used to determine the request url." + ), + ) + arguments: GenerationRequestArguments = Field( + description=( + "Payload for the request, structured as a dictionary of arguments to pass " + "to the respective backend method. For example, can contain " + "'json', 'headers', 'files', etc." + ) + ) + stats: dict[Literal["prompt_tokens", "output_tokens"], int] = Field( + default_factory=dict, + description="Request statistics including prompt and output token counts.", + ) + + +@SchedulerMessagingPydanticRegistry.register() +@MeasuredRequestTimings.register("generation_request_timings") +class GenerationRequestTimings(MeasuredRequestTimings): + """Timing model for tracking generation request lifecycle events.""" + + timings_type: Literal["generation_request_timings"] = "generation_request_timings" + first_iteration: float | None = Field( + default=None, + description="Unix timestamp when the first generation iteration began.", + ) + last_iteration: float | None = Field( + default=None, + description="Unix timestamp when the last generation iteration completed.", + ) + + +class GenerativeDatasetArgs(StandardBaseDict): + type_: str | None = None + split: str | None = None + prompt_tokens_count_column: str | None = None + output_tokens_count_column: str | None = None + text_column: str | list[str] | None = None + image_column: str | list[str] | None = None + video_column: str | list[str] | None = None + audio_column: str | list[str] | None = None + + def to_kwargs(self) -> dict[str, Any]: + return { + key: value + for key, value in self.model_extra.items() + if not key.endswith("_column") + } + + def get_mapped_columns( + self, + ) -> dict[GenerativeDatasetColumnType | str, str | list[str]]: + column_mapping: dict[GenerativeDatasetColumnType | str, list[str] | None] = {} + + # Add in any non None columns from the fields + for column in get_args(GenerativeDatasetColumnType): + value = getattr(self, column) + if value is not None: + column_mapping[column] = value + + # Enable flexibility for extra columns to be passed through and referenced later + for extra in self.model_extra: + if ( + extra.endswith("_column") + and extra not in column_mapping + and self.model_extra[extra] is not None + ): + column_mapping[extra] = self.model_extra[extra] + + return column_mapping diff --git a/src/guidellm/data/preprocessors/__init__.py b/src/guidellm/data/preprocessors/__init__.py new file mode 100644 index 00000000..039f74a5 --- 
/dev/null
+++ b/src/guidellm/data/preprocessors/__init__.py
@@ -0,0 +1,7 @@
+from .mappers import GenerativeColumnMapper
+from .objects import DatasetPreprocessor
+
+__all__ = [
+    "DatasetPreprocessor",
+    "GenerativeColumnMapper",
+]
diff --git a/src/guidellm/data/preprocessors/mappers.py b/src/guidellm/data/preprocessors/mappers.py
new file mode 100644
index 00000000..1792cb7e
--- /dev/null
+++ b/src/guidellm/data/preprocessors/mappers.py
@@ -0,0 +1,115 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Literal
+
+from datasets import Dataset, IterableDataset
+
+from guidellm.data.objects import (
+    GenerativeDatasetArgs,
+    GenerativeDatasetColumnType,
+)
+from guidellm.data.preprocessors.objects import DatasetPreprocessor
+from guidellm.data.utils import DEFAULT_COLUMN_NAMES
+
+__all__ = ["ColumnMapping", "GenerativeColumnMapper"]
+
+
+@dataclass
+class ColumnMapping:
+    indices: list[int]
+    names: list[str]
+
+
+class GenerativeColumnMapper(DatasetPreprocessor):
+    def __init__(self):
+        self.datasets: list[Dataset | IterableDataset] | None = None
+        self.data_args: list[GenerativeDatasetArgs] | None = None
+        self.column_mapping: (
+            dict[GenerativeDatasetColumnType, ColumnMapping | None] | None
+        ) = None
+
+    def __call__(
+        self, row: dict[Literal["items"], tuple[dict[str, Any]]]
+    ) -> dict[str, Any]:
+        if (
+            self.datasets is None
+            or self.data_args is None
+            or self.column_mapping is None
+        ):
+            raise ValueError("GenerativeColumnMapper not initialized with data.")
+
+        mapped: dict[GenerativeDatasetColumnType, list[Any]] = {}
+        items = row.pop("items")
+
+        for column_type, column_mapping in self.column_mapping.items():
+            mapped[column_type] = [
+                items[index].get(name)
+                for index, name in zip(column_mapping.indices, column_mapping.names)
+            ]
+
+        return mapped
+
+    def init_data(
+        self,
+        datasets: list[Dataset | IterableDataset],
+        data_args: list[GenerativeDatasetArgs],
+    ):
+        self.datasets = datasets
+        self.data_args = data_args
+        self.column_mapping = self.generate_column_mapping()
+
+    def generate_column_mapping(
+        self,
+    ) -> dict[GenerativeDatasetColumnType, ColumnMapping]:
+        mappings: dict[GenerativeDatasetColumnType, ColumnMapping] = {}
+        # Map any columns specified in the GenerativeDatasetArgs first
+        self._fill_mappings_from_data_args(mappings)
+        # For standard column types not mapped, fill in first one found from defaults
+        self._fill_mappings_from_defaults(mappings)
+
+        return mappings
+
+    def _fill_mappings_from_data_args(
+        self, mappings: dict[GenerativeDatasetColumnType, ColumnMapping]
+    ):
+        for index, args in enumerate(self.data_args):
+            args_column_mappings = args.get_mapped_columns()
+            for column_type, column_name in args_column_mappings.items():
+                if column_type not in mappings:
+                    mappings[column_type] = ColumnMapping(indices=[], names=[])
+                column_mapping = mappings[column_type]
+
+                for name in (
+                    column_name if isinstance(column_name, list) else [column_name]
+                ):
+                    if name not in self.datasets[index].column_names:
+                        raise ValueError(
+                            f"Column '{name}' not found in dataset columns: "
+                            f"{self.datasets[index].column_names}"
+                        )
+                    column_mapping.indices.append(index)
+                    column_mapping.names.append(name)
+
+    def _fill_mappings_from_defaults(
+        self, mappings: dict[GenerativeDatasetColumnType, ColumnMapping]
+    ):
+        for column_type, default_names in DEFAULT_COLUMN_NAMES.items():
+            if column_type in mappings:
+                continue
+
+            for index, dataset in enumerate(self.datasets):
+                for name in default_names:
+                    if name in 
dataset.column_names: + mappings[column_type] = ColumnMapping( + indices=[index], names=[name] + ) + break + # Check for plural form of the name + if f"{name}s" in dataset.column_names: + mappings[column_type] = ColumnMapping( + indices=[index], names=[f"{name}s"] + ) + break + if column_type in mappings: + break diff --git a/src/guidellm/data/preprocessors/objects.py b/src/guidellm/data/preprocessors/objects.py new file mode 100644 index 00000000..831f944d --- /dev/null +++ b/src/guidellm/data/preprocessors/objects.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from typing import Any, Protocol, runtime_checkable + +from datasets import Dataset, IterableDataset + +from guidellm.data.objects import GenerativeDatasetArgs + +__all__ = ["DatasetPreprocessor"] + + +@runtime_checkable +class DatasetPreprocessor(Protocol): + def init_data( + self, + datasets: list[Dataset | IterableDataset], + data_args: list[GenerativeDatasetArgs], + ): ... + + def __call__(self, item: dict[str, Any]) -> dict[str, Any]: ... diff --git a/src/guidellm/data/prideandprejudice.txt.gz b/src/guidellm/data/prideandprejudice.txt.gz deleted file mode 100644 index 8c7a10727c239964fff2e203f07318b29108eb2c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 241795 zcmV(rK<>XEiwFoug!X3w|8R0?WMyG)WN>n2YIS63V`VOMcys`az1xl>Ns=Y_u3y1; zr#rB~KJ11+1hTTaJ7c%44sEk5_&49kX}c<+ww zV@f}b@2&mNrtg-;9)}+q>?hjIv2A<%iFq?lo42lgpYgT%x@uTPFhG&k$=G4vU z*iH5u?Xa|yeaSvDwB5^TH(sZcjRb?Xchj5Mf6U|Nz3uzWvHR8@Ff9Dwxf>2MMvp(t z*J&EB!vPOFHs`MI+Sy;-{?#6_ABVX+w3B_mnfaB;esDQn`*fMcOFJz$4tP9cj4j?` zv`>tzyCj*x#?_A2)v(ZLUr> z8NX_|TU!~MpY5g@`02EaHXNG{+f$pymIjY~#89e{oSQ>yKW*Qc-`hz8`2C2%@Ey5* zY!a|dHt(mAzqd56_FElaYqOv5BA762ufkQ@hzW_G$<2-f=UxE%(HwZ}zRe z^!(#^xp9wd{*V9b|NcKWyW8fU+HrbmH$U`!J3K!B+rOpsGY4+()D3Ul!XL0rJ=#Xm z;D0&UqMx->www{3ElkI@T7|i}&05TQCv-kl$$q-Qm+g}{Nwy>V1_O!ndYr~Hhd)O2&p={(x$`!E@rX}qEx!m}Ocbg*-d zgJ6H#;dn0jN1B(Zjlaeo`QtcE_7dIs+#b5dmhSdc9m#~{wD+rQk?lfzr*`wl&W6?Y z=VtcZ$WwXbHksS$%{GHsKKr0OV%H`&2&eICZ(@^$KeYBW`^M0ITg-N}ui6S+l7@y8 z&l+r2zQjNLjt26UKlpXVIM2<*9crgW7BqkU&!?^bRkg$%5Uf%AY4byEbMF`JNz*+} zW(wah-!^!>sx7oV=yCIltoE@R@Q*ZWFiG~6mu~9kPcaI!$2L7*?B(BWk#Vdx&GBf5 z0kd$!vw#h_4u>uM(#1#y=0O zo6vJTv)Tss!9zQDFT9fG)@~Gh1y(2z{P+C#FPlG^{ci35wwro*OVhvF06%TA(c9SN zbhF#()plEKw)AY5&$HbNW;^VM`q3two3t97HX06}wqQQ1Z{6H2wBpU2=Fv83!;P`G z`IyTq@zlT)?e`70GL{R!|A;w@himb87ru-S5B3rMj{VVI?vOAL-@xa&?cZ>Ry`Stb zZ0v$MH5V+By#p3y(SIyhzlyA&Vqc95*tv2XCpW{Dqd{r7fzHr-$o{tScs%=^fW#C*aTwWsac=DL_A z++COS)r>Chhl^cJIHfu=Hb`xAJFrtTPIi+wwve{Va28sas`n8G2{#CiOnRBjoIPAG zP{EQuHn`^MiZ-LbL-jsh`$O8bvOv{&-u%_v zqF-zyc4h(aYJY&w^M~uck##=U0#D|qwOr(vahuL|yzB>YPxIIId%M*ZZD2c@FYVAy zO|PlbC22o&^D}6tYiQpS<_Saps#(pmY|qzomV8RgnXtR1+w~gz^L2j4Ncgy9N7~$t zBOjnGnWyYub&#}q=KXZNTzj+6TN-wAOl+bL_;Aob2G}@~7ATZikK2bH0UF|A+4Hf3J=PjkMcD4xiJd^TI>>sBd6v;;=NE zUAMS#(l&)Tp*uZ)_Gz#k!}px!p5tq$riTN*vR(hJ8O-FHUlaeDFy@ET^=unizWg6< z=3G4T25p=bNuXL&Sc+-(OA`!>Ja&)8q?MwYFulWtQzwYJ^Pt*rNX zeP_0xaa(eO(&}PeJH}u3al5p}2~I1`t3nAb#}lle&)11=)wz}B2KH00yy)C zGjm&3omwcj|1Ru@)y z*7o|BM$Vc2pKX!LXp3RT^sT!6@P)Gt5#HRnxI>TOp*OZ=Ursi2^97#CadboEkJ(@m z5*E(Bh@j(WE|=|r{C$N=Jn41>=;mXQc!!5+qw@{RX%n!#+sAJ7q-F+Dtwm9Jv_G7- z)uE5rgr3ZNHQ#AknNAymg@{n33jR(ri9+qv?;e&1fOqpz27;9C^$6|=zm zh>nn4Z2s0ZELV9PHvHp#Wa@#!c)+`!yKl=?9su|B1|9{hXfxRrv2^>5&ha+c&cIgC zgzlTm)FJZVaOli`GykWL9Pz0*$HJ+u?fLJpTkc0azj7HF})DLx(3_dp_u3X12G@U>Bba3ZDr#0hYS8|1A%< zv$RcaPP@T&{Fxh6Z+IQtWwLnod}$|lab|z&p_^@2FF;2Q^l8$u8TPU{llg^59vs_J zJUM1h=8opVy_+7diRa+bZQ%e!(0sO)$5J82^;o0?Fex=aI(p88D6I|m;I~WcfqP) 
z-XP9W$DIoUEf7ur(CMxR3hjX>Xs;_FDg8vPAj%Wk*i%k3<|$ighkKaj}HOsZ4-*64(nG{ePW!Wo@G@zoo zNwHkhml|FFO_sN(l^^PD?rRltrrR}{IwUn&P&s`tQ(B#`sM^Km#?nVgO`sraxpUCHLP{=+!8a`uPOA_fc9K6XNM3>c;_%?m+NxOQ>!Vml@B^|5Y}@9zY3Er7o_WV1%OIp&RcVh&emUu zqT-Wma~pGiv20Qa;}Kd$&$N_r3e5n%n?ruLudssF$Y0bx%){jl?0mQ6eW5JKwyp9O zd^fd}l)k(B87hITQ!Z<+7b{mX6$Q}3LQt5z&>VHfkr6=tEL(~T1uPjR8W_~oJ6f8l zAHnhv^N75}iLm7wv;a#}eOYVgfwD)n?3n&12jC=4jUndrm!;VGx$R>O1!QfEAP_x^;imcG%64iLd)cspzHdy9VUpOcBl4FJ*lOS@@h|7f?ZYc8Wm4EJ!It zdK*v)w*LOH$ccx8eFn;OKcs0&dKEs`gX%Kw&$O$tHCj`{_)-`GdjRE1;jw)lUxrgfbj291ANXLxM zPc`D;N4?E;nb{^(t6mdhMqGx=z|13AN61i7+JL0Dqd=kda6edRQ55g_M6crUIaHlL zfBN+0+%)^!^NYzZyUH~Up_Dw4EX@zeog!io=g#CW7d9igdQps+<-M1%ots3iJ-E|k znsI9XNJF zR4|PxQ6#PBw*qis?YsdSl;`n)LX%0t8Cym;dR7vpPqbI5K|rVY0M-98Xt}R>Sv@p(Pain$PT&ymB)8{TyQRyT#Vy ztAaaNVy$QodFw8l^~BOh3#Az?mb)<}>Cn?u6SL7F)y+)-Mmo_(6yQk0plU*IM-ncc zo>(NL(z?pHH@*|4;z9t|la(=USsO+7W>%K7A@@4XW-9Ucw zEFojLYk#fF^pZuZMY2{MjaI#LTW210o9d&1_Xrodb3HZ&X1dIA31=TqKW_d>&G*JL zHbt7^Y<^>LDzSMS@3?*f1lah(9yJOcHrlH|7h;BX>FvHHig>BPg$KQjU{B`{;_R(+ z6yj_@w1cjBEv1<0@ctFV%kolJ^8jxeE zd3A({fDDy184$J=?wCXfa!aN|Y>4B_ka_<|_S+Agz;H1Q6R#6d(&8gMo-vF8hW8zD zzyetwJw{#5H3sj;LT1gmvz{8G)68@-l_;R2HP1&o8?_|^V5{)Z^L%O$-&#gIeFcT! zP2E(@_kv+f7DGL~?Eo(syV4^;5>NR_MTkk=A#sQeH3#YM*s%1|u^=6dV*1#5z|>8o zu0#?;0BX?re~cD=_gd2C%7d}c?-?hrsb3JyS6=upabt%{+>tUcFf zvmaOboS$ziRm_D-Se7=4zf~>oMo$7}$>|yGua~ifc2o3hpGmq8!D$-+-t@I88wMJ; zM<0T#O%h$X)h&|}G8%5_hP@lg+wpsMb|12bp1DY(?Du4R}PL2ouj!Wh| zb=SK3IU8{a9?<7@0Cx|C=j@|w#1ST@)O2naZVUvkeN`R?cRJTj8TLt>qLBk1K~^uC zn^IBDt<*zT&4^^$HiIO%Rwj-)B#cy_Eg#gst5yweq;3SQSf7ymTPM)Ue^KDCv1w8t zW(g`i9AsWkqfy+M%cOK+-PLA?;*Bm-=ckYZJsGap%ntts#loL68yP z!ObjYRdm6-7|PZF@!GoO^|Cy&HPj)z;VDm9?o$5j@0A0tgS zRO?#5py7C86{_5bFdy|bzXtA}IHh%KnX`N36!W_-5RbOlnrM_kCv!$tqACA_&m??4 z?U|%_$dG%$NCB@PKZc|n3q<7|ZrIV5SOmeiI+&oo3^}az12a&HI&vs=iiu`aTMT2> zVSlKVfz+gqJ(cx?$4`wTd<~D+^1;vj?$)C)z1eJfy0V@5DD$5-I`zCNYuzpqX1Q(t z?2g(HlQ3zZrIFO1pV!U}nC=L}uUDzD+SuONCLi`{s1ouQ9a`I{oixY!@B#XwxDtxc z@KS*$^!#_t`1}#-#oz`kmcS$2I!QYjY~p;=E#wIo6aKr+FQd6&*{TpNK<6#?Rsu*< z(jPIX;pM#L0Is*K)}oPNk7L+_ta0ns;XWMZW7y`7#XDFl2ILf6 z#%PhXZC{b5Vw{(CsL9-Q-nfgUrUL*bLI7MzIS%XgKJ~8P3Lp&q#|BpY z0G>x^F8_GLqG(P}>FcSx`#%vy0Zk!L+O9nxT4UL_V6r;yeq39AvTc*EEkE*Z^H+5g z7_?kJQKf=Yty3dm>7-4LNNEQqt>t-J>}<^|LU3LNnMo8)>!f*ueaW=3Tla3^T@@{k zs!k~mPsl1$pJoTL`J;H-glb)LL{*rlz^8>QG&^cWW6zC(^=;lPZm#$f9(MD?Eq|uN7+owpgZkN(x0;AC6!Skan1DcaUmod1-Q@`3YTjBaQRZc0 z?{m^4IR*CO6+c8|TWB>z^5(ConJ0{gHyZQKbXV=&bDDH{|WhlyYj~l1fR65#xX&Y z9UNb4g4P(eK7Kr%^582&vVrIg)vmvlcufbviV`d);1SZ4FiO!balf8mrablL%2q`b zetqAagzAe`?O2#PizNX$V$dC@IjtI zLnC20)lWF?L zLc3;$C8KnFwDdD1rC89z8IAnS%gHq4i&dPaBMGDQWhAiurzu>3Q!%tzUJ6n&oSGw# zUvS#t;d5j@9xcy~%9Y?lV;G#uy3Ni>Tm9ObpTaxm8;l3V!*z_UT{v>uGvTjaCOTNumN(ksFr-&r6NlmX))QrUK zc~Ov0#JQ-}3^4F{-AIhh#9370LX9&&X{g~hb^PP_ZneDDLp5pe+81->w&y7f`>j~T zuqS_VW&S}NrPQ6FT-9SKOw_@4X<}KqlF6uBnyhO`h#y~y85t`tiU5f*`3}LN&O6dp z9A5UJi-h*iX#g4VhS4eEuA8107gGb6(@)?Y$E6H!$z`6{+hu2F$KEvm)gW|`{-s|y z-WxLqbVvOtq?6m?-^M0eg17KC)9vx}E#`%}C}N+671G;q>ZUozb2 zZqo%;Tfm5l6vSI4J@qfrZD47oC?$D{4o`Q(1yT&ve%#1q(Xyy1+Pl?)>UR2%`ScYQ z8g}kP=P7C9(avL}`#r&OT)fO8ILFe9(nfOX=P7z&ze(_{aR{3lDxVv=WkbvlDSnqf zZOe6Sasj;aka?XTd8!wh1VD0m*yh8|n--bf$XwKyKdu1sSTuF%)pF_K%Y(BQPjjL# z_H`WJa6S6IWu`vB=_m}~mu{LGsNoP=nk;p-6Uq%#meQeEqD#X+!8PA{nK;jm;x?0Q z72nqtcj!P)28mb05|G=weuN!;27J3RRy>@V^Igsp(t%J(x!e5Pd;=~|+dB(L3rV>C zAPWlIZT>ROZ)AncK0d|tsPBf41=|VGfKW9%FDX|548DY`I|pIw1o3-tY22mw3$Dn( z5%PB z^;wjT)anV0VvXcoI3r8TWYUob_WZ?2%!}rh7vwO!gS(z3ff~-+f+x1PUOE~k#jBd?U$*m zQiMrW6^lf3Z<Z4(PE5|W96qSUL_rAEhXZK@nI^%SfP+E9)ezNKU! 
zAsj4z%r_)u2GVDUQi2<^?altIOwXIjKDv(mJWI9(3T^y#K>>Y?q#@CFRPYw;<9gL6 zz|{$fFHr?EK)T!2=Y$MJvV3S`NGYRPlAx9>6D*rRKFMiWd@L8mdvE54fOp&ht^v$% zD)N_3k)JL3p}y}u8K$y^=u;OUM#G{-3h{rFvO)j$KHv1Q{UCn0shGQIQLCGEbk=X; z_b`|!s%nTHm*8(hO`I)b(xyJ^>5UK-uD1|B9GdQQQQO44 zxm!blFKu=zIYO$=HDz-b{a*?v%${wWc%|qx{>7uH=xMV{yEx!y*<#rVi8Rm~j1^jr z$T)>vvG|fBxjA&QL_javp+IbR>83XAZj0Nu=Lpegz4bXL7X-+fxL6+d7N**bk_*gU zHq~p>QNqa3Z^Qn=bJVoK>EKZPADvFmHual&bQ8NzlG3sWNfU54sNBf$!)j40CR`Td zYKC`@0()Rud_t@x(bCzJXlI4t9|V{s0WN)HV^fZ04LoyCDWK88V|z27rpC7fE@?K3 z6%L8qf8~bRU7oNz$W4BxQ3u`Cj0p_slu(vq=N0e#ifxplCRj_Wy~6t=8L#90O1tI# zC`FYv%_?oqSg3Omf`sH_pvF4 zfChF+Uc#;Xu@ZF+yWWY8*0%q z?i?eCcp~mQk%~~^_I@x5gzlpy<1q8r& zQ+-O?0WB{tUAFwOHBat;Z0dHK#C+fa1WL80*Uup$5I8fD>g&f5c&D_I(p{+u?A(8* zy&-y0SbaqIF$9hynIO;M8E>VQs z*UN!h|Aha7RP!FAlUqO?#kYEsLQ!uB$9tg;XWf;Z_AOo95XL(bKfO3!i`_(_ZZk^t zg{l;u<4@$iQ$oIUSp4<8*mhXd7+BufB{9^rSYMu~0hv$$uCa9dj z8SEGgrk>IKSje9Bz_%5)% z(SC96TkKu4yC!@4`n?QQ^_)V@z8`llkg5r$go0UgdAld_P*;*=zzDRR`wpw?n7zdeqNh9M2_%=I<>-BNSY}|Cnxm*Jx&>I3@|M`u4xy$c--C=A`v^_&JC`@N2oDBUq#(AB+N&S_Kc=P!lQv$dNG zLi4uOTH?_>)yHZ?KSX#(`7)~>0yCD<(S$)ju9E(+0n~=#>cUR5bS`*eY z^p~}{f|3Lh1y<+nRm}}6J|kdBAV#+Ur9Ygftd;_zA8g|$)kGp9)e0Hcgk>SVp&_8k zds95SE-jX`#l*GUjPZ~>Nm+{4lF61j_+DjGl=$zGAb8h@)TQWUuKnul#&DX$KNB~W zKu-0{^Rtj8IdR=3`5$>yUEKe2nwPrv>Ur_=`7{c0qLEt-#B?{HH@0Lc6>P{?wu@<} z=KPsz1OMy4|5uJC+$O_L%WNeTzVtj*snri!>q6QOL}cRmNuzs|$UuA1dIkw}mc494 zsQ(boTGqvU12=EC71aH2vn+&*;_}km>Y9!{OMQz#zq`wd)W?UJRibmgZ*kBXN!`=4 zb;*(n_qJm|KQauweQGS{ayh2k#8WK;YNY_lK%>(Pk{3wJh06dq`Ts=b^vkS=Uywq4 zLTo;EHrhSpE}QzjfH>L+te2weS}1dSS<+ubZcjopO2PmdJYh^t#{b8YG0~L8m~^bLTOqkVY-q%D> ze}L`5VUl(k#+l#3&*Ic3@<`#{hf{A{7pfB4XrUlx3ky^#k9DH>QBANJ zb(Sb!4R-5TrZF#XH$nH#fZWB@da03(+!RaU*8tpfdnGZ#dTE6| z$j()Li&@O1*`*e5=X-$@A+(v%B`Te5tWwi(H3clxlj$t1Shd{<^PuG{6v4P)sJ-Mt z_1eT&1U4cKz*V{qxlOza_sM+0wDN(#-UvZ?BIt|$ioux~NYJzfqb)Mw#g0+aBKn>AT(o6mv8ZS{TawgVBe(rVGS6Z~nB`@+J z53yv+e9VIO=`_D<8kZ;67XYi?(5#hY^pjK-xYHRa{_tAE^ZB6Ddc82Ye;-kb1GUF2 z7a+N5Y*ivoQ_XXo0K=&8B!%SM*cP1`F29y|sgO&)9HcHM38|GrO-xB{Vsfm<=R$R{ zq@L7+kWmI(rH1$}y4nvcl0QaSVrZPGMo10RiL$z5R^&BD_0CL$^;yUoHYN4pf`<2j zF!^AVJgXo6pm_bzguZqnH{0cmOE`Ca{-tPZ%NAcK0ZiN%&p$SEjA8DaifjXp~mZ_IayeFApgrapI)ZnIQV*ibi*Hs zZ;I`Q<%gp@WCg(8?uCnC?T&@+m4PPH{!d< z<=@it)#zQM*6!3%XvCp@++FrB5s1UgZ6L4k zcJv z0Xf+yPy1%?2{nx|f8x4lf3;hbv)s(%j`r-Uzem0&%3b3$g;sqWyYAI}NlEjmH%4pO zP3+xMgE2Sx+Saul&P9CP4aHmV&Fq?6yY_jtS7kwe;IB(Z=O4IyAh@!>{y-&unED<@ z<33+513+(o5SoOO&rX*p^yN)#a0FO0c=TsA6%xRJM$71+%x8t*(Bs9@^ijrKW@?}F zuk#`u9!XsF0wZpiIGtmb^*p|-S8%sJPwUQJ8lfek#zf&=>fa>TP>oRF=$zl0yhDk4 zoDH^nWqugHRL-Jnua-H0jxB*4vs5`*05w6ssjx|b2Os=ToV1gAr>vyjH|xck2q5{E zyPR50=OTOy}+&n?}+7pNy0Xj(TOeLz$Ff{`_&We2)SD)?w^A= zPGXmsN6Q35hqFAPkgAGrNfI4Ke1mu^43m5hlf3D53L8MJA@hwe0lmxIaJ;eaONH?Q z?F&JCCUlRCU!XueB9HCpW}F!UW~V(Jj+^@cwHKp=cnYKWimcO?LTzCr-rLJ-+YV`H z;1e?*QkL2ni8pwtujN7K`=W+~@yB&$V=fvp*R_%ZRyAU2`58YFICZ;b!{x>)V zdsfqtni1g>_@yvw0K+srlPGyI5*ooyY$V7qbVshbsKEr@`WYD9$5WGH^|T8fuQr? 
z?+moJn4i}>vq$V@kz%zx1MgDgM#%uV;P^oD#SdZG2 zia5|a8FwLYDat79eL}niQ83d^;_qFw6NOdp?sG;{_rxV^#0w&(Y%pSA5qK?yGKpsw z+6q`gcmdgrN7dRhTGz+fjs$s6-dsv)lsYij(#LLN(P9pE)ZLtt8@+J(b+lV4Z3yZb zX3m`Bx(ya81xQ-DK_m~q0_g`f?SK5B-KYjuI-@XDXD2c{#Hr#n)j&>=@_{J|{aqi8 z+fvvd{670mAbd*SX*VtGv`n^HAX(ElQqS==0I%u&^ljL?kX$8{P4rD2vuCE)w4f4A zxb*LEt5$3f-14pb1Ol|BPyuv6-JQ3r4fl**4ubAs+OuG=Y$qRC<0foN>ZZCK`>4r; z1wCuzlQdm)tT!Q>{OBk4rBlqRC*MRFlunCp+I2-P%c_Q$=0nb2_C`8UspA}wI;qY# zsY#K3E}k)o*3>Up>5*+>0Md6t+`i(HetIR)x8tNdLOF?8nH@ZQ(jkF`$Kqb_M5K1T zoL@IK(fwf5)y#~0=1v<{kbBizvs})qq9gzYdiptO|1pfWmZ*jmFE1#<6+k2Kv6hPh zou7{6KQo9f771D*Yr~jI#DWJTOn=!d<1mrOth)Zy>8w~v9NTg051o$B`-?_^9YA@R zGtCCW1f5CG4+z+S5UYlYc__5IjZh;F+tc)T>y4Q6Nlv71*>Li*-HDBSE6U*-8^B*h zU}m*TpV?g$aXAWpb^9;blz^y zmTc>r0J%0p z9M5TGNWTrGmuS)~b4vXchR5{WTg`PMDswI0T7l{xCt1^;%i4rKadHh&%LBIAEGBlh z^Ijmr;+%fnJpeTz7J&!8@t^N2BqT58$sNJeHbC&O3i4uTs1~9jyJ00CQ~ZpOgset1 z^Zhr_RUy>3gSv{6-VtHz+xt4nY83+*PK$4x&3hMe=Hg)4uB_`N%If}{8aHRw*eb&7 zsQ^Sr5peZp8ro}!@;K;*Hm<&pPC?!2xn(U&(stJqbiLD8^TB!*+M6VfM4D9ncC#w6uZ(cYpXg%vp^IQ#9}|E)p(Oh0L?(p!gPWilaXsg% z>pR8EG*U?_GBU7pzH2`5T!a~ohZsb9v3yKaLOGfut4 zyrpZ6#uUti#ARb>EhA>rmnCFNF|LX0!OV4*tD9zT6P+M+F|F{;omaT4Ha~wJ`qpsbqrYDRNbx*SC(Ql1>~>(yoe$$9csX$IaHWX|JT9$TiSawJ>|j`SLdH zrog@@e+HNS98x|I@z-oZ1~?cnM7TJeX+V&N3I)UU`k$I&c9@(p*H(O_IXpq1XwehN@j5>V=8tEa{mG=ghBlrCUX8OpoaCXDw9C=O}pr2~Y$lUF3-NMAjW z9N0f0ahiJ>(WhbmM3HL4tC7}a0q$&t{qu835@S=BB3IR&r`rsgA3Z^P`sO=rI=_y_ z7f#=FPpkL&5|ETA-&0#N#TtDnH6}giLpksqlJieje({M<0;o_SSPc9t}8B!&?GkK zBiN2W2cHBffRIP=zqkk<41}(y#RGa5tbk7_Rf;j`@fxr0Pd7iqlnH6I*5iEjRyx_z zp~N27?V8K!#?2~q-pTB|{)7){!q_u@A*BEmh2tof-#eOBKRZn{qFrNw>DYBba0C_M zSHxW=lLq+|fUeK`^lP!&RFDDkaG6iq$t6__0@Zr50$5C-bKHRuG#d{{{XW~No_JIi z6VvJIG6#^IgT5U3XVc3-qSW8gaV=t8NZPVBn}mq1}t=p4@=2}~=! zzfM#V`u&a%3R~Zg`)tgh_p|BPyeY9q zM#DuN>mCOVqa5~5UDOB~k&wBXg&;|B{eX_~p_R1i`oI7Cmk7R#fAHJpvxfGsw z&KHR!ATG|wuC33$bc_ma=Gv^0zYV*wFAd8F81eXPt$JkErxvHpT82Z^1Y>)_#0X@npo_obZ+3p`CC*u z(bYC4wZO|{dcM+HJ|G9OChm>$6iDYc_YQ1IlFnw9gwx_&nU4j_X!wRch4GvWww)Yn zNqeUlR&^cj8jNS*(e7`ALsYuBKhe3LMd^bP4~vE?U9hw@dJiG8R)NoA*@r7d{~--6LEfG)J-#4`345|YMlydpU9>vs@mp6p2(u~44#p*X z2zveTf6a;a>9>8TL;G@tKwh_Xi?+ZNWDg{5!zC>t(`>f)3OQV3`meD=!N}o+A;}=4 zojiv-YulKy)UZc*-U_AGSPcjQY-5C}ya9Iqjt7&(HQScS_7A$IP5Wpi@y|gyx`SDp zfW;!I{>Q7nzzH^l>6Q|F2Ug}ean@+KFTQ-nj&z8*rkEE_8l)MlKp)3j2N>e8wHeN` zwjwjSe;U6}C|Zwd&^(y?l$y0{cGEJfJ3FXgniD+AECsXGVoJA$FNq<}y4t(CSc|H0 zren4c8YeWk5MS9Up4M1ATV?udy0M$7cN7H4IswA5lq z>r?C_KPb>q^{d~8YoI-vH)u+`6XCxqQEezNoTA`trvqTm8g|Z|2R(ry8lw*(EId5= z*=V!VdfNNBOU$^Kmd}pJ?5Y>^>vmRyQNGiahTOK&<^qlK>FbBQmxn~rAyLnUr#;VJ zc5gcjWr{yX8$KPH?432p6l6Nco;wBsnHFP4hEdiSAI*Md@W13|qGYlwGJ;L65^@gX z*&tcPz(N}>5VWMvaB@gl1Nzkyb=b`oZ6vWYWrR+g#e~|SfZl_s(ahGA|MZ3may%p5 z@|&R&(S%iA?V0I~_VjIk?YS0Gp+axFv^A#Bq%3PYJ<*=yays2=r{mYNRlbN$t?r0q z0R%wLVT1C7=3LB9=Qtui)YO8pEUcNkdnm)w5O&}DlvOb!YYe32k2N`U)vSZ?7k5LBX5HsmwpOF?BCrm`6%eg6y0% ze@L7VF`3tuctpoU)uO_y0VrZP29MFw5qNzIx-n|`y$_zm3(kYMks9He0aB?qBt4gK zLaHk=RLR3Tn^jG+mODY0MTVBtps*h3%uiLjDSUD2jpj@W20NNGGfz&_@J!utII4T3 zVBF4Qu0J}7HeMOQ^|fN$;X#%UJ|?Xk2o^ES_%5zP8wrKUW3)s|B;(!mZkfZZezAcT zk;s9T75EV0W37*dqwhO99nK3(a-_8f&(miVoJFGpwa((QZCfj^OJX0~?Bm4tVB)d? 
zE#;%t;mSyv!uRTYA-ADmr}}EWuFtFSUfx)kO{&@UxbJX_p=yL$>%BJzb^~LTCFk;A zqYhWEyEMdcw+0r_=jA@Se31IeL?Rdwq!r_Bga)+5uHEjms;`z0Gl6fi@e1Z>AgN1B zBt1E5bs0kbonRXT?PD#~Y`LJ(C%Xi(_+}()oW`a&E zTD^|?+Tl=rc|3zzeox9Or3Ix4a157S%K~VE)3={)enZpylJwBN^lZkl9x-85-|mls_%$AZs*GU8yBzkZoSz zE;n1Y>(g9HkwLKP`s6%rm9P(-gpmPYf%*G%&b&pwhxWyEP7r759F(0y+1gMA@(oZX zu}R^Bs=~h{(Rv2&YQG~*!_rxI{ZTCX`t$fvewnE+H^S=$cxl;~yiG)V{IL1mG2ibS z%)cna>lCWi#x=M;nc%7%mwWYsxTD#D_@n-wou%(K)FQD=ZNsLN&Q7aK9#R@5zE{O| zn+2Im;B0L^msr+MXoSP5?#7>N609+49(I&coFS-vo7um7C9qq{YPW=QXx-=13-3%g zaj(*l{kyh@S-f;mCit zi~S(mBdOA>PZ_25wjK;z7GhG>YJES{$C;_!zaX{x(|p?f(7#s=x09-XentR8kiw>4 zd>p19Q#%nM^T3@=1E^j%U#ll6-85^e0f|M0m--0C<=RJqZ9>YFTwe~4)=xJE=oCbw0A($6C;3>0 z-Yypj@S9qa+`!h)3BiP#x~ebn#F__4%;InmZ1NC2m)NA2ls(jYQn21SHk3_xooA4b#o#QLGIP!PPQYnXwHNNHu{<;gL&U-5oIMn z-z^pIrxESEFflJJUsEm@nl@2?HP8@Kif!C!-CFdJkMyzJ9&#NX{2N5kk7u4hu}^-g z=?c1q`g|$s+^vl--v?<00u8Y3TFKv_8~z?@kDzj{VEfb%TVYjj%NjXjkA2YpQ=&$I zn>)}sCKdM&!-nB)f+#HJ+h2~I7+u}`PaZ_uZdv7Xy;m{k=x&nOlE5f3RNr?dcyWda zReK4#eGi;z;7ZC9PDd2d(<%Qv%-f@0i;;1VjdW%w>)FKtWQxwOn!g1FjR>e*3Xdmn zZ;&0sxpsK-pEuz!buJ(BWjW8OKu(q>*>J?#H_Xf2 zWQmj ztApL1b`uL_-aF|CrWGvsPxy7Z+BKoTcY=Qu#7dKd#^Aah^y~Fo&3qeg0Cji0^|aF+)I=nI$PWr<1+U4fQ7 zFDbCkv~>xK`9RZ&r-?(y9$fmV!c&)-{$POwiv)Fwt!ZM@rVst*{+x9a#X|Po8MJD3 z@*DjFpI3nrZ2-wULhcI?SbFoFLr(AX)+53+F=o^KQSvtO$iaQf8HdXtJGkTV8yllN z)Ky9Ax^JUqNO`JH`{e*eK)Jtq7>+4UoKoow2c09Mx|$qUpb~$ZMjcYH*eg^y(~RI8 zI_GuicAI2{6bTJT2m8}>IlOWHX$G4+Z>RALY+)_H)^0Y9fz4)$iShc-Y!C;ZJvp1ZRmWd2_oGmup5%W2NtotcW)nBFqta%0yVKgF zh2xA;e(!>&b589ojrt)GL+=UvVrM@D<4_&xa5;+**}L&1FY~rF!?weXAmtG{suCd% z=AtL?@%)k1P~e8H=@+pzC^~r}}f-Yax zo!b!a*`=syZGBq+b#*eg+jy02%`|hJfNzE)_LwvfgN|)qbA-IxBb|f#c%8Ok_51dk z=JXAyXj23Y^V>vaZ>hmX=@yvYn11JRyJ(gxusn^I;)CjiX{c(xa>$-dhcImJxU2+> zoJDC4>yEh}_5}IwxcS$*p$E#~oCP}Llw}Ps;>)lJ>7qq?o^JgKngmWJw(lX?#pMT} z18a@m^tz{~n|u;QKq8e9_K(_Y>J6V>E*fL1KfmNF>w>QtIk8uZZ!!wX-yr0y9h>cN zni8%z3_?%+%sv=`wlF`JDVe?HvW3RkyVX=*1l{An4F?v5ahjuCDc}wBT11U0X= zFvL3TOP(xWP{xHwp!MA;#kklAJR5qLQj_%IJ_`hFuN@QyYF)|!8}IYb%Wl-A*XwGx z(qB>__snOyukgea^R(k)*WS`U?CkiskbfRx0Hlqun`B1ks%Ckre*|TGSWN7f@pYcT z#ro%Ot7OZLi8I~QK7BfdVQZ=Y488o``O~Mzi+-atjnbvl3J|qaV(%*%IH6bm+c9{m zQX~v|ABej2ci0z18+%&07qftLL@cuJgh9zwHLNG6QZW)xTH9y z&~lg4i7v6fu(wVqo~GwZylu?MF2`tPW>3%F07U&pe@ddIQX__H%yMoH&o$VchNo;5 z;)t{YT*|2)>ZaHHK)mN2>fLP0L^~`v5E@^ggw=C6Q3E19gp%nZ;5K5pI)xAQDLtgU z1`%elyP)m7s4{4n`K@SY(B$WQe?_y$t5Ru+*^M&jU}!|FX8DMWqegnN@rQj1okW`5 zJCSw`>M?`_;r5#Lmu=hjK zX;W*m3#nOO0G4Fh2asfLAY*`Pf+@iSW;E2$Mk9>tjx%KzNj3{cqhLe!kDnA7uVGgOV>`x-4 z0klT)w27oG?16$Xgp@1@1VSoq*Tf(&5@sat%7KD$r$W@6|FwEdGnKKWP|AjkVe4)4 zcTF8junJmYk))2M;PZXfj4hy(cPHxb(Qb{`wittnk}e0$sE9o~A#%*bb^V9D+JhB>ud-_Pzm4T#sm<>1{YS31PYXHm2--Jd$HR64^ZR<$zjZ zOX2w=>|hPu>0G^Skp9~&TsuzA`)}djjGsCPl>kaM+w)E~lTWa(TW+|*U;-4M{|J>(rmTy0)V$#H}s$HYCg0LpK1EhuX`2o)6c+ zH`fjOC;3#fA%a}MTonn*<&Q0nv5|V=x6ld3t(f+sE@;Nye#>)$08XWk-j{;Dk~vECu5oO z0C>Td-Vf#=W=Ur?Fr53J6H{1!=f@^Ac>C8G>ooJokNQ(uhqL$Oq^SWK-_otfXGj`~ za;~wm6zX&iwf702YoEN@1d`p$t9leeIUYbgEP>FXFlmoe2(2)8l_akZrWfm}A>x59 zc6M?BnVGip^Q|?$LX$p?*RTvOGj=D}tRx&9zwo^}V8^(7wEapc7g4CUPOh!r?ld+o#S-GtP+=K9nPgZq9=u;Zwa%dKiJ71OFxpca1|vrM?ZnEwDc?i0Q;jBIU6m3^#RFubC zw++XFCTzOYbdnqbU0kDYaMvj)TM}d-Gx+K0@;0#v*=1*;Q@|E!U#W@0$cCZj!(odU z-R(j&YrRlulI*YTofB&{EOuj@4#SfrsV#K0v(o^|d0x)iXd@-4C9&_`LFB}V12Y1U zG})8;-s&*D1Er<@RL_I$bSiI`l~?Jp#-M{kQrefOC{Hn@&N4&lQ0yy1{E%jo`+;Sy z-nh;li%7*F4w zQfbKajT$CVg*s_1A#$!_zq8)vd*O0e4EScy#BN-B!U)44Ml|M-|gm@@oJE%v?0 z{|@I&As>NJM{UY3EyrL9pRJmn#j3#2ABYttbYVIvk+AGoUO#G4bS+gQZzpB1rS(&E zDzDIOTjp8&)`1JH*=-IgxEbtuApz7-jTyAu&Ae;62GQm|Qov@{grSx_d`?_(Nhqx~ 
zU)(b4B;}gwV0Z&{D5zPnV0b%CeaP;lq3R3%Y1n=I+pgN=3^X-6^7}g4SfkmI4IXY< z>T2d}!*np@f5X zSM&-RPp>rr31FB1Nt~MTEtezd;_R*coNGtx26J`~6LFxMf^OGLwrcyc*&zd`Teuoc z?)I|5B4Me^TUhZL%j=~8PZgf4;VM$%J9^nsy6nYv(wMF@hv7(7f26iVkwl?a#G_?T za5V5)@%?3YU7FLKY)8Yqv;2=I79?y8iaLk)5c$>RZ*SgxX^f4)yrm3;e)nxenPF-w zULELO=m~aYyUki!Bj>omkdy7(@HK*2YKqjo)9Ihn^zKY1A2pw(qkG;<4D}tLyldQ>2NJ8 zE5R64kakq-upm}tjG4mTSDf1uo9AjXb)HW_R^*w1N5gX$zG?HW`$+gP4d(~Gd?5Ff zy%rfJX9@i2Q>bP%capQb8DEV0QKkOr6AXWYdL9hEdb#T8Bo)%*WR^WH+th26cSBRe z*yecs)wMe#TwhCV#?wjKROIp62?-FbjU6eHiBQ~0?rIk*hU__@ek{=`-b*0{a@OB%{fpwCP8`c7X z6+6n$e2amO4ZDhsb!!>s_Z-jj0%_3#Ur3bdy2;LFUfz2=i~ z6K+TTU8l?O=sJl@jJ4pRDb#_AvfP@c3DM^TZ1x~0vgKi#h*aPlsyV30QDPL zxt~T8O5Z&mmqTW{CF1RS+DPb8{Y7Dj{ATI-H zc7~L$C1l+8EOXHCG6eP`T*jMm+J|LTSwE#WV6c&f2B|+@``zZ>$9h^m20Gd9L3UQI z`4oiMJnOmFuV@BuRX}S*eOC=Nrn4pCRdxm1Hmw*022C34qv;%saG*2jHiJ*pk)EY! z<;(99A0-JNjNF>d(QJ_;`A2;T!T(c#A*uM_e&;r*9=Ly$6(v=#AIIbP+^g)=d-g6H zDXlGP0|p53%QI>1rLkEZqAR>2 z-jv4Y6GalKpiE=tM@l6d3FH-mutKJLp@dnkBog8 zCb_GwJ>r11$0(zYf8uJ%$kKY)eD>*Gbb@5~%SA=k9C{8YGjJ;Vh9G|1%OP;)f8Pus zZq*0Yo|_ZdaRG>wX3~l#qzWZ`12qBDt7vG8Zf|3+@yDgL1Rv@d@z4&5yEY*>OiL$f z#QiNhT`Pna{!<^#Eo;eQOC?sSt}+$3rse}At9QSX4$gFVyPRX1)`W~{*ws7U-p5o2Z3@K-)(tN>dQ&Vi6#W#45O1^} zrF>Gd5g6|kzoo3;Zu2uJwg+LH_qNKeOKYV#XzC@O@lg%Sv{eJVYb742W4e(gSH5$T^|vXztbbipIfb%nqfdp_p)xo197D^X<0|pw z#pl?Vrkh){bT%=Sd~fN45(w(__AvOnIo-0=+~bIRl;pF<2>xsx@}av}PqAcl*|hai z4hwb!Lu-e65YcjA=P;6n`|b-QptRX&K))bo4S3oKGvd2kzw7mUr= zee=`nJcsM5rYg5~q4!05+rFUMiHsn|PqYr_tEB~o)7X}npDfMR<*6bqaoTtA+;6Jx zjC}ef=<^(BYcixe49d23T26#@0x79-L znaeorxtC}fzky7J_J_aggL#e>wCYp4b|JtjevPlPwG!&ru@x`Wfq{?Jk>obFmUa=? zHRY83yhD06FcR$^8Ht2>IvuB>#=(PK&eitjh$zG{K7E?y-$tqOc$w;;#(({m=uUYL zSzbN`)><-@Ra)fU4W6_yfR2X31L29|IiR9GrkCm_zS;sWoLMCr7HABuLuYU3+uih- z+dI*si&6{g_1;FTWQPOR1)Rdosa(iFA;hm{Ax}u5v;i`$>`8eyh(&d5l4u~U9&R&D zZ{Ns7JAZJ_B;R%=rZKQ1^Bbt(cDI4obw9#lYY?Oae(}`x;P;sTyRbL1X zA?~nxz|{lvSK6EW)Zy$lH>wno+izZ@S7kO)TwrJCEAb#<9ChC4(Jq0!Zxb=`ve>L9Cg}zr$cInEXO5S=LfbH6*kK;E2XD zgG!^K3vc7}#o=H8`0d9w-Z&Zsa`GFGQHiwJo*m!W@>805$wG56dl^x2QaV+J!)*3m z>Y^?}%Ho50ui-cf$h*|GnX4K{U<6=^Idjdx*|WFg3G@}-+egtw@TX|;&XDUz`sXUP zE}h`;h}3TmZ{{Pa)hzKl1?x=sCkD;GPETKJK*!ba3u`XL=@lDjAz1_71H83#iZC4h zZQ475C6)v1)!xoqRsU8J(VU-Z_cx^iHeDp%z%DYT3ah%aXzsw;GsoA|p>&_Gec$Qw zT|`_|I@u76y|Kbyb}0#gJ$1*BVX1TB8CStu$uS>hO}(162d+p3*>QC}9CkfwLkf(g zam(p09=l2D>Fi>?IEj{HLNf_Ui>kvarsSppEo=OOkw~m0F)K((T6VD&N>14AH12L1 z#)*qAA^G)7Z|TP}-6Nj4^8kQwUkwztkq;p4dkJ+m8H0oe&gE!KXzxlC-WO{n&|%LW zMX@{@r+^qhEt6xDuIm^%QPpl!H1&j$3Id z(5$CfaR-&Ees-aM=tZdewM|%Te-u5Jqpl{{W?q7I1FYbapOyblh9yL&et<&dDQ+DP zX(!W)l7j@-I2U@RoIffrJ6~LMZa!YS2hS#VdyEGA@5TdCB|jLnp*ynf?*z4w+>A&l zu;dcjGN@^3KqyaAd6|tw23)tjL6g2xN)%pE-{Tes0ZybSJiDLqo_I9>5-7T-+;P2oy z`rD-DZKwm41Vuehzs3&#|6ys!Lxg6)1T&Y^R0u_|HD!t*0dt?5@<4*_Hihie|TR3Uw*{`BvUs{IBgw$v!f)9(3BIqVWx`NLNN~HU&{D zcM$++YY>vTl7D!u#nAbH^swXc%Lwpa2F%~>u0C|qJLwlrSY!SFZaGl09}+@p<+XyT zjYFELBRs(Axs?WR9Vn?dgXly>@d%oIkj#^<66TrQCgv)83Q{~?!NS{h)~oezLKYz) zh`_6bDrB!ZC{ct$Yp}q z=`l9_mTmGqMt2xkj(c#9Yf&@vS@TkiLY7-5cz=m+Z>#(WH6DIzmU|TLst@*&EU*M<<|!WB-IZVBQ0lnAdUR)ZDdVDW7S!i$Rebl^vZ>rXOBEJw zuWkd_kwj2Mg2J`|8q}jD35hCREc>!N)t`mK@jGtyd8kpQCC=w?$yU!j(YwwWMWW{A zVPLXHfc68|f4#{ns!=`$fD}Jgi zewUE=XX2EY7wXH!m{bmmJ?&EmqK&|sVg^@|jxy!%sV>2kp%4Qbgd)%HWuunhB(w3< zEnxW-4C#LJzRWBOQz2)1ZZJdl=~&eLu=z81*}SD6HowI^D{Vx6g9yGw1yDp-ldi=D zZ+LW1XGxy~Mjg>0Qd)r)e(IEnh?sl!LVk^_I@PpT4EBXbH45U~-7BXYeId zF18y(3`;I5Z1BPEQ<6lAi574$FVGfYr><76>KXtuHU&xLVEE&#d*?2~A6Ro=TN@ky3RQ#9{}&>Wk2jvPASFrDW)shzHFrTWBuMGinG;05|oV< zE#s{G>Al<2G~7#H+UXcXBZDQl|5h3R9@m|KlbMZYis-z}bzXw0;SOq24M|Ll{^=5C 
zj_gPAklAj6T1|}lkti}F?-+82J$eo|J}QhovI*Lq}k3_)kM0egqtjtd}p zgkkMN+`^POt?VIvDY;@na}-DDStB^hTWYw23{UDvEKh&f{H_h7>n1Vz1MfHwov!&s z(IRj)Xi~i2FKc%fpgd|icxAX2<&cm%)D(-KIGcZAh#rZ(%##k?$D?Ul_u{fifluE@ zG?avXLtQrYyFZTYZj9M0)RLx}HU_|mzke4&+4nsHxK@u=5qK}l1KTV}y)}4D;jE^15@t4x&4+ps(1EStL!_$? z6}~mrZ6LUF8%UZ?7G?97(;&Tw&4cd~&+wt#f(3BjKi=|ik;V+xN@wY$K}}MI?&Fob zoKzlB<)YxHY`TjR-jDM``)9@&YCOKUPb*5n-R7V9;3W3IA?>I+5k=H+8DYq+c?U!q zmGb}YoK?dk!K0(y3Gs}Pe4T9eudo{u-w>fIm*yN=5b zu2bEgjE+Wn0=g&UVKGB7^lIvYs0TT*R6q2|ffKE?*4(!0KvcP@nMs7PT@7l5Jd$aJ z&F5P093@>KB4%j86He`%@}2;IxbTrIxqWNwDi}jh5cR`qtj!(jldB#({?4=D4PJ&aa3k1B5r-dxv=?eslTDSCW z2Zva~D|1G76RqVZFWUN`v5z@MT&a2%kJCC3y--{T2X!ewS#V6$3AB1>7Q;U5O%_8y zJxSF$mm3;3B)O}F*VJM*qj?AYz?=B%n4}guVtE>b(^KVRL0!2OiIizvq8Euu zs*h~;BhZzE1b2OhWmc*hH0p@Bk?c3Q-IapEVHAP9Uck0^s{fh{3yfrzPPJkM7%~H< zeu1QWnu*AL_zF+zDn~1|Ca1_>)xanzk#+3bzySbIH1l~Ihz>p`Gt9$Ac_>kdi%9=N zqGtzM0MHmBt-vz9p3XmZ-&)t{ws<*jwa*^dT7L*Z2F`aZ!}u?!`ahmtFQ@v?Kc8#R z43(oXH!5dWrd@1$*-txlH=SIJijPZYe*kqdps-GMoh0Oe-R;z6U`Y85H_-0fvNr5; zo=#2C8}(hh#-HbG&nN7}z|nI+;*I0aWfI}V84un?X_qY}`@!>m`SBMVWg+rP^UvJYnHTF%lQkU&^Uf-mkXTMK6FS5I^f<0>j zjSJyI3>K2OCu1iA*0|gp+rNU8C>skwYa2CNhV>o2?xsx8#g+!s<#{v9gwhE`2e5Ba zVNF*%9uJ$@d-efL>ur*u($Nc5sh%DT&!lw3P0_!my^sgNX{_;o!su<+3?EwPbBEGt zZfPQc3Mr36M{4EMHSlKr5i!Lqof5;%;3tfx)a0n!oiC&F$mVKXdkUk(^Q*J4Lm=$l zkylzsA*6LS9}Lw)E}Wcth{O^{-P8X0R`$wx0xrPT8|9F0l{`eM*P{)6dr4b!Uk_;* z+UniLxW6`Y>(aDs-KUHbZDB)`tx!&U`UXtsb`0kxzU#K`xQMpnu1kB-F;?HMdv1oz z#Kj?uHN&L44+8<%whm}I?PbI)4TeswN81>GT-NFdovX^W11ZOTV4PeCZ`R0eaNniW zr@{Q_ui+x!89Ke&|Cq+l0vm^YZJ-EjUc_!goH>ZNoY2a$*1y@!teaSwZ3ryBP|}Pd zUixyIhsXsPz0*q#aMHx9OKsH@DP#)vLuO;Xf)PTia0?AFe+O+qhE>cM0xRc+7b92N z)z$JQ8d?(Iz;uKL8wp3zps41Qu5aW0?O{+-mC(DptN-1>JIPI4SGyv*B@`aGwu4;}*m zO>&2pwDO!~6VTEdISIjA`ls>qc$MrA`*s$RejJHi-6gnnt?7l?z?yiizS<$}$&Kpj z9}at5e%GGQ;DnRL`#4CiwCT%)+WIq6XWf1F3bj+O<2jt8`RD+8O8uYb`l_ui=qXe~ z+esiNJ1}WG8&?@vNOPBD?<(H=>g8MDzTVbf$h~933@gn*fq+1j_1(Z2s%?oCwmMNWy3=K8dDotV?5b`3q^oksIju=3x^`{xR z)Jg2=4aZ`>y%US52v(|Huu-MtIq1{}M z%HM%y+qF!v6@~Do)xKUU6s5gdMnf4TP2v1)#$-db1(+$P!!9AghS;r3e6}@J^9*Q| z1muxtxSYV(3}0h1=reKNHgSwh12zT|z>HPP1J6dTeX%RC;utSq{ptZeE z^JTX!y;;ZOe7#-2YN!vFV3e%O@J`m1YkGoLmAm${y7d}@YJe+s?zcB~DUDc3rkVpT zJLsP+*Zn%fQ3e%KGn;Owj&rZ@aK9`e6ZqK@9{D5nk$U^K%@Gji<^Y((cn{tBdiK=y8knB+XDl!95ev zzo8X89amVoO#`%&_o^F~UksPoO;1?|S$gR1~4{`r;@19X)F$ z03LAxXfCp%aA&O}$0o#30fg%l$r*#el+j3R$Ww3)&58eER6>H2ulggZTS94cmQL{yqL=B0t-UEiIT+CfXN z?JGWLWT-(A;tgiOX(f4!d7#VZ%|2 z?*`V_t?m9en#@@bJ2(gC<({tL0Q0+ZAS&9{?JuZDl!AzZ*VyD)^aomqCQLt?YeZ&2 z5HM65-S~+eNa>ErfSRP*A&;YaX(3(Gs4wL}nV3c^OTKQ3xUSqOcy%&JUnjA}3Wn%^ zyEivHVl2!{%kz=_F)hds-peEVI^{5So(?c8rry3A{+;SANsH0VseAu|zRLoDr(pjlz1t|@l$Wjlg=J)_4gr%z;FEU}F}95p z5V~`(7U?=E=w*aeth0wkU6;jlkm&QU^3TS8(k>kZx8Yu9dvm}ByNcMN>#bI59=2Zs zhv!?xW;T}$Bw1wOQbZBZ{8K<)7i~1GVQTK7H^GwjT^b}5p|2lFH30T^u-f7Y`&*D5 zp}78W^Si(ellz$0M0j_v@++NPT`HxhUoR&B=D)1Ee(fi{e1k_tP1t?5w+Cx<-b&2v zkJq%8-`;#S;Ev?ueK{7dB>zG^S}3Nnw0QFuPc6Eg7Ja`-YsZ;2qHAaLp}J)@zYRl5 z#u_rWBzM!4fgetw2=5-u5}1QV%hptMX|kD4k%g(Vx#Z&fp4N{??-J^zR@Y(Li}cUS zIs@C^-&ypu4mL1C3Uk!LRE)@J5yPZt)g?mC6itu3dEd8&wUO4C{M3QWB_av%ONXuF z&9e;w{MtBv)v3wV2+6M|jfsiy(is*X0n%8USsDv)&Q%c@C>M3QBL!N+s^6$>RTRgz z{h477w^n)BHO7NmoWJ7&o1Oym#?kTm74MRMpV(Z9xW!e$$F*v>kd22=Zcbv$k=m&!Ib32+mlD7wg124^Y9%D zCrOb#pHm4@1wKk5K%U&$ZS6Xfej zmHKG--OlddO$0L}pr?@$juunRNI=!+tdoW*AMxkgex>#l)-+CG@9}bMUL9<*?Q2$y zoyjaY2+Hc#LQ;3^MJEmjh>s9#DUf=U&TQXvz>+GKrx7A4UtQfh{`RUwm$_lnJ-CO5=+UPwwBQ z@(LW9y*;UR*DW%S{oM{vw~6 zCwZ~MX4%ZIg=*bpXUcAmH)o}LQy_VDqNOtJr6NChqRU7snOI)~&qdoQM!WPin0{1CQzilXO*y>4IswNV?%ngp`boP>h6;v~&Q* 
zf&pZrzNC9M>-mlTvC;FmSJgmfk4$YU!$|`6P`!E|=W#X;#;sN=-l>$)620XFyiSj2 zQ>Y?r^J@M#PVttFph_Ab%L96MYlr+gp1eXE69Jv8_hCAho>0ITu64TY>P9&^^iIEK zqoy^kK&F_7r*%)xYoYAvm8Q6~3{Q;~n0^?Yp~TW$48&CHsIYWtgVdCL6pFUA~~aIao4V=;QP$+ zwfrbnaE>d(9K!Ho(>!GBU+W&{ETM^oj}kH*P#?2v*=h zqEMdKrq8qbeFrVE=cGQOrm5d-&w<0$-3I&5MvD^%wS-wRgZqkK?%#0}H9n2{GW48l z$}}BmI~ZiCl?G7f@R&Aw?`#*F`hu*)T`LFIHqz__;#N{EX2nx>ncgm3lTC=pP|w#y zVhNb>GYj+Oh0+#0>Wn%>$1>k4n@xL3^DMo+xN)3+y5L0StJ0dR8x2>oQ`vrvl~4RK)&2;7;q04DWcm{{?SNVS4tk7Dz(cDAgqw9=COX*EJu~!(mfvl<|Z*;>y z@}vNzqPMr*W<||M9g`f5Iu$L}@-QVy26?#OqR8weG}&9CKN~~LswOA(bP{#xgLhIw z+Oc9qFvodfV=RJ!b>th)R7@FZK9QO+V>&=RNnyNE_-d&7V zt!Nx7Go#weeNP35CoZbPee?)#Gg0JKKiiGXpGqI*z6dyoMBkZ%D0XEpXB)V+K!`*N&=DT{ z*S*)Z@Wx9B?5+r8a4-XO^Y|?dne#wi4s~Y-{Mlj z?=AR$YDD+y`?0Rw&o3*F&K>~~hnG)}=?L6`X)WuF5@sk%l&?fBF7bR+LlK)pD>I1L1?%;t?c@K?{7D&A00ZZ4KC!KbcY|oJo6`#l4&JaLVSuaeVWufnG z5D)Jl-JzG7Fg4&QX&|46VTQ!`2}R|i4pExfROW;HxUk7M&ljB5;rMzA zWmf_Rqs=Tyup7;Q`-RZz8{$7B3s0VxS>8iKiU8ptu#VPbAG_tU+mWVMU$d1XY0PwF z<+q61dLdn{bK@wDTg2UXh}1giL)T=3zO*K+AdMb?$UroPrwUtL-;rDd3aNdemFw6D zCqZMTIE&P5kL$vGn+Mx}Ta5{gk2WJrLnV&7j_69U6C5Uz4V7Y0SW08O$i@NKE zo1!4JWjS%F42iAtn%bD^FPHL57-zD&=_P;8H&Dljr99RpN+LQ~S!w2cir8uG%J-Hm z-AsCm$BZUK%YPN^P{b6^?oBEgcFm~)XU~b%ms8AFN^_vOwjbYgk>>k$2$Z4W1JE>D zM2r)k8)73xf*XEVs8MXfMgQi>1c<}6GZHXl!TDZ&X2Tw(4yeH!0%|4OC8U3P2jW~ zKQ%LO=IuYk-5Q036GwWQU| z^$M`pKi2@NZq2`3>#Jr7TjW5OObIMpX>HFA{?_~7&7CfRf@Oo8E-MF-t%r|i!d(G1 z3jwP7$>-_mYKQH&=`nn=;MP+^HBLGMwYZBGx6@Waek7*Jd$%LK6r#27ni)~d+$2>A z+(L-jF464odI`P4L6R!Uj=n5Z0!J3)k%4hFy7&DLhC}P~ZM*ldn@)P%rTyFEjK_lz zUvsnzOm^^ve;gl9{10q;rAL}6#QcnJsvZ3O=z}R~sB1u)hK=1|#@wLa=cnR+#|t|+ zI=i}YMH)7-{@vQ`8mElSYy6~-tv&I48g^=@n2qI7F6p0!rUu}k+B<8>tw z3b=A{WpRTc*aQzAWb(7hme>Bpc*yw9F9;2P%l! z3wSY#!6WHIN9jPT0Wf(BMapGfnzdV%kX^SNo+g()mD0D3Ng(fs5|or020iCd(@X?t zwELC({1uBfWQVuhu`;(s!s>|mI*zX!7fD}JO2RM`r$kJ(ckp&P(p0Ra^pr+vdN_?t zO!Cf1bsmzsopxkXFRZbb!(1N`9K8ZcT#p469rh4LA#w7I>eKPW@?)%V+f`T1Ef8uS z13mHUbmEv2@Zyl)a`Q9vT=cBSAblH+sxC{c=3#RTm0(*LORv?ow9>5!?hw{|ND)CX zP&oLFSR%*FvCLrNfl6UGnOmlJphm4~U{Ss5=A=1HnlAaFz096pAQ>ADW5iL@{*pGk zktiW6mYJ`^ke2$$_U`b^&iTc74~0bn0h@Z-Y@Yk3$)<&PR7~PS#W;1gO@r-uLyaa7 zLxQisUyEM3b1s2JtC}g&ntE(4m+9Q?mMknKwV|V8ttj~r8e5KhGny^LxZC<#BffRn zr4CH~g#@MIFJ7q>xcbP4sK`8&Fo#147J|^jq-iU+-fkK%+W>c^STbZ0{yJY}%Bk`; ze%uQmJLDVqi^aK`#?<@S{GsmtXQ#|HCto6~o$94@ULmmOv+jEP*Bsw|`4WvG0;L{j zXcg>Y)?TbW*I6o()1mZd+qH%`Ipa~y{8tE3FS!zCgn`I*R&O$TgR%%7l)|2U!cFnk zv^@=lCO`$)#oMMiY`Op&?ooqO6n7SQD3wnrV$IRsAi#23EMWGxvsi&+ zCS z(%u}Fs6SIttHZ*V&Q$z?xfqF;oXo^=cpAOfEi%q`!`6!@`N3_86QX9H=Y8WSzi8Ol zTXSL{H$2qCd}rb}k*m&1L@VkZhG;JH+#EvftvgcOZw%GNkT$|`w$h;qeGZ&x*Dp^*MAmY-F-lPx zJP$`nSZtVOmjpa<0CdZ@moN!i%?1bU*e`W=y=S=;K`zwd6w+&`zdeA)^o5{}`oDJY zjYM&i5g;a#Kl9*_*b1S%A<$Y=%w*rWGh!xPAWKN7T*Lkf99Y@jMvAvsQb(s5i=AQF zbSk=TG)c9S@)tey~(;l`m@N;Qsgg`>1 zRH!P6DVTyDf24W+LrobQ$T+=RC5JdHO$b;Jf1Q7x1=IzyulWVU61G7U zNGVc-aABEU?-{KNJga&R@RjcBmo3bVr&eRz*QlXvHVMJTvwXn-Zh_W~3|1H8xo7Y3L2=X_ z3B_Xlzcm>5LQ#tqgZ4uY zpX&%%f`CL)rH&}iZhpXoOi387jB1?IL3@VD(2?T#vXnqzCDk;fzFG&aMW-0h`eQ*>+1_IS5>^V;g1;PtxIrWzN}FB_g9 zXuN<5-DF|&3tF8awj0XJ`q;mLPH+;tg`?QhXcd;7Dum&!J}8(-kI&QbTTK<0b0;n= zy&c5WWB@8_tv5_^>WIq`v9M=UB=y)ETmNo_)7 zlL(^lkydWK(w~zK_%0F{9c%}PepA;nEzl+>InVWU+I3_mlF3Wzo*u%VCuAr+21)qs zU*?*=HxsWHSrtv^maj^p9i0eI4*9ZBlHMP#_sv;oX|LA)(sLME@xkS+_i-hBM$o_6 zOM7Q9H;R6#^Fk%l2j*TrTm_^M3bR0f^xIm@FT7_&C#)q1@RKx1IE|0zFiBe(z^EI| z>G51Tpg$iUkK_h_Cf`kd3uhzz)*AZZJ@88G!d=mGyI=SzDA9tM}A%nuJn)E!%X2HDYcRd6N`#{L> zak5T+Yohuv`0|B(Q(4N7D>WlBFA-KBS#7OdyV;VR&Y|cfzp}ecz2JNAyN9vuX1#Ph zg7|5DrnRtKUhdj@ZUktDQ*&E(^X9(Z@qKpY#puVMVB}0}M0P2s{0K7rB6;TEwJTeLotB0d;(AaT9u}j$y@voF)7;Urp 
zvCxNB3yEVvYq}WHbH2Pz^5>K@r+Z`I(H9k{cyUTxLlV0=z66&yH3bF#c^yYIABYoL ztc}U;c1K`7m{@{@aAqV89fSbBpm`AQ-I&_?Ow8@DE!3vWd^7((5rKETBm2DtKN}+-gEsMiYnV zAu2t*n_4tdG}x4LdFqxGl=C#ao-Zd2HXdad6`7xV7(Xe9g!{)&r0o&XaTtq$S$jy8 z<6GaR=lWV|g@)$c+GdHJV-`2{jRDh5>p`sN!$^1B)BN~DRqBD}?$dGkMmn>=FI=U# zby$wEP|-M+fX(=&WfStHkA+Js0odS%WfyV=pJl zmDi`JMQbTb2tD2-zq(h7^RC|C82a(YvE~{1D=GJ3TEQ5q1$Zc2Fx&P7l%SqCLEEnH z;;)y(gGx8?ps>oZCq&~^eFKL3N!@Q;56=3-Ygpb&^f@eYZI3|F&#kh>cc;XGqQK!+ z)aMegY_Fke+TF`0>z>jHs&dDtr;FsR@Z9TeFZHHWXj+Yu)5LknWo;yNSi$A{F+5Y0 zEuH<|*3o8U4wy5YfwJRi_Z{=*T9bC#&jGp#tQ;d7xyn+er!th=&4M5}nw98Y1THRG zJFc`$Uws%*tV@9&wNBvc)zi_-wE-U8pOe{vpgz*4;SwGu$$VYvzbOoOhlWnMoJ7Yb zWzH}4M;p)-QdVtm++vJ9gA6}jM~_(QZE<4ickyonjCs>;LBAcoi9Lk(Aqq{~lyDqa zm(||L)Zi!7h4v%*Wq={0s6pX|GXr>5n7p>=SIBSPg$m!TtWi2!#R(%4lvRhO~W921N8Jy#NiEbH23o z6iRKc*cD*)ldI{4p=3Vh*cw5rt%8{ibEeBu^k6lml;P70PgL>I?TFQ+u(E;32V$2h zju>dJvdGqo`H&?8Z&bDDP}Vy8XeM|7`n;b{P5-bP+(IL_Q1B>GpEfw$57#s}MAwBG z@9Eqn-|E975)M3CblBj(J7SyKeZkMuP^`-aY*b+rtm&!5G`64K(R%p3kHdS(Mu|5Z zdKY%6Ko7bPg68mHT^PJb@}4@$%S)#(8u=KITwJRMXbm8JLCnzOhX!Jkr#Ji=qUDi$m0E%8_&^xlP*SM;K3;BmFgEs{>2{kDXG zZh#r${-r*nXF_E#WQZsDb6Bc@6Y?u^)kcWNeCUzA32F1Ud-S14c~r14A8P6LA|%sv zj3lgM)&t44Z!)V09l`Xd>EBr$EMx^K^r=z5-t~q{N)ymrev!Xvr6zp$@p+g%@M8j`f8l^rASqCy{x%fs_Q36WxG0Gn>2|W4#-LySO7gh!oL#l_d#h_ zQV^|wyp_5^o<~oF>+kK^9FG2+!x#+Trng{}l+@W|*KOA}U5%^2v7CNnOi&I&y7Mtb zl?}e;hUSKdQ|?w`B!Cvdqdxe#h(&nB9ua<0SJd9~xt{-$dtelf+2bl^|7|=yPL^0J z^E;mG!dPzIoSpkx2b;_^Lb#MJ;ei>^>zxz#6Bmt^3NsLf0=QrBKaGW6W-oUf^|4K6R8 zm~-}iyiin9keIqz30bn*lsc{+)~sgrP`^P8!>DTM$8GjSVp)$CD>ZNSm&2`CRXZf?osKCj5jU?P^&=9$~cFaij+s%9F3_~EZ}V9MYnZ|bNa3IZkJI`;p^*dBqb}g>>sQhjQnlG$5ypLj$-1nR3GLcG~ZhuHkX25|z2d zNq#KP*rQD z#zF*JVzM?cUdb7KlbITz3f*wv2+A|41S?ctFOUMrS*rxjK~M~C7TLW_h|N~a9KM#w zkUh2JY@iIb#ftCFfpGhuuyN54cYT*W0w`iHORaBr%z==pv|+H$U6vez;0W_}fe*Ge zfaQD6osqD~`Gh!lcc9i#mY@-LJGyfn z1n!5fRQQu)uX7&yebymDPS70HHV$KX1Uy{J8jm@?i5Z$Lh0qZ1czhy{>=KVfVUv0p z?Sk&ae(ss~#0;N1cfJO6Bai4j0xL* zM{wySc5Z}2I?)fnm-jPIs4jZM7-!zwKt*r0EO?Lz5$M;Dv2l*1?1~qhmC*vtu7ojK$tb3 zF0bb>eeXhhYbe(DHWC1uAI!g#YJ^f|Z}VCYa{h|d(Pj`ED=mnvAWAJ7vSuoy8s6<1 zHa431!B!9KKXvVa*L)sTZF1=tJLnoA*sRAHR7qGFyhnebSxIEOQeE7dW<#8vLy8sA zQ2<|29MsM9D3eWQM0m<~oi0ZnftOc9FgZO4FF`7vI_bBh$$q}| z>RZh~?gl=k|0qPZ6|><1smKd~(@fLrgFo9WNU#IsBR-QcOY(s@wQ}fhkWtGE|GA+qK%K~SvNhR?MW!R!Bx6Pfqik3Cp ze_SxgnOU`O-Y9sWNA&(V02&=$YK6t;*SzL596!B1AB!wN#$Ap~75@gg?Ci3kQ#E7h4+sGwU7#jBQb$Ye} z*)tWdeRomoIIP)}O^*gS%+2W*)8+uz|I|*wpYjN5GjJjAH9oIr?&FU(ZvgnJzGVpn zo&B}^x{_a~X$!bDWQASAkkYBFkoxi9~M^w(NtgvbyP4-<98At^y`4FQWmn+=MG*%UAlv(aDh zKDp8a@5ecqDDY1uiGd&v_ej4JsP}TcLbMHUVj;@L5@&4c+-?3n_~N0{9QeAM^N?w7 z6UIoBi&ts~K8;~S{db0CDq-j@lHyOt z9hR~v0-FNonEq~IScCtSfYj4@=)3lOWvFh`>?c@eG5$sCN#!_RCExqa>vX@HEfHCzjn88T18-tQ=s}*A2|CA>#^l=wkIidir5Q!p9yj`i1>Js7miJ?^(q$oQ zS!A2bOqav!iyg&n*%PRosrR4T+WMuw;}Nl*BXKIz31hVy;sDAzDt+VNYych$hq`A| zI&qL`Sn+7dUia#zt(w*5g$e5PbjQ@5g59&V7FSnEI0uzBF`jxr?l_;h=?v9^H~{8} zCKS+p(Yv?*8PbH~N~n5$5ZK_otGF!_{R0pVHrVhgW2WFCL-@HOe*?oLQDMBL8}tWU*X6RERb{zEJDj{RS+U6NSRZ!EmN=%@rwaEupY?Gc1m74?5~u z+6n{LF|7l!z0v2_07H#m9EshSDl{_lDL0(-pq%1c`F;H-)_M0!eNS%*&#^y(@pe*$ zN96clRzK`H)9OWJTfgnL{}ou`&H&k|ue9Suv--A@HkOXXvUG1}2nnQNNv~wSZxCOQ zDfKZLALMBg{Ga>B{2tLDX7GL3&Gz0Y!wefL1~-v`jPc!z>Nj>Q#3rVE4swZB)u28? 
z#Q-v3EPQ^Qq0h+MN1aDT0g(HCn0*TW+O>idsF`T4rZalTJ1GV;WCg9sJ?iOHtl~Mh zCAn&Q4@>96i0iD!c?%}4*h}jMCa1P}FE+>qmmm`9WgAu%kWGN5mb(E63pE-Zsn(W# zMa@I;4D5W46=e-iWPA;8<2wQz5 zz{b|RPOVm|Qtj_D0(vq<{*N+i=y!pYek03w!**e#6Z@OR*3;Sb#%g-RiM4zMfOyFfe`LFZwHX z0l+2Gjw4#nstK0rkC-O;)f9&08yh9n6IDMsxmJxU3Um>Md~K|FSwb^}u<3>-aW?pb z1oULq;0e8-(+nG{Nrh=7Kbo<`==Qa~k(d5JPQNUCJ^EM0>B*W+eBS(eh6-+dAvj2p zfhwb;3yJP!MM1gwj$FzAUtc^amDtM(ffv=c|YiP`2r%u(jP1LwmCj#CTc(t)xnK&RGI zm(oWdYfY!&@v?{M#oJU9;ZEG-z%vPr=Y8@T$N@7JsYXoWVu?`;O^DO=_lH zFzWvpxR`*-`FSU-s9$~^nj!3hN+SF*uM9%FGrs0X)=+GA#9O;bPHsRiE`uJ=Yrf59`+IH$k<#s?9J|>aHB6CRm z;0i!0Dd^}&m$c+|@4h8Aa&)%+Kns_x)_EZ+-^#>Tu^<5N)cJFhBK2cC*KTfJT<-8X z@8{c$3cc3ilKJwMbTcatbm@ooPU|HYzty2Vv-{ArgftR*QmTnCmr_%fmOkcuHd3Mo zC{ymyHu+ibkjP?}NZUJP0!f#fdZxeBBkL^23Fu!(YDnqfcscB!zDz0TXAs6oJEs>{ z>Wk67J`g&)9B_vVrd37Qadypq8dzr;l=UeLsCm5f#S<;!D4qU$zP_6Q~{`O%y zMmc@3TssRgM2-%}XH8JtHqY6$`kb%`_>#lJJ}QwEakpmY3P8GqLS5U1?X2=9jtQwwJ^tTo&{#^jTf>5S|oSX9^b6)AW2{Q7mB;#7ugc@RaPC|JrSLYJfJR zh}8eh7*y~1XJOcL3pu}|^~atF*& zp1wl#O0m}Lf$a^OBdnJ?U6e>u>!xz2F7SzKw4Kne8D~(=(h4S+WlZ5QO)DRbRKky1 zxG@{q{pW4&T#&~1^rDkz@w`YyjS*Rp!V%xnmJxzGXqAJ%?-&&_+O26 zUc^qNH|U2%4Vym%XxQZ? zTH^=R6e0_<{yF5+KH?u%! z@rkF`($O?#A5}LiF2;a^Evj!y2}2A-Oqep6noe9y%fbT-+hJ%BVB!*q=1vp6#j;q0 z6;13!a#%;HNUBvI+e;4ZnIY6RMcVgx)MJjQjsPm$tCO)BciqaM3sR9#iJULBXw~4o zO%5es{dTJz+?rdv!l5lp)V2+vu1IMds&`iQ7r&;6Srr z_%pLProK+rgm#winQ{SmXk<}K^pD4Q!8y$j5{-iICw}NuBt0{gfoIBXNtExf->bE9?o9Gi#IF7kDVYyI6H(^mQul8-5%Oh zk-CyzHLpCaQUlg~F)Fd*!~c5OJu^F@R2O2$Il7c-mBXp#4R8#N+8WoxFkz9{@k9tB z!8$x8L;W}vtO6?>7I1QC-AISklL&i4_c|0`H>o*_(N{;9(n`^r5eV|EWgAHqDLvro zxn;M6xr&3Cg-QUc=lVQyJck3BOwT}&XQGg-__RY52%B@_kY z9tiyrg5$lck}biOD11JVE=yjC6_i6tk|1sy8%8o$7aBI*N$M-%dDv;4jTULXn(uoE z3Cyq`g)M?4ML+xemPV!5`n6>LdfZ(ehqL~2I5&)M*K9_+J)U8YA4N*v`9>dxiRix? zRlePt5=L%T%R`(7#~9h;BXSj3E|l86K?32+_@C79$WzJ(>^tSpnlRNUH39Z^&jUH_ z^KlKgwEy(C%^&q5PPC=uj57%LeN5VJVp>YeF?-(N@>6#mqwf(_)Ous-3P|ARRV}T| zeX2DGWsr}Ja-)$CgHFlr6T;0W{bGqunOj4;T#QhCumu4F`bI4kMf5O{6a|SI@XF=* z$bKIm9)uc}s7{kX%7Kh&qOJ@ia>4Oe2)Mv)s&ZIfPyMtP z$IjElbMr9p&E!@6IUMEtE-JqM;jh8Od$5m3JU?s&o9@M@{K?O*KGHW!rIHKSKabyo z#K0le@!Ed3)8%Ua_|xW3^SdQ5;6-9;WZn13Xrkq^WZ&xY8V+hDbE^K+zQ?zR_Ju*Y zR;KBmF!@BKwF(0ex$Fli&+$$bj;DACw+_CB8-nh|f@QK39}n4OAq6I^@IvXeq_{X7 zd+xkytnqc`9rKCuzmpiN3;MSC{o`GWM|5*L$MJd3FSF+!;FF;w1$gwopX+K+lWQ**qU18=bCj~q#s4%BZ{~(^8`tm*>GivoN)?yXOb2?i`S=N zw)+ptGuJUIE@M-K~WMQOkMYEzK6DC zCzZ?V!OyPNhIKYf}lk%#SSXus4Q!v!iG`A>KqK0K^q7>eerS%0CE3~Nz6u!NMbheAyrX}=;*0@)zm`Tzb3v#KlmW<7egpSagw zyN2E$&xL5tsFgb+Uet_$2(o4NdKesE>Z+mCbBH?&Mfcnkcz+k(9+T7Cm;Owb%ggHu z=Gik3BKy!4o=RLZDx=l zTZYvx-*U4-3Es&jKwL;E$}W_F&Zp^VKSsu4UZzG;=<;;c>s?fu!RZWdLP_e;ZeeSY zQ)vAd`Nx=nHyS)?HME9(zMQ)i7}rv!U+R5+(T4^&&&Or!M&fQf6Kw7WfJsxt+a>zSp-mx8%=XZ|Jl0@J zFK9`!g#GihOhOX6`K9l2autSQwK#7U+mxxxCnyN(4dKza1zwB)a~e$sn=Bpj6f)En zq|iy)Qp(4^=F+?Q9337B>~!rGraXPipX#gbv2?xGJ`{R>#C*g*W6)D&`lMziMo1>N z3)(~Eou+KjO1`16I;=aACHgF$s7cnjX6SB&krte_#?p%( zUx3ih5nnNXAyp0M;nAw1d^zgCQfN5{)OGcwoYG1)%!bjSJU1-*>j!k?=MUyT9>(uR zc|O4jr>sQs2+%z-&CULvn8oXvAChjhBjr?5gnLi1t<^l{6V1)Vkh4?+VwZo*t+Ae< zu%F4lqe4HUgjk7933f^n#yd!aHGwP|OR$O=!gP$K5KybJN!2K>&6eMdx`HlCx09iu!3WFm4%G`hrjTB> zP5TtiCGXyQ9h5HIyU2VrEi86z7q?k7Hr2c|2GqPT+`i`;jWw)ve<;9?VZBIU9qUiX zPSTp`RG%Rg9ZMQ?*;|DoI%o$ycMbH|jc+rvAvjZro3fZf^=4?HKpz%8o1ViGQ+Pc( zE;P{^Ha0|IC72u-Bho4x8^*Qi&7kvBIfU|27&P{0x@)x3BO~+Zwd?7D_lp6Pl=8Go zdbBAyV@`+$y4itY0(bw%Q_VL;P;V#{033XR3KX$1Itt;;Dser%n}rKIO-xd4O`cHe zfWLUGY|AS@bS&TpU`sdzE+0AY5WZDNCaiZlHmUWPD+7VaPDCwaNH@IeiTmNFO2nH9 z00Fh&1MYaut-!14OFc_Ua33>sCpUBdG zP6&e}Du(wa*J>Fp^>mqL?STO=5^dxaB=Bm7ya505QBbU&R%po4wm7>?vLV=;3>2?m 
zKqTkT!pY~APj!+qvTZ)BukqDxRpYI`m2|Cg`bnBE9#_oPDLId?SsG=IIJ8hi+1j zwNZrZ=6A%sBC(4afQ2C1vN<4hV-elMQrITK7wph@ba2SMxC*+CR3YWug!+zZATY7W zKTTQE_`7U5w0R${WKLX~s$L>F+AdFZ?#>bG6k4Yb(VTR67SD+ohzD}PBzlH7w5Hs2 zsyu9n0ZkU5d{Yd|i>6zDnco|7teZZsl(_~bJk6ae8y!xcwdlwCQeMMItHb`PSwHCU zce{fdN%C1m`6EyDMN3Ig-i|B}WLPiD6L`EpQzOC~Qs{Pjz$ABYyJ^FIL;^uF=!Tr9 zaKG`RWa?CZ*%LZ9o)TF^n4aT)Fh{8wP>SeIPsBDy($4m1r*pjR6n1oV%y=RYAnBw0 zgnU|?pM75)Sh#-@Ne(I7IoPJNf$>4I^K`i%Ofh)B!YBtK+-tMD8$e1MM+N%vTIV@?U1#twIbw0zp(OFtefa@N2zKd!B(8gN3t%2;BO9%1X7#nTf^GC-xafn&+`15k5}W9*+(kn|NOq+ zU-$^?EBLvT4gL?_NH_6(9mvst`oFAChT|Obu)t3_Q|K9u1U80txA@tVqf=7OShj7F zvgwsDi^27w2^HR5vT~OT4=_}_FY5LNqE@pQ+%}q zpXl7fAd_Ia-E<~-tk2XC+QuPgo{bug9XGhCpBuPHZD6E#xUnTfSA2-YK55xoNQQJq zqC!$HooApbsI8owl@e=;6zEGTLJ}nrooVQ;m>So}5;X{Q1}?Ivjshg}JI>n#L{%%5 znuT>iv(Wiq*a|+v^O3DBDEYrr7~BNtmD;E-URn$)yJU~dOS1~0+p!-B$$~CgTc!RmBs9BiO=hjWQ(e5Sg zpszF77^s;5o)`)L(X(Y6yOcix)aJh9dw63AxN%#XeEnFK4pLNirs{wlvLSdNcJPVY z+|N4{ZB5j>^AD;|8jN0)sA$}-^c^rVr4I{HK|ws5&MS>YTVL{>BYR&~4drsHF{P*n zO+|`fqvyANg+vDLoxX-Qb`gQu$`(hp45fDH22mB z1&E8;Bf-O^K@#j&ze)CnaTMEZI*7<~Jk(|zGa>O*wHd+VG4 zS{3qBeoBq3yY4l4%x*;A+bA8rkh22c4xZ;scn;$&MvhM>>UriKvcP++nK5DgYnVVY zZ7vPWF`e?jGv%h<_VKZCw2_;9d4RQOv`SBB(73UqB=yxMWbK{y?)7*>cb~32N{d(k zf5XlyFTtut6PJ;269MVAVTeIb1YTGx){siOBs&+({|-cGPxAL0^D7KxWS|7 z*3oloqu^=j9-=M$v{lgUE!LCS-j1WbCYYS>InOZqEy=`n<%U~q6vH-nQWorNlB`Z^ z!Al;vPZtEPDwKq($DRv5Fx@`o!Pgl7$-Bun$nCPqj0H&m$|ilV)Cvxjd?=gPeW0$J zFAw+_63xZT=g}x36=&XPETGliAHGg}8 z`AC8bMJ41bkbZYf$gp#n=QZ~3SSSKuZ1oNlvBsJ#AQhYEr@l4)6dnz7I*Gh0KfS1wBt8S3haTY?s41=$Oq*>^u?$Gq-EAdBwO-iQyg@u?Iy4I&%V)NE|`TT+^ zWOxh623myaW)ntI!O#<`Cb;)aKqa-wBmGFca_fLMq-*`i5*&^cp{Nh*G)R^nnujDT zhQ>*9GGbuS2-?LNpH|^N)oh`0s=IM`^uGdBAVFrmIkcV{A)}>Fx(7bHWwtsbxu}vW z7$Ke9aC(B!1&xf4r)J$3B4E%N)4}tRVAryH#RB!NC93(!9{p(VmJ(8F$n&9pTpFQ~ zvlN*{?whMSOOd*cWOh*#EmqMtP%~3=rgf)aB}M3dvsn#Gw}sm2b1mU!w^5V0=>Wb= zdP+*dUFJpspRy3{tZCPWBQ212Pp@6>skH+>y0&4tO9hIH&MTmkijW^e{vK zHO;%pAVWwj72rNQ2Jn#@+Lzm<)ZxD<3t~fuXoTpnZijDNVUg>(2l={P6XS0@h8uE2 zR#K-BpCZEC@&YfJr3nmHG6kjo*2msKKaN@p{-m`c)6K#tk7JHWhxaj>j`{N4h{V!m zkE=ldAVO&mWs0&b&EHWA6q6PM%E$fE1*-Z%q>ZC2%vD)884_I2XQ4N)x?sCHw^RqL z`GeBk&&V^w{Cna#LX~;x?(Qzo?n}1$wM-IgXi2zGsBCRB(ixFQx zpzCy!iGkQgqTAABKOe>4VCDp)!WQueT{=>Ne>ivcf_h+<6jts5eHZ&*v$gTh;T3(8 z^fg1^7;^($EGf&e6TdmN-?JA*NVX6xF0w_gI zw_%PEYP^|WHM#gNHCqcYhpQGZO*st4I0Ehr4J{n) z^$tum(zsj3RktJ~$(XsJQBRep=@DLGg3T|X*8j_q)(k@7IDI$6iqgDRWf(x>F>42j zr7XCbPC$NbC*am|aPCOhqcs129p}^YG#m$w?^g377Hq>70oPsTp63rY{ z01mLe#mDPiLtcvp?wHP{YMhoB2h9N!(KMIjnga7R%!atw(i}q))MJ~71cuyrd8+(A zf_<*(uWny0Wi&@o)OCp89x+Z1p!zOtvv7Do1b-9eUVSXV=(;`XBiyl!i7#7%g&hj` z`Gvn4lq@#rS+cq34iQg+SzwntgQ4%zO9o0xaL}N-Ve7%m$_#X7rgM-BHQwe)?dOk$ zC4aWyHo@Y>Rv_lQC4A_SdrbQ!DR9<~PRWdtKJU8y$1&}}*cj^qS$I{6^S`|@2h;IJ zpnG5{*)+NwC9b`Cz=!T&4!6P4V<@5)eu6gk}nrcbB;vyd9mh@ntI^+6I0> z*{d?)uPW#|p`Lf;dZ?ZmfJ)ZfhxskXG1!t;+KBWLx#Z&Yc972+ znQHw|;|Y%-!u?6)!zQTLx9$K648cCUL>eAs&69J@(i|<-_}Mi0DROK>_3McXjhw?2 zDsSWzG|>o_Dx4Ns_L4!mQ|o)U#_XzhkdeBCPl<9!`h9A4Bg{aQSMV6_?@rHPbB{QD zuhoU)749oe5h$RroG%Zcys4kaX7It?Bn25Tt{{))kk*TKo;7B|mDGnHLN0&4ri8DV zv?qq4zOQ!RtiMU_`g! 
z`4*M$YgIc7h`0CtfzrQ6?KWzZefDhJ6OzaC<+H%DU`E+O%x+exi^OXl}usQdz zl!OW0+ol!c0Mhc&sb=8)1Zd!0WVo!@Jv27M=UaF;Asy0(|#X+YlZ$kSl z3V?uC>;W6qq9&cX$iJsl%!`3VP^&>#sxI(XmQ|iH{FjAy>Pr>Pv=}q%!hg*eCN4;A1EozXS95G+6Tj3MycI zKbbK)p9EWY9keJd_cy!kcK1K|h&*Ci8(rKf4n{pN(K5D}MT3jWP^P$rIktS-lO5}l zGTot4pDLh%G2tlLf!Ji!x^X2?Yd)ip>R#^_q7_Q?;0C{sHSsf~qj3PprcOC3h8>58 z{k0e&Rd2`e&vUC!eyi2;*IugvC#Bm?DJmX{;e3zEY=38_{>j>HZ}q#*Ym_nHaGusv zLy*v6iii(TMq9Kd@V9;DSk!L@>H1y>I&=MwM^t}qk)q?uIYO&~D+UQMI0_;vue>%K zCJ})??4)10i{7bbW&;!R&jdJI0-8H5-#+)BL#g9wBwo`eN0ZIyx-Vi%_HvbgpuDG# z9#p0hNt5Ayp&;U0b93Xluw@78@7{HZe6+vq&gbZn!uco_blWNOn0I-5xTgG1sFKu! znCJ|(jUuag>AYIR;EM}+Ns3&z1Dc+UC6dk-8QmMs_!xH9Y7&`AcDsu3VKUXF5gyW; znyc)uF_E5^F-pT@M7q!)#<8w_S{@<2=|=w9;#2Fs)*P0EQUQci$Sz@ODbDCpR8=VY z;k#b4s_m9fJq_z`?CcOk`#VXX(%P*Fi)qXy6(4A&^op_)UJD1L_c>-&yP2{7=20)T zzyw|LkprO9@H*{i0VdB4K^4B*ecT>al4^n?QMhY6DJK-chg*U4s6sZFBwDNih zmmyvbd|A77LNKm+1e|hxPgxKP%=r2uS-U@6@8i)Hl1pD>+R~;Q)VSyKkj9Qw*v7w& zkC&!9Z}aVwdZYgy*+3GzDGR}n-YG(ZD8jP^1KIPrX`#M?leizl zk8KDubga#y^Rt_$9M55P9>V2PjdE4_NwoV@Xdi~uxZRgEL;4J~7~EV*ui_vo@9QaN z>rnSaYY(!L!D3hq(uMo(R{bLIMy6zcbU>EN@=6OF@1x{6EAS0nZ>p`T&{`nA7JLhH zO0xa5B+VL8L{&I!ix96f}^ zL{m46(McA1W5)tUCJ>tNCHB}pF@7&jmzt@vqUc%*~^E$fw zAm#$XOI@aZ4-(cV(0Fb1;MXXth{N|!Bw~P<^mc0LGj3rzHta}u4zbDhLF~PkWGI3e2cJCzhB+=>@u>O) zTQk~B8+vcSZBH-8$Nm#Rz(Rzqlvy}TmN&P2sgG1#c)&6i`PGrvbfay_k-QWyYAU@~ zbcDWAv+k9S0cTz=AkdewzH&H}KoTKU3c?L4pUifJL7L7c8}^d6H_J+|ihM-7q4w@* zf)v>`0Ub^brH1{}m-JPlF=59-2@or}hI}PW-sjw;iTzaKzU>t6u8I1t3Vv(&Z`W+F zbdJwdlxy3Ma8R62fhXt$kT5y{-+8hX5S7*EkYNvz6EVmbrBEJRWXaU|@lwS%?RfoC z%COPs9q3I$jcAi|x{UWYs*r8FZTdQkjH%LeA z3F@ss8UT5QMyyF~iIy53mU&NI@1}lhq4aEhD>on^)dj!7P3FPWFFpa0PHdGpviR0} zT>kx9NCSllAw(ame+Fg^-3tfzmnGX4*`Zd9Z-bOTyeXti1RS2PMY18}g7$*cag1Z& zY1Z0q$Dw|x)c{-qusFaYbiTtAii4BNQc=cq7U278Qcy_!hw zVC^l5w-`U~w zP#-rVA5)lm+!4{q_=Zjy;O9DB=6@ZwleugC0+r1JK^E84tF) zmx@dRhN(Eh4aLmw+&`D?N~zFI5%UYnnqe7Fz|9m^-5CVL*Cfy(2#44X3wb}lS{b1hng@rly6#`H3rq_ z0E)8I$TNFxYkKT3jGVK{6pS4tr_l_T0T)|{Nk z=2u`A1#*ANf~y1ILvX0gX49XnHK3!AGKxp5)v$l>Pb1$`BVfO@u6k&;r1DgV2CIxI zT|FH~>5#!$GeVufH2D#|%n&%oXaX&lmG(@hg+YBhC9fr?DUl1i zH4|WQTm02hO5%mxW3(qPCa>qBRlNw*4646PQMAljw}sc7*m3U-jm}ONV5@2YD_Ou8 zbF~&@U#B0~>a(sxTT-s)Fdrr=yx(K@1|35zw#!MwX(10-yQ=I!8ymx#ZtZ288U zIPXz^n1AMR39*UO`CD|nC;(fx!_)anBjWxOb}4V#{hU@ExJ^*szHR!x&}`; zd~xRwMIBg|9QBh?H?!rnN;U%3^RT~2f!5mk=D@U6Fj~|`j3feLP=hrkj=SB0VlIF` zB`8k6aVmC7@(X_daeN+ACzZ5` znn)ZM1=EJ-z8sTmzilh`0TY!m)hD9gJ#?*T=x@|dL#JrGzx!M!R^hlmC+&}+85Xh0 zf>+8|R6`Ut^muBUpuO=l%eZ>K59&?X3B?6;f4Ycc?=Bqhz$4Q zOvoDaX5%-k8UbB8@qlLHdhOkwyxa7D z-*-nfaTv~R=lz|oXJ&m`cgYFmia0SXwPR_$rz5+*#YHdo7H^qq*?Hevws@?I2i&O5lO^EPKfH+%G zTPc2}_~fA%f8Q1`&^903>PY8Yww?MoBIK&k02{PgC}3oY^%}OgN!X)g)?zuYF!E`F zrBa=TSE|6A$~It_e%Z@hEeoAEx2X`eO%LuRw0FnsA(XarJK>&s^wT&2O^O8vgj+V) z$anOIL01ooJvw!mhTK*^Bm}u@xq0ANJ!Hsr09( z2tsxLTcOK`7&M=n@)+mqB;W_w0KXB!QPq9X;4^;4UerccU&Eb2k;SdtD2e`}6vkhTeb$X9#0 z7znUR6)e;}w$jlRiObS9R;*d6#sue_GsTj6Q(N&(#;`5#;rTqaq}&q@$RX(V-fe=t z>~LOd3K#0=PiN0wHvb>|n#I$R7dhm_a0xcK;HzjRlA~e^vxro>*5_VhW83OAwF4%p!Vq!D>MZLLed7WWk0=kdV9vni8!^T6`WPG zZ~~LkBc37MeQUHkg}}JbMCw4)qrB6rJkLP#zn*l>@~h2-s}7K4>N$ql@9K_LsgnH8 zddg3SfnwjQ%$BfF>U|O$;51vRMR%WJ$b`M6N`u2u15i%o$eteT)l*X6S*<>=Pvic0XAE~+*!HLbZ(R} zb8*-~hb5ZO4jsRG5Ju`cj!&vIE|=$LfxLRz zL7kZFm`nG=4O<0Z0>stToh-fs0a8xX6p56HvV(D)zVE(+$@*)wEK*I+X?N* zh_L{u!EP~I=LSS;vnZT0y&9EL0M%XdteQd}Xw61kwroyAf`2+U+!C!>*hRt{@?d#L ziXYaBU}w`pkh7$X-i9OfNsYlVEF|;0gLB3ZTAoE_`mX<^3++<30bVRFuZLn%NITwo zJ=2fJFp6{cv3v^7{F5p6#?kszTcq?qrX&qcn|d_;j8nhsC|Xq`Qx9d%a82`q)Vyf0 z9#KpTxC6I^G$G1wJ`DT8YNksEJ6g6k5?-AK2UHDEDMP9(IJgu~Xjft|>tpXhc^t!R 
zvljC=f(>gRqTj)3I6UlQ9NvV`%HVuUdIq;yWf>g{B$zALvSQn>Qeb&}az?n;Q8cjDTS7k`nhKB^%aBfQO zO?C5SXf)SFWZWszfqIBtI5MSAnnW&2H=Ivv2imLs`*?OOr5B{qi7=4qq%6aM3frZY zp?l0B0P!V2jM#zY}2F5z?hm} zk%~swrLF|f8PIucYo_TZmT&iyM832xGqKRns%1Z z1UrU35tN;xn)*5cllP^*)e8@Ibn>}A&@d^1^J2&ReN5$Fq3DOYQR#xf9H39HIS&ZS zT;S`@(gl;6$Rbe6mf*ipmfv3KS_Ij?d${%_7#D?XJcbR3oSXhmg)KMGF<@JF<==W( zn>_7_f#^`iMkmduM^Cm7mJ1nI4A<|OP7$?~s#B(N*<6<_VJ7_9DQkEX_M0sm%D7~f zPM(UN@I6Td$f~90v+jaO6k6?^W;*=1MWHnZwb#s= z6f{k#I1izY5_CB2$azb}lrv{oDK`_(rsG2A6_u!O)fBb+dRIvhi@>M>Hz^!wb!E5a zX#L8&n38|_Qg^gx3@QAexDKwPK;-5a4~HlkB||^#dCJ&;Ko^;a&rau2UG?=GwSFrL zV~>0FE6HC8-c_?U*lq&<4Ou|!JxhkH8$wEATVa0Jhzmc|sbXW9tv61)i{`Ym=(p>S zAOSo9cSsaGxM{gpx`*e_Eua!sFW5*SoUmW{TYi=xo{^4+^|qTrEOD@IPSfvH>q zpOhZPhA>P#OpfJ{c<^Q0Glqbto+5GWeo0J=8ED$?I_6>=gVEtHy+~~@x6nG~ms6+; z?w`KwYNCJuJwU?0iPCXbG)f#-<7|47`WXIH16MWcp3Q!~xi!G7Xs|*j5ZF?TjZU*>kdHOX!zhPOFx< z7_8JNpmkGs(5R@I>QYkHme=I8*k7LgGLn&rEV2=e>jsZKbJ31*%sz zu;0d$PioQh7u&cDwu&N+j^FT$Wq{I-x1^FOxKD;mX^SzN>2}|@jbBx>P_9)V+d=hn zG&G(bf9$Uv;)092&B4LDtO@(uXqXHh!@`w}fktwqI=R(WJCO56lZ)2si#hA=L|>-6 z&2Mw#RMliL>`D4&3)cY(TkSSNaM?+W@$QY5sne$$5zuu?Gr0k zZ+gLf^v4O8RjebPMIX-8@3u_a^s-4@gk}&u;=mgUQc!|2Zh@Zl6Vbi7#gH~2v_94& zixL-T5JHR&mk|CDZh?Odju2}5ZJ>X5a~zOG2VfSUU24?EvVTWRA}}7+ zy9whIdu@)Ehp+{4KJu{2tCdAfSC7<{Oe5UYf&EJ;R|zEeZ4+C@UT=m34f$CC7sI;x zw)w~U{EC)=1f){^LL+RI(BBj%gX$1Q4$lfgZO)c(j+}}p>`2$YIWzItSBZWZQ-KU6 zgC;n5A)#@gEW}Ov%&yIoAqHwq2oN8^RfC(*?i%k?G;s99s5D2haYaFtqAEu(FUoC` zo7!GN<9!uAHjM(0sF;b}2In{}G0m`B`HcU{G#^(lmNRz_b0z^x9%c zQ7LpPJYkK!*0Q;yXjp^8`%FFf6%U^1V_8CPR_M!XhmoJ6|YyHR{fv%AC zt+G)qPDx4Xp-6iONCY45re{vPiGW6RFr0F37T53(?caV1aORCtkiOm;N(9GrAB1E= z?Dtpsn<>|c#efWobiL8;!%Z_JvU);74(u=^v>1Q32{mTFL`x+k8!2c3O@BHp8;jxQ zuHSS}vFEQ*Xsx1mv%OY=1Tyz1F>YP8YvWr2%wfqL<}z*41Vz0BnKY5jwTLJF=G!eo zv_}>?wwT10L;N;b1m?xey*8p`p5IEH>4}wn5DP{KyIzh-0f)W^Qy+u#rgO>0T8;LX zV>(mu9j)|Wc<>1zDR+pg@tI5sRfMgo-BYb$L7D2a_Qu)ij7A%fG^qR~_ww)SRpRaq zd(B8Vf6ir8-vFTXmwm6rTjpa_hx9!SF{C?3>AmsFkbX zX1jmdlCy^%^<`;NPD{j3=% zGEyO&<=Z4Xmk@agpT*W|O4z`P4^3ng%)q;x<{ycMj&PJ`zqrZ+i=wn3VWMg)y^eBq z%joJxt&I3RI#Z)=&KcN{SyUci34a8q4**T=sI9cQn73ja5AXR~*R#lxr(sHmBySO- zT|epXXp~s9i72{1jZziQb-7yO@A~(o%>6y4&NgQlY{qVbYhUxQB|59Lg#*;=`6xd4 z=9eeQ4W4Nkrt~U%$9=2wLt;{FAK?-WVogo4s>g1x<)^ezd_9M8yK1V1E?Upu$F%B# z+9eYC1stlwHO)t`u@zRiw+-SeROb2CM;)9rMxXRHkOCdwEHpIlCR#UiOU{ijkS%#r z3iRulCxCn7BkkwhPzxl>nkXe@%;RId1zTX#DxjXcjEJ#>a2Le*n@OAm$&&ObHP4x% z$*6WXhr|juMxd;Rpzo2DqS^-cro!1W0Nc%qcblcMTEp*hE7oAvbI~p=1&IX@iL#M0 zU)ha^Kn;JfO)3-7`6h$hvw|mcUoG`H2!T`&a3icbANuH?>B6~aUO$D>d<^m7=?U+ z2c%E7GJo1%#GU`GBl$BnT9&CPXdTp`UkXJ!SI1FLY7N(zZ9Xm)V1^`p4Wv?$?zdS8 zb#d2!w>f!N`Gm?3n5op4rJ5%#tJ+M#A>crz@`zZ#%$%>CVM?5lbyw~&j02XT+`o+X z&D9n8X03L}kVfbv$bV3y?cM#(_^obXlpF)9+Zs|20;e`<6Q<($1+%AXK6zo^gIYb7 zJA{3k6t)eY5uh z(NO;p?YW<5qp|`Ulq8ZWjXDtDc+dp8!4<;2s6!3kZT@1)iqvw4M#S4(PMDL&t3n1d zs4lG!CuNtiWl^J7JyoXx;xVYDIlJR+PB{|qQV$RQvK>=1oE_jqMOGIq5e7e{>^zaT zm9JUxLY~HW_`iq3j8nYg{@cOcgOjydb+>X0cm#&vjedBUNPcb3S*ai_vRI4PZPf)B z1KSi3-V>%xnyu*Jq&y2B+%hU=Z2J(d##hi7LrMa|RO2gE)h@@5iV>rkVAM5IbC;_5 zV30^FPLLH-NY?RGM*R8-p-g%sCjL_qTxC0iYzL8ieY+f8vc>s)M48+#&S`){BmrK6@zFH%xBaM$p)imt z^q1jmY#i43a7y1~Ln+>&dsy`{se95n1}{tSRiEc=;InCK1Ez*VD#mUO%92d>pISe! 
zA;rn2Xjo1KMY!c2ObP~-DJw0d&mE=(c2TVZns?ChOvfY24hZ(L3t zIzc~7Pa6L=)S$vNBtb-rteX0pk8sz$PYrp^X14;Nc+4*t#SRCTM7=Gma5S+%^V|>U zPbApb_goKp0(BezGs&30*@4JTMJSqy4U{7$dcnfRe-VKgN{2Wst=JPPv<6Vj#va}P%ELe z%Xu#VoqC7Nq-X><^ahB;xbN$LvFy#grKhk1~)oRN166T>c62k6GRnoc~nPRfrFIiWRj zeO`9%HMuBmJO+W3p7&wjP-1lN$!4+$Q>#n0yb0!(ZR%|Qph2gTe{eEvkV3*R8;N@8 zeCC%X$P;_|VU*CH82CacOGF4yVdoOxWkmFbJ!}u_^^&kW$kRnt*Xn!4x!r73VGDby z7M5=*od$`{ipnwC!)+Fj8j$g}_UO zCIyx+L$OxlW>E_mSC{_n4}%2vwx8dmSewz=pxd)x3xNvtVZkS`84kmLj?JbAoWRcD z8g1RQ+?L$d%2CL8D}}}7DNK=ug=_hG*c^^ttiE{fPfdcj@05M3B~d{;1F|usPE%( zp9OZd-@B&S96jr~@856r6aZr9r5h1lm-m4^Mry;0P_VniK}r8Ia&aHWcn~a388kT> z9Mbxv>8&=q0GxA}hlq7n!{9le0BYU+6&?xLP3E<2Z)C=ZJ8hoVb5XxIT;ioVPNkR7 zA}YM0((%sQS3Pqi$C-STF{wnpphUdZtEO#IEAxCmd`#u17wT5NYlmZ zLRl<4vmcJnmsGCZWA>`a`T{;^_J&K$tgs^pcUdA!k>&w< z8B4rXTta;?hTZTgA`JNiV%sxWWnLcdQZ+(^ZndL*ffd+`l;41qvo;h>kN7rno!Q(# zPP?mYijhu2#KP<-gR<~oeRTJXH~0LNzUozH@o(aXlk7FET*)HJnh!3-9}mS}wVGF8 z3G?G>ZdJOX)(nL;)BRC(^f|Gnbww^?8-$=l`>~W}H`k#bkJ?I@Vp&gaiiz1uaFB`9 z0@#yeg%9&?G|`D2hoMEfx&F?xbu9uUiIrt-&o{*FT-M+~{zvfP#w$S>e8FN`nR7@F8_9BoX zTq4}@Z8<|urbbDHM2v({P-ibGeN5AyM_!Aqo%Jdq4YX*vRllFj3IJz5Bmy^fqzrAz^qjqr^?b4o!1b!U9*&|{EZoa?~g>4h{4EWjO za184I8uUmwHVUusy*p=3h*0r2@QjD%8_~67qCLO29#B6|IDk;R|MEJGk7M&4q^a`L z^tW+GU<^gN5DFVwr%_!4WU7M~m$5=0=nPB|_&JGR-OK`n~YA3Bw=y88gU*sm}{Bm?S zo|bH-(PcSV0z<>_tcT#@Rn`mn2~NtQHY5_d;m4Y)3Be2D?d_#`pXbv(L?5C!+4_r- z)}O||df@tp)Jdk6pjF=8lJAg}=0aL8#zo|c8nnC!#=p9xl#}r^vrtg+X$<8h^yQYi ziwsQZ_$W~g96AD?WH%0V!Fa21hawbgH>-aVo~dI|uYQ{Sx^Lz8m}v_rPUjl>v`N~a zJv&C-`?_}S5u)(V-waUX1W^Mwy3?sExv0V+aznpUH-OE&!sjMplqVJaU2Vgt@v7L^ z!3HeT>YwuN@GISEzFnKD<-jMU0o1z_8h{>_hp?!tX-0AUNoy&ghfb}5;p>dSrWaozQ))%8!*w5FmAUA?Y>gjIGv{=YU!k%PQc7*<{q zG56BB#HJ~g+&Z2ACF;T8DmH>t8W@t3^a(lkFp?SaxI!Z&pw$MVJZ}FoV_r8=Y3*3N zqDW>r3X4LchMZ*6(vb@GL=<`$rvtjsjuO890XnXou9uf7bXMsJgq@`&*3xY$uw$dtmB5Aj{XNtDNlQs(?y`MB{dmi&M4E9HlLc#o$n8&FzGtFNj`HsM>{nQ zerbKnPyvni)uH}iNf$E|d8U$uFj+$n$20Uwqu1*(ix5fF>)&>SR0kgldiX$L1nG`D z-pQq=4*Ok0I@MZ#g0V>CCg~AGs^Ott?F*nTv)=BXDQHN!x4X^1S~)WHh2BHyvNJvf z)#Mu*xFYqDjD`-yAQ>zRUW4zn;`bAgWVD z;T0&ABnUDIpU39Yml_XA3JDN`Ec`TFIH zw3n#(9RJ8gE?HB2$IH0it&Q3G8Zg~y*BE-`rghB=vvKIW`gY@MxQ1C#@Ji5z5$7b| z#;h2RC&<~IifEKymFl>9Z^`!6n+hi;57Ic1;47=k`tb(@*lLzHJf3D#Yy~`p6UJh7^cIB< z<4*Scv$(+O4!4`(Y6l1)0@NgiET zm^%5lY2eF;B%q}n;w~pPQ4hc;g0Dz4SFevZmgTXgsC(%*oZHl(L0b;&;2y&L*NOsO z&a8Qtu{=zgxxs(h`ffKDOf0Wjce>#3-mk&3hZ(m62M{s(<{1AtYkS%co^Ikbrga*t z^Ctnk10ZsE9ryN4yByPR+rEAIqP!w0Q0d)%bc;X`*=AZ!7#==jH?5T9SjR?5$+_sUzQh+7 zo!@S9Cr+TE-Tr(M$%rGNT5+BHcs;-T%3!oa>pmtIm$PW2v-CG&Eb1Q4CN1;&7D+He ztk!*8d(SJ836_4)6x5TAhgWZKKe&?3Jet~5y_2i>qD00clDv`_AcLo{;?2aT+Z22% z)KGeofKpcE(%ELVff(&Y z`y+*)o`T$or1OyltQde}&u9)~m_v^|flzL?aK^tHPN(@t+CmJxz@@%&JNtGoBEUz}3u@$LMsZ zg{#Ir#l=RD8snIf>rzL-@x2qOLxkaQ$~MaJMBM@k9-m2mB*?ZvuNp#axI6~$${*(w zX^{sbC53uY!F72}pl!T;$~kH-rk!)RGDR#>x94YQKiYkl7v|nr6V{4F7M(9kC7ma# zVm&ob$o59{J6-Qe=Ygu>#jCqPb%(&9W1i)oLMyZs^;1{;d^te70e$>H-G^HkrZvpH zlnH2arkwMO(rCu%Q=TG3L3nE#@A+Pj1$D|38$+@QPZ2%nJ(XQvP#}RBQ2L0IsvbW2 zwSI1uP|S3olOA)65>30eX?zzP!OWiyahEXlV+*0d6Bby#*+t&k=e7K4WxQai^t4x3 z3H?)v)f2z0O&(2s$@N%LCNTvuK4?(=(lDru|2SAe-t-1NpPpL1^G0G)i@-N>8w!0g zmuWPemgD=>XptDAZq1k%wVII}hs#ePjLK2u(>2EIctX<=NU?EuGum5bvtP<^Yybx( zaY;&&{LAow(V7Hp+rM%*v#=7}ro_JXtecH8-jA6W=>^O`zZ@kbU;tswRZK!CZ4kJ& z=I+j4q<-}_Q%qXD-6+p3H)Igt@kn2T zS@bsny3qM-9FubgR~ou9v2@@Dffi0F^M|p=q%PvGefoj-Z=W*~7l3fj1iUd98zK_G z__7AZr`fX>EVf5|Pz)cQj`tp8{9H*-BnWzG^w2cX8>72m+twZKA?>To#ji~ZaL*x&tsdExiQ?cg;xtY}a z=HTpab$xL>9I~Sg#a>dL#!tP|e_{EYp3sfj+_Y;u{CDbcdwec%8%y$DHMz3&E@Yfw4Dv`!LQ`nhrc4_d8=mW8C3P! 
z_H~0TcHV5(UY%}vC}=%n=;O@~n}}ULoS$Q6R8#513fJiK)&AGRiQb?c__2q4M4aoy zo!HdN_C+UBgYheynCOm5ru$t=QOb&gwAEbH&ZGKRH4spyTZcJ%+|0g8H)tiHYdCms zbF85KGw@zen-XC%^x}O+n}hw%3+P}Gt65n}qTJlM#%7#ym!77E#h5JN2M@exuoQfF zfNJ)zb(YcXyqAI31v+{mXWUjPf?j{|GNNcjdX}NjS#JxRwraa--xMdo(oK zaD&1LDEXgO?U@p~#GC^EJ{ioso1;u`NDshFa{cHQLRFCn@w`@#E*~Hn8t;gOdE2x5 z!9$P^zC44@e-W4z9}wd~xD+<42R7F&M%!d@0vZXsGc+b)<_;T~ZFi{7Jh?iWm=hs$ z!WcK3ueG%S>k_O-ir7JPj;k*SPtGK<2!{0?j?%}pk~3ZNwn02Asts$rOsAnR8KyL{ zu#vyfjLu&mDwSuwKSnsp78Evjn}55UmIxdo&^q+?(U!AQ5e%lL8?{4oP>x_tRJ~?W zvdyT+TfLmeAE_u1kvLw_fC3RP?4%1MnZ>?oOS5{dOJFLv=D+hLjSY5j zyq3f$i_@ozN_)D?;Xz6BRVbc95>2=T;xr)D_s)avaEJD^x0cAwaaZ?1UyVyGZhif-^h4~YUpp1>Nm_-#I zI!1)qh*B3(k>Xmu=3et51T@sO+~Ek@N!2lVd0*9bXf2WN);%_A68KFTmN($0cK`RH z&y$k2K83Z;qs6y>L#`5Y51ylI=FcXaDDWWLzvPX7-XM;DM* zqt|6%;YkY8Ve?6gJGWx5xaG5WuO09e;Vkob{a(l=44rUU5Ydup+SFgV0jOPLC<>T9 zG|_;V8?iGO?#SHZaAF8U{b80zYTR)iJG=>9H22hb!BHZ>z0bPdqGZbBk`MGpdrxBU zArioROcNC5zFSL5JRt8n&Yk&>^0ki4`!>dOhbyMO!$Us>ru9OG&i(XY0l+|0Z=+4Y z_ASs@Jzicy{UjjN^Q@!=($j^KMpD<^b!C^17KZ;v=H4m8X(pxejOk>g+R~tYibmg` zGP~(O%%MIL>*#gD$ys2jxQ6mC=$Z7m&$-Sl@ z9td*Pz{NRtd}OWg^Xnu>mil)l{~T6#gfRFQ>VNX)o2JiIO^?Wp?=+v96xWkPAm~uo>YlxR@tn*kFQ<6= zxHz==>q?&pG%iE9Z`|g_-XU&ab!woB#_~Hcs#yLKJt5esRfdDL^#<7zCM{GFOE$_ zzFdw%YUn$KkJqt94M*Ww82%POpSEqd--%(AZnGMJ*GOiZwoNCKqBSe+CimLcAstqT z5)QTS69u#CD|}1ecg zN)seY!4^$-TCR-Vzk-K4F5i`(BaYZ~HP5^Q%BVLWsh)>Cz=`sZlUw*h&@F*&VT;}E z5@hsBZQFtt)pECKR!1QdZJ0Lmigt}5O#EGy7%GFWHB6=L?!-pXbO=k!_JBFIJD)BT zSnvN{$O0N6kZ8>`4r6xrg1N1;pU+m5it+XGY_~rBYm~eZ zvUvDY4>8*a@n1?PJhubWdX&-JOyyZ+kV}+71{NhrNKXu<@_24S)J>NNBdE2|0#l*G zXnPfHbj>-@u97oxOy3mS@ES|XkqH%B7*zM2Fq z0EB72jM-VtFE7Ul83ga?d>XDvxXn(4#{6u|r`ZVOlC>OGCdH!S_>WXK{ru&N3;4rW zE6Yi-bTQsWM5vMs&3ga4=V%0@IhTMisSogHAwEUJ4Rf1r(9%xh zw`guqNX%-`Wi5)6bHsSU=7LHdcuagY>IXbmuJ*dBCW`PO^tVnvu~ESx;edBQ`W5Kd zCW)T>()mczOWqf9Xdrr=M8B{dw;_}I7)0in>XEHWza);wvh{^MaTk#`>WXa)hqj^K zi#Cg^#kVX?w^PA0xM zyKn&XEDPsrFdioLa|rgVWNuopV$KQ_N$G0?tSZSs_F6_+Gct5LNr$Js4ykPk$)9a& z6I-N|zLjiL8Z#S=%j~s%H@j)lAj3zfkiP#fVehisNRnjC}97x(WU|SX%`A8QQoA8O^`yBwZdLD zGHcoFHi4ytb$F|ldIHw)uq-w2}_!~P5kJ~QIpwT*-{npt(*WayC6Dbaj0c8BIuQ7XyU zxb>Js-jC7V>kxlhMqm(yM6 zsPjG>j0P5d4?<})D5B@jUYipePz#ORBLs+qXCSYvE{y|2^UX1b>l7d7nU+m2S}7Eh zw;IMWSFT|&e#doPFE8q?J@`;k|Ah$cy35kObJ)%A=at4ZZ|WqX1F%?2aLk~h!Dx@h z%kAP!vBfjr?>FJ{sM2rQ!0>lAsMBK8Va*W$Y!+HfF+5W-847sT$SxqaB;Vlf$B>r|cAJnPv72O!hYDMWiNd`?3 zkgkkKxXDcm0{_qi;{!+fSEg?`Wk!~1OU`4KQQ|M*{9!RcWK8mk^OKb~LR&Rr>bPGy zoLdutt*}##?xPKzv`Fg>{o9CIv%TlgLBYqr&N~u)@V~(Tmh9@VG{c3Tj84o9R~ z(xlhA>c{%hqXoN-W1sz8YB$tviXyA4aF{M|h&jKfy~W$*M9FD4wT0N7O<>!W-mn^| zAn@KIhEGCK{3?Ka99KFtv5L3-T9&%-H_r)#!RWD({=gsR&?e!q>+m5{8MXwF>Py?b zZ|+z$VV_Q}A!|OIs7{M_1xv;Dx>tQz+&#IWO1`&Uhe$+nKH3h!s!iP{s$i-c=@yVw z_P;Ei+H`>HE2Wpj%vF9lN|>H|ZH!W5!V=@AV}he7uZ8I-0|vUQiCGILwsVjVoxv+1(+hWl3EY#X)LatZ6|FRqZ55# zh(}L07&;}_;4tn+D+yrZq&+d~Bi>KHFoZp6wnhUqs86rgx}9`*q;pEj)H4g=%BOa; zANa`{06ulHcyJ zUsaDgbUxCdD#Idclrh?<3B1r7lhkvQy0D^MJf99VlQp{m`Ri0x_x0_T zl%0@M6huf<@S!@?+*1sofdHS#1B#s!+{V($k=%3Z;zB-9O?N8Lk~X02hK3iSk4AyP zdhGGONVk8RRsxV>OEM*%Pt5{B+KwkzsnSg9mXg}jL1Msz+}qaaW19C$Ns1_Y7wrH~ zx`>e4=sRN^H3N;Q`KWZAtYXfRhH!_v5_CIjQKvPJ8&`57U8vqQ6xCd=1F5O0UouAGpY6r8aYQGGGL!OOCs+9}S{8I zwYfuddpfwL8Xz%knVs$iij2@w?I1`Da%)%;*p1IE+1um3m;ZG?ajcp5v!qpmA&PPw zl~}ee&g=;y_OZiU%fi#v!lf_g65OU6_xi)NAMGB;_qVa87(ekhP-UJgfbrHT-2j81 zqz00O&A{H3^pqJ0-Cizi3uC6LdW>6iPV`G)38evI(!#}!_m8JPaRuNQwhzc~Sin?? 
zW}6(x1cbt;z#=Pq8rk2GTa<$PLqu#IUnIk7dB(G`<8dDBU~|34HYR9|D``vcs1XW_duv3PXV3nXNicEbBSCqvy8SoI`erQzi zH+$!b$-%;|#v0BU1VMGyOHwEG8ug$g<-3^rO2-8INeh$9`V|vbcJ_CP7U%r>hdu!p zQt`DTpkPAic^f94f5=ms5M&Hwv06Uo90xlM#~UemZ%J)9_OB*;OnvU1h9j>hBa+yW zYjUhP3P^iSi)dKiLlAAgsqBTVkT=)kFB<1qc)G5W;L z)nH~kC0C*Q4pLj0(;CI%u6?8{bpBi4qxHMC4Om?x%;|hBH(7k#nkQeBg=_xnS*|s` zp!$?v)fqX2pwJ<5;MkmrPKY!Ce>C1UE{hbR=`=$fII4UlO=;4ys9_Nuvm-1^sneE1 zOzPRx1R)4Ty~JG_u!f)cLv$+4pyQY*v&SL@V}o#CY7{n~L}I`a3EwhPnAj(J!;Zy6 zX5-$J`(z=mcopBmZ~4~RdD|-xvW-yBQGRzlk9O{*n#HZ`p307@7T8C^-@AB4eYXa}SbL;x0 z5?9;Ku34|9_0gR?Wb2|$UBW@V!|4SiH1X_+T8Dvf9#8$4I>cFn_fRA%DZ7BvChAeC z=zma+Or_dtGSfXCItp9LD~3wUC-=&6RvkEJGiPen%}3G}e&Xh8`%uP!Kw~v17E&Mz zoXoP{K-8`g@LBBVA|#(gww6tE^#h!AX>9Q#`tKOh;1JgEZ zB<<*!oIjb2xuoOBNgbONWkLREJqHw>P;I+ZmI`DiXD)u)tbxPt>q(T`^5h{a^4r+>h)vH2Zj#h{NT7$!ie3Z?lcI6Bh0)?>NlIXI z56<3Mwh)mNbffmC6tZFHdno414?E~!?4?7nn0wD;b9>W|3$5gy3*`x3oY{kn=F=#I z0yUj|f#C>oS0R5>_su^pJL+e<=e!DDhF1|qu<#aNEg#y5UF8Q!ksC6QX#RTMR=7@G z0D}bFgJ5FBrU!+dZO4$zAQAoH<&lX?SOjih};! zd7M%%C7z+Q%M%gVG*^D($SuU!){c)Q?3ENbLK0v&t^j&k7CgSYxlSX?(S2@&j=IrD zXny|$tA2i#Kv^K@XGcNdYQ)}*5&vS@elX(rbUvOG|z&%4@h{J=`sG8=@ zbk9wpt0(pA#jCdod@Wih9t@TorVw0RK(qZbl`P-gy>($r?NGpAbap?hkLkWqXc>0a zFw#`#7a@Hrzph2iIznFORDr`74VF11S!K)kH~US?0}ZeY6-5(F=*|=95!)1c7Dg5` z2<|sp+D{Fk&o%$0@Za+Mwq|eqj%l&=0x;H&92auZ>H(&@3bbEdH4)15!9`%4birVV zAn#a_x<;|vm?*wu@U6mkOKJvX&!vV3aT21*n4hhj8Y9XYQpxGa?N2#VvEB_U2gF;s zxXJvpuCU7qXxLPqh8$j^ovjgdZV9O-V#kvDolBEXp3Um%%)KCf_sgaYE@jQwWd&uW z(9Y|7a4IPcrA~~cb*`K57cZcAP9Us=QkW2B>$DBhn$Wp1wnEm($jQ+D8PxvqSFR9> zGL0V78N@Gg?J9gP*Yg%FeumCkBa=LF#*?UDf$3i917TdPw`jIMT0<$tlFq#XO;7!6 zgkPR2!bgg7uDD5S*)L3_=9ti2$A*7!KfF>fKh0qBS9n$wtiM<-{ zshRW{5a1eFaqA5#u~TbzHA&*4d8td0iaZ22z=y*0(9E@L6-)0^ydt}-2vS%&=`jyO z*)i_&a$OVzz;~DT{q)SVx`|!AV04Z22 zVvD+PE=kv$AgaF`1*tnX&u#&Bv+&~jW=uSgqY!-E;vs0-dHhABvyIG1jaF5~6cyzC zM!LfJGI28E6?eAmZjSRbWzw*}w$@favRA5PD?(x~qt|&Kolov{D|Rn)BSme(_mb_5 zl|)^Kh@=ZhPHF0?KOE|d+!`s?vf08ReAifJA&smrj%tni{8>CU!~B;gXFZetehVqY zdI^>~W~`VSJ$G0TlWTXeefI?VmYc4myOpbNhF zBj$9>Og5Cnbk1r@ZoojK?=^a2bZMncZ>PKZ|D>{Tz>1rOZSx86M+N~#OTZ(6PAb1a z>D-xNmvf9(L8lt9PcXu^Sf$`Js@Nut%5q!%2!@zp$|a!YmP!<~sQ~Uww0F+&eV80K zHv$rd#tC>;A7?aV350YOx<;;<-_leLk{q7(nWPWfLsw%x4Si(OrorH};!$s<+vee$ zyPJH{oe8E9v~wrp_s)!PY598HvL91Tn?@OAhcbMVeGC-R68@#c8Cwn*KiX+HYm(7%Zx&mnfit4^*wa{VLDt*839)H;tO&e^*+zY#=;cv$VC0={ar zD5R^f?mT-09)JUwrNfldQzLR%pU0FbQ7(eTEfN2wdCTQHh?WJ8;IMqLBp%8`U0XPG zM;1^t;-xkvH8!k!m?{%o>&^}B9-BnYCS#qTte;HEEw+F17{_4Ykh9f0bnWnxCwGP%Cr1!y;I2NzWjMOUgL_JQ-pa6nvwjZsA(dwkM*E^1is+%{Jj+c7|#dsKNznlWG_d* zFHzJtB(H|4ZJWN?V@;@ikvl`_D6Bj*hDXx;EI@Ni7IMTI`m4jzckfz|n<#`%&%x(> z1P!mflr;Qx?iuadd|N$Z^J$h?Tw_xmXA04lMhnQ*c6XNuSgk&tqxD?07j5eFYFE^i z`uNr+Lh81Ys6#DdS;E9#!$`_yZ%;OdG*n6Z4)%qit#*4;TViQK5+;;9B0-Bj}pY>I~! zX_j+LMkSK<7xMkmy$Qw6bo{=04@L-Nrq@K~`6Ruc-vZDd%({r*!3g4k9CW141t9G^ zO(Ls1|2JZLO^*Q}XzlJuaAq(7Z%8mPoBu*==~`HeXWjoKo>gKYpQ%3|R8VGC<0sDY z{b;#DkVFKwJ3z4tj)1L`gBpE)4mCadDF7W2g&{N}#DET8pq0>lh=%6mz0Eq#P-_{M zcVo$A991+-2!$;Hk|cPXX6la2{O-EIoJfd8@{8Y_$M1pX^;u{O0_HU#mL6aaJNepT zFRT-Yp!}d-BrUF>h<}n~R$~KnCUZ+a`HGV-92$npIUE<>2*Li>sIy-sqaHq9F(H!z zB>GREfOAlapFlT+LOFV*d6d@Xcx{El#31psa6pMmlEWG8EEeB*yXiSCQ>jQ0i?Yb>NQE6 zAP(D9 zP8przJR1RCA_|7j&>Aye(rXvpho;vWLQsc#iO7<@f(UmKKPr9nuBP4WT%kGe0UBD=6*TQ zgP4712_BTtx4C}V{O2ffJ7E%r_UrTU3ytW%t*nc-J(Hny(tzadpGFEjvTq5$ZFV6( zENCp69*I1mAPX6qnU zK{$mA5V|21RKK=sUGW4H#?C#k&F&;cPyj(dzQ5I#^pjD_=e12tb!I~Mo4-$icWbV5 zs^Q2p>z%6qyOE-SYSX#K4UYebf68M}8J`D@qKMIDcxuys5YO^JvE@zQ2wErjqLVk! 
zO0m1V0x%Oki4a^@$`21|ZNeUX4omCK-T{8uh}K3`Z$^e-5u2sM6T?QW*A@!W)Vz3x z?e1p-neqIk&-pQE_-hXmt}=2biOa;PKW2EPJVA&Xk9p+?-0D##uO!B`~%o7 zb~^`{r32!?g5qWb`V)(r1m;Bza>|J?X$_$RuN)25xmoA8IrmmD4dfJ3))w1H`-|YB z(g0eRa*(Jc%`!jwxF8Qq+-C@g4`y^gz_LqR-&#GYTK%%3Of{METef#yeHS9lYTD$= z-5nQCx^9c;Z47kXMvM_9vv%}bez1o8nPX_vk6N)Zj&_n(o)LPli$_sh1yk_d6M~@5 zPbl&Q{s>NJd--J4XjSh)lujsFKlf-j)Prf4=c-Y*)i>NoFqsPbk99Ymq$B5pA~?xy zovg;*(e`!Eq~tEqlR|Si{Lr|dpy9NQFKwDrw9Ap=Vrhztt}DWn5T`9v%T5#H3U5dO z&bot*Wl|5Nl-s@OSfvit65D$>1`^0(&6&h66tOW`Sc4~btD46Y8e3W@y|0NL`R7c_ z#d;^N=A=p;NTP)w4b$l(iS2AZ(dII`O?5Xe;T#=LmX{%usMaF;4!yGdKfqVmo2y$6 z^aCg-Tz0!r=k+p|6lj57<9T|$g!|?XCSz{|`rxjg77dLOpoiYLI<+C$R?qJi+UYJf z?wUzvOI>2Brc)EuSLj;fCaApI4>FiF`+8IG^;q4Sp>7g&(tum7U4nma{z9CTY3R{M) zP59G+MQ-3_681Wt2~%bSqQp$Gctor`?H%>^ebvi(NL85$$k=N)tciz z8(TW=9Awuok4W5jdAII`59R{$QudIP)d^A}ss9`%K5>p*xa;TLtKVO!gddtfJksc= zvBpFtTXjtae@tWlw>f+!_jYtbGiZ+l?D*VTBW-Vlab)2{PzJbR<)?}~dT4M51(Ja0 zn%`Y8;kK7$=NSak%1xS9 z=CQUTayD9Cb1;{&E=y$KPs6jzT8lRJQh-~E6)a1b$?{Y2Whmdbj%TYfc;&|qi=~S~ zWV|(efRv7!!gwSP+E*w`01!PkJKB{SW;=l~7rwdkD9>9w^~JFdN3g*&hH^*ZE0;v4 z^DssgirHIBvAgz2l030l?OJl2Mg*PX+^4`dje?r395}3P?yjd2k1$m_UyaDWVXU$A z|Gjyht9-D<^j93%<>m+WcBd6vSX^gYa_Oy?$c{WfEG`M1+{HQj>haUU_c4ZcpcOAo z$lYQSUe&2^_diTB2*aDsJz{6e{swS>>(}?7zYmV=aekV2R~ln44go?(x`mW})|6-8 z&B4y9v8>NiZ5ykXC?b*3HsP!F-Ch4^KIZ3ke|R9^I5fp^Gs_}>K03LuF+YD1QDqi) z#I@28iTMsQt%$r(fScS1jtCMl<*MJt zHtJ7-0uUP=oeW27jT_w zRd!`Vg;lT)>wD3C;|y>hFV|UWa>_*OYT`F`X}Z_*U*ry-Z3$gW1gmBTkC!L` z9hGU=&NV~CdTxQ5o7wnrGe$+56EVk9NAJo%#?MI zzU6-NYa>EaE=FtEclyYu|F{Wqwh-N3!Z#}hiF*)Eu^~vE=BX@=3?a6;l=TMG5fT8m ze%Als&{9PKo|VQD1Hew7Xq)cl(Grz9`V8=V8cy^`xqF)b*hZIWwaIFZSm4{u&u$k; zs>#-L^Q}Sv*oc<;3MP%I8MF>vUFi)A3LB|Z5 zD*dLju_~2MY*vb@FNi@V|_pU1*HK6txL!mn2Ia?~SmA+s(QK{$M<=z08{<0q^@x3W)9lOUK9TyZ z$Halck3y;3^?hEJMq>;{oOS??$5*g4w^F^^|1rOHWFD=edUCP_)URWGPvNOL(N6Vp zI;FrF|F4vlU^C>T;?q80wWGxK;=G}3I75P;*WpS(?`R$WX+*m?&-SOW#15#p|I-LW z%t&)3Y&8!RU%vP5l-f1_nBhi!6_?swywr0)E}tsfRgq@HIIu%9G^cUnaJqswikhlb zsR?JX1IOk0`!!la0CR^ILOF=;&z1B(jk~@oy%A*l^n+#-4RSNlgG*(qD*B;gKDvTCOmC9|-=%%~Qb`R`B)SbB-8H&*l_ zeafyK@XHPDuFV3_suv{23I|yImbCYYae03_4U@&MG&mw)RP}sk#J+T(!azs1>B87) z7Fi;fLrNtjrq|1hOgOZzw6u7FKmxK%CiK#%`rcidd-In->UZMK90v*ba@%Q99%tuC zP=}9acV1sEgd5W>ZqzChFJ`0>PAbA^@DQF1;r~IEPv%meM@+O}$!cuf_NOz(y4)_d zW&0TaXlC4^!vEr~A+dg8aMamhwaJg%1LU>arDb`lwlnpKx}fc%LpTf3y}%0Y zrr)xYc#;iA)Z1js2DvQc5B5!2=40CTt#B%cq$gVG$F_td*no0HX#prcA=Z~PS;`ZH z=fJ;TcheAb*JNc1*X2?|14| z)MKDbr;{>-tqIgBk2vrLe}`AfuYSEuJN3PDUZHFf?k}hLxVq$F2a=9n6hERK02oZ- zn04fUXuJe+gwIXhJ5dTc6B$v%Cd&^fRG+pa1#OL>Bj^H;vUFSLXb&h3%Kr>(nc1#E zCwSRUr*rDZKZF`W{S^PUlusq2J!qIxpKUDzUqQ-Qf7sKwDhHuAibanh{8O4YMPv^)sI)A0L8K>23)C>_p&mnDU(q}Y;prga^s(1h zr7F^BjvYt&Y|j!kQ>fI3Ha{We(MnEPW_IQUxyc#^Qv(bKm>mbZtN4i6n)XT=H>FTb zb$lU8@-Qtqhl6;eirsj6#o*@mlck!kX*Yg#px)Kg<8(PhY?i7!;`0{+K(dd#N>a39 zcEKRn)$88R@8MvwR@O`0d{Z#88*57(Qwomt8F{{<|J{nb+{79Vg znz~Lcf}J&nQ0rWf@1bMlFmb|>udvcio@`gDDJu#o@QEA%^K|iQ?uqsFT2@uTR+?7~ zcCxoa;YO)R=T_si*(FaP+XlROYfxQo}ixDxs;ry!n-)Co~r zyN1~zH%zkU)T7-R<&wi@itFy8++`Qy`;8EoLIJKp~{SngqpU5 z%K-YHNqPt^xm>Nyx#tLgi|ow;tF+H74Wu*p6XqHhv1el1;vv=jp@flZLz6slA5U0E>R%Zp`xY-W~jqecbAy zH9fbSx4n&2)2Sr|&$U&j8|c@Xa+ap%lwh+o7fg*3a_SE5LS0hT(`|$j&W%~Xb83<# ztp-f-u-5t$<_n?CfJ1un?l+?&F2Ylc4z7y;hsiSs8=O8;(hdfdlwd@;R9CPvJt)2n>=>tXj=lNhX6`&?HDx>%Cyfie8 zzZ^cBV`bPN**&#pOlw@S*jjt{guVJJ-I%qo-nO*Dz5#_p-41q6tuk zQtP&DN%&+4n$jw+L;M5F8ls!_bR|&>Uxw94;~F^)V|?C<_`L@=ZD%pfA71T#*S~Ky zVQhD)`c(Fk%S{g&|}rEX6!f?%T~V3voL9+AO`UpZv?ALazWxxoc)9Z z2?HQn9EBwMZOmRfA?&Rab1m9v)75np*Sn;1o8L!0yWkr1I^N8#^ky7kF;a3oA-ktF zkxFYKZTq@=P-FQtN>WyOzNVFRFewI^-yu~Jl!MN@C!$_0X+KsZ%V=xm8I6{1H~bbr 
z?8rL)J*{1!=kwT%=)o$rRdxAIFQ?VvKb|Yih!3|~MZa9w9P6NOePnEiB$Vj9z z#`Hm+&vdHbm*g>&b~Ybl`ucJzAkg0Eo;C+p-=pQC944w`ozu3kshPuY`tCKejCFrn zAEr~HU^-qeW7#wlB_0l#6;@qTcuh(d8B)mp|nHp zm`23pVX0@R{skVQ`w_d|UI8S6u4XQ3x*F>0ZbW2bNE@+imZrc!5FyZ8on>xxPlSP% zBA)WKgFM7em-i07pK4Gk+E+IY7D=JT2&35Y4ylIw&0pcSnXmmzJe&>}$`N2{ElVXQ z&QN2^qSqfN#+FDyS9Sw~$d#M!_(9<)D$b)D-Qb#pV&WW}!(5YyrtNAF7V7~MG^FHR zx1Y^s!Jk}uGp)y@(axMuvw_BG{=$2Ghfq4u$V~^!1pbL^z|YZLTec|@Lf2j_YyB!k zeh@orjcL@4X_68vdE-?sF5j6&XCXBAO#s^i6@DJ z1EJqDRrKF7zYbqdch8np2huYKJY`w@SZ-`_PF~{Z%&PAuFUEnA9oM9sbC+bpu{5OM z5>6IrQ1Nf@+&Omo5i*A6+ET~xjRKxUPB>e4Lz;F`B=9?IEot>x=F9j6<^5Dv+@Uc~ zbeL%w5|EjWyQDv-+^vWwMeU>H=&Uc?`MdwW0LuW>xW}DQlH3OG}q(<%p_@%e+gX0uRg~3 zn@;b$)=G3H+zgXO`t5mVpY!*=A)0=b((RAlO#uIr<|6||sMkZS zdgXTd!ljEf-S`&RG`P>K%(%%;qj1*<77-)ZIp+zp&Fmp2Pn=gChD^2VYsVgA&)?QR zpJVV3*Hgr>L0o$+q;VtgPj0<$AwsazNwfjC3}cPH0j|33R?3%y_X`XF896(EhCpvP z)p?;d-VID~oa>>_Ra&I=AT50X$B8+t9Y_d4Y*x+ChdyVzlUR)-2+#GFx2&@*D3^(q zk}Ys>q9u(&MeZ9s=;+kvgcfspZi7N=?M&TY68vIV`cGGcItz~l(}ur%ag+s;eihbN}c6qSW2yYS2ycv!NaQ@){2tS~%BKI^bJ^P(u0 z+G$Hs(@m1>ISRrQ*i~@bMRqOiuy`=6WlKM6NY-QjR!@x4j_!Xu*FS1iKd|JuT4=Ox ze^52$gkhUW)(&zZNjPf&nCTecE9#CA$1QcbILBDS%#>{P`O6RdugmoO9TNM{h*D-s zd^?-~gPsju_d(A0<-O*c=L=&l;YtZCKA}#usq0RKbp{)jNWZFYk3ZP2fEL zU4A*><4vdi(v8)g@OA=gp`N*4gTBCj zLE`UrDt6AP9($%GJ;yAh7r|s)2Xx4e)ukcX#z${~gmPadV>a5CeHs3~0}i zlwsSlum>+8y$((B^ED<9F`WG7AP`NLr_0q~LQ)l+Yt%))-`kS%9q8nh5&O5zE7P&C z`9(vMX1Zv;cxVk9KOhvI#69J+|1~TfTGX+L>(1A30r9M(=4i24OR`*!E&MEHf)B0!#a&&SGv)D+F?pRp4A$$Hqga@~ zrYrl3YmmfKL?UL3jWL0j#cQJgypKEj)qm3e)V?SJdnYrs6R_q*len9p9G6(vf@Z8> zG|IDnV5t1+oGe=3Ut0p-TT2DVI;R>v<5aow-=q0!*bYmGn&S@#h*~m2|A~Y0F!cGR z#wFweAjk1O(4GLx<+4Ph;nWH;J%TTEl!+|L2Rd~znc6Ki1wD*mle}XudblPrr+%4E{w^#R?iL=Xu^X=L%%}U(}2A07HHKTF3{pSU#`geqrIF?Sog8 z70G|rw~wbBTX4<3dPJi2*jw8hYl~{ow-&))(D z6q+k|LHKMUaQF4ZK8dhRk@>g2s|b}R9OlEGZ50wtppQ7y3;J~m*i0&_r?w*4Le!_- z@1>Egg_@7#e~~}JZ+e}*^^Q<)G&!pyp9=mKr+;0B?yq3!aDxT=b~kIhh1c}e#1W*V zds;^0)6utoXR}cM#ixSVK^bD!mNwj;wPF=ub!h*v1X!0Ru3RwO1O{s*DmOAY4>T{` zl-1KK$I%-4z}4JG+5m)RG)!>wF>3rZa3=MRc)oDGs`C!@V>&31l}XY0S(bZ@=EDXW2`#pE*cWmq0QU1X1qatDNj`YL}he^G-VcJ2-3dZ3UF zVJ;C@^dPp$cRYVB*{g?lwE);Rb{!ZEu-5!DW`d9+g@9lEtMFCD>-a+Ky6-ENsns?> z3qdCiEugxA_7Bz*n7sr-?hmnlcxjvxoSFHZ@zXCYOIZgK{7~uk&<$|YPS_R{Aq|>e z@;AROw@zN__3@tcUR6)IHo?*(T7AWNG_v8`JxANVc1UwTxy66OiP*L{g{B(6RkQQ< zN&LgddMG}oJ))T(qVBN2sujdVTy*>c8T}bcg6yZ?YjAsHrT6H6_G&xUYV{gThI+Es za)2LGZ~p5~un}QpaW>s1vv(#P zG~kIw$T>h634R3ZX*oC7#uE_%rza&bl2$!FgZwKWmi?ESkF}Ie{UyY)4skK=k&09O z>K)-|-3jL;_C%_~gPeHmW9`;#IH7Diky9!VtY_BKsXWbpj07^DzCR7GOMu_l{BKut zIja0%ZaBq|n6;NQ)M{GS@YXGvHZYu?Wg2TYZAX+2ZTkLv8V*8mb!|<4WQZmxdVz^) zWsXD5aG8m7gD486s14@}mt!ZI9a!b3da)p?esN4g!v(X>3(1AhogQNpCT|~{2*-wnPrAFs_l4$ytGJMXzb^H>_~${Pwu62KydlH}$V?4$D$|LHAAcUEXgs8NT+eTXcdjrv}9K*p-Di;pTkER&T{IL1Io~}6c@1CyXcKu2IdPI>& z+|OeS7Qg)8AuPtKcKL4PIZD;v@NAE-X9G15+r+No4@Pz2rf7-RmGf74F$_y*-o#l_9lK>IEx(i}x5Wr1&xFM*A1a#t#H24%$)KZ25Gl z7^7cUP<$3glQ)t!K4U)X4aRKm0Eu(quh{>1i{Zl;5d;5aJRUv!?tKb3e?8Sq#^HC> zWtdOR#Tnwt1C5B(5-TyGC+JsZG8UjDUaW`b1RPcY+lZL& zT9Tu`xFG0`qIr^i;ceTvDye3QVqA{WV)-=OBaCOWd-Hqt?TEw_KnvWNu^Sa}#6Ais zYH;~?S?#%!gSz^(7@W}#?|Nb9B@BTkD<7_?VU$7Qs3!+_OMmnc>lf3Ho`$xc3@d2) zlZeE{a)4bEiES_$b40_mgE}4~>F6s15TcW6fuZ|*m}F~01n>wBobS{$?nOot1Y>R^ z4m7lL>zhEM)6OpAa5-Nen(?c3Gi!7*Uqb>rUN7ng>^f0#Kz0{_X|)%O9?qvHsA#k(D=i>d{lA| zCZuU_m*QBSek_(J4jwk!_`e7U$8T|(VM8)@9z2zPreZpxdGPhJvsUB43xzjimI&}m z1QBGw9e47fe}9a`yZHi?zj2$}@Tz>fu~Vgde{cF6s%7P+CL3JJDb=hl%rdww@=4|q z{0y?QFn34ew$y^(w6|WHEfo?*L*@JyR-{7q2&zD`47LBK`rU;gMkU%d7 zx?y;a_JH^;^cNN3f)#0E+#^JYnVM+bIqYThmKYD-knF_ z=n|kDEjW+ zPpQ_w&)HSjJlB+-R^`yKxWo?7EmioC_1MVto?18dGeii 
z5)U$jz~BdDmTAvL{$daYU(})nY>f`;+BDVx7V>`|PSi^}8=>0afi2fh+ilPUpP_!H zXO^2PXuNz>2s~%}gk zbvy1>PS#qdS~opRkI&xSecNr1itkHh|@@Htk0&f!rsa zSJ6z3E!AHw=cqR{>~QRI9II&JX1)}w8~Xl9LXn-kZYPqrbb32aRJCAI;ped%t{tuI zno|BXS6?nu-ji9}OJI^Iyy`}TAQPT?1gq3DWVVsulp{W3Zr}&bu_#>1h(SSvun}5V2NuX&p7nLUePl zHU0?m9UY+05!VPPncp+ur#fsvL$nHKv`}6z0!&z>)hD_&lUD(r;Ip?F0Bcg(;c=90 z=L?T9O^I<2Q6ornynBXl?+IQ%?Ivv~cs%NnP;sC%Hm_X7C`Gb!VZ=zo8^f5mV`)$KJ zhb)ERrW`^*kBM;pk*fU7j20juIJ69tiSo3GCJZTLIeTSV$w1Wlp;jp&foDsPXefoB zHLZD~Hx}D4l$X&Fs$c({vfj;`aU~!PJqj$%A3QM2%6y>Hwom(yOi12`)X+9g(};$h zGZZ5=!!>AO*T78lWlCh0ttAx19IaHC#OfW6h zE5R1=l*rM@PN|VByrzRY*hG$ph^0xC-TJ5c242UHIW_B(l($q@!JghLT{qoLh#ECa zDkKpu(&?cF?Z(bPo!voGEo~rNXG1;IAgWOA?kS~uc@|B6nhdW?S-)C8G^>u#OPqKR zn9LDWf!Oig7+YJR6KtTexU|ZVYWT#Nw#qA%*|VNW7_FE`O(WQ18=n)Vc-(&DA8JXh z4Ecapv`@CE$LcOfR#v6G{zM)^yXj-vU4}gxpNixSUi*O`KPOFrhB*!KKd&Usw!Oic zwBsZCQ!oyfo)%p9<k=sj0A&g8R?g|uHFNX#A)pS=Z!(i# zq4AhK1DS-$CeLme9~{Syf%nu#z4fTCaa|8UZh##bs74#WD~d7Nafx`pys{7ott-N6 z{Vy~Jkddo(4wkp+g+{#^e+Y%g6~}1-rE-VU+49cPISU4ch_FEjx39in#7V{gadiN#>XX#~S|0zcCeLz~l< zjoPsLVnmL6gHUYK4Zo;)z_Bb{Uum8}7U=JeMG%Q`aM#1$_^{PM^dI<+dubms&yEkzZnWD0j_$;)HkfHYYaKE;Q=wS-d=MKi6)C+nFj zt==})ZU&2`>;te0-pJ5+t@W9at?AYFrGZAmkh0ZPZ~P_ z6Ab#Kbbk{(Q!SZ}=AOjqGVU(T#@2GTDT;i6pTu9-L0&WkcmFCb zzTqJ(#%~cJC2YZRox1GH(KlvCUGj@wQ!wy-agXhD9>d{NtnrKIq`P^AO+x+kjace) zy22tI)}GxP5fiG|{K{tvUJfC~Tbz%$!kYr?#aY4?z5<-e?^N=*~RJsm%=n!!%LW&N@ z{2=;YN2_uYDMt`wz7=T$9q06` zj22PC%!9E0?WUV(jF91%cOH>8G-(qF`^7 z&UfT};7bdrM>h`dd-Z?2P-DhT!Eli%D;J6Sn&+q!8QQQfX*lnaW?J+F$Zt}D@wFWt zl3rNV-t6&`t`>4d3e|6B1`d}RNKVr~$DNeSi~O3VrDK&%L~GQ@@{9nNmWL#Vg zWR=&eV;#n?>=?%z&y^X<$lY^-ANNdGYiHo-4e2H#YTY~c zb*4F?zZJ=T{dew-E_Z@rHCoN!U3Kf+B1}dPhH67SkKU)9y`dxXx1v-SKK^=+!F51! z5tO4zWPIi(auxl)a##Tno{}PrzX`)0~TL3CuT(yLjyYdHua^yKjh28xyCAQx5Rh;7%(%oOz9?rH`f4Z)f zZ{yZlK{y=^Fma-yWC+o+E{kU0DKlS-Uo*zU4%lm;T657M8razKBFoD>)&YM z&>cAFkwU4-og2s)Xvc+UGU5sURn2l6C665qQ+N z*>d_0bi?VOLrn!jV_h^=KXOb%#}4#d&v>^c`JFtqKE@jU;jv`(i2rK$RTdwY>nQv?WJ z6>_;VI~>`Tp=JlP5LI9MywOdm$GWH=EiVQJ5DaFQl=%-)^shhZFNSII81vGoy^4$b zS4UE(8R@TfxJD`%7{@5WGs9ZzX64n0O?R}9y6g#uP@pZLvLwjE&o{hVOXJ#fL(D6{ljvZRQ(0}ZAa;z4 zUOab4*Js%Z95`OT`uRyZ=bOeKB;!r6Z3BIkP;+tbdzUkw%Kj8uPkrs=4vFF@jXtY3 zCSPX|MClH-X!{C~v``21-3GjIr^UacR`plZptj}Gvrb!4{|wX4f<78eFo?adXtgP`;$A1U5N^`ofpB%*?enTuH9A&#Jp!u zhp~HS53Mfmj4*I|zK-@_Bxp9!iY&}P&js#L-{J@Sy}WQp)IARYZ1dK;Hc+;OemV-KVws!uGdH!M&lQoGYU zlNK?&56@|r{EyDuM8iEe{jl!c>vu#WU7;H3YG$M^6X&QbMlfk9tfo%xvadVesUuL0 zO4X?j6Uuw}yd;Tp5WMYwqcz>!hmuM_L@qIf3B{pCtLixo$Ag*DD5X5V@m-%sNk3@R zj~xM7H5b&Y-yPS7Roq ze4??8?8L*#D7J@nb0TZ$+Q6vTXU%KT>c-X}iY2wr5rPVjv9BEoq*v4o<5|i-_hO;I zI=wvHx~zSMMKhswu5juTx>)B5N__RJ0YS(%Yh6hlvyzq*b@{x`H-zz@u?EE7eX+D4 z*GiRId)nFVGP(6pr3JR%K$yivCBZM<~t z2F#US!4geeDA~y2tKF9l!JNR6_2uqv`v3VDhEfWzOp7RHsXE^Y4g^VR8Hc;4!9yu& zyh)AuBA01)<@!%$LN6GL`hU6wK|IzQ zou*O#V|Z2f^T-iS%+-HrW0e%+G!I8dEXgZxQRL@P>tLu45#%CXo_4|>7wWfi_}=QB zj|mDYA)Kx*mr`9ex%Xgd!G5c80safgQVioJ3+D>X>4d%My77Eki@eyqUQzFnl$%Fel|%J(HMLrMCrKl} z#?Z$aogT#?+?3uI_6t}Hf3WL z>R(6|$a35mnf?M5Y5dLzdJKo-0xC$PL1j-})PazGAIFbrd5g2kof5D}KHsiu0(r*> zfsD7}q&3=Loz5%-F>-atMa7OI=du3nV_^I}-pgk9w@#hTW4v+_K|)hU64Q-TuC?;E zBb){`gx^A_3J>8KX`OLrM~>Xt0PKL=ZFZ)yO7cybV`9wu@e4)25a#5+3IN(E!8WZF z#fFGNw(<(w%0f$1YoG-pp8SJs6iSSu6{$TbGK5b{XXV=Lb~!z~t0g;36hkLW)6hg# z3jgB6qv=bZ&(SYJS8h4Sq6u`%d3^6YY~;+z8u?r(xV`OEU!~x*fA`#o;=v#C)O$;N z(I{81_X<#~JDIkk=h!2?7-8ool{(9rHnw09aNof~ZpB3W6D96MVHZL2p2lp1 zi~g#S0SAxbuXWAm4{|RXmZwweGSO_+24KOd37Th=#15<){T3kP6jYM;ghRBm=2Jj~ z@Pg&Z?Q?TCr!ryP2zn)8MadUTJ&Ge#EQ0jXwBdO9?hDxvb zL7#T75&b^<&4gsG*E!Rm^)Jc4ZQp7i$mz({;(2gg!%n5zQmh9*q)P=v>=uA#Oxw+I 
zNXgqbmZf5j@(Y}_Oy?WWRQdE24uil%Htv)6TkD=->Jr}7XMQocL9EzD!%ua(FT9XN zn;`5%kz!GoA(5ziO74A?3x#(UD=_8m*(8P!hlKEic`Zw=bL;CAO}Zu@c&j-?8-h4w zNw6`ku43~()u*tdC61e1pEX6mFK8kHzk`GIgAVi`@z!tMhW8qA>0d^vx23F=1O*?d z8R!dg?Dit*==lQ*Ewl*g{ZQ#9kFTTLHav1bPlWJK+Z$?P+Sn<)KuYgN0_Y}(nvpZQvaKJXIW@W!FHh4B$yLo{AMWl3{i%mM)(e7E;@N5g!;u0K*hYN1{B<5X z4=ccHJy%XrR}E4tH?oyGQ%}}X44$-PthB4MR~1gS)gwL-q9H6{4W7|#!{CAN0&01o zCM-?72V{?!ey3bHAUvxh4BAn?s{XRnc)0eIfm$n z{z&S0El0sw|KEUgmY=y-*WfkpFE-sK(hcMsqdS9NpMLA=a+Me&)>E@V+0s8G_XlET zJwsDN52zptM#V*zv_Nx;Hgv?^rjWh*L^b9_tjU@Kw3v@A;BqC+=i$nE^bWh1`%B*5b}@|J&@8v-hPE9(;-S)DJlK zGv3Heo2Lx>_n{x)T{?kG171i-(kWNadRMllM+T+A`F&i09CINvi8tq7;Gw~>C7>14 zQ7Fjid7!fKuVw~NqLM|1@*6N#3JLd$vX-jt_yh*cVi^g&v7c?M>{e!gQHv`K%FQ;> zt6tAyEd|;Mkd48649e;X_FGzyA*#`BzIJMP6LgroC2twdv17u+T;Mc2Ua56Rl-pl94l^bFfos*T2*ehm*n>r<)D| z;`bXbV`ZOPLw%>cjtYh=}UT;jbi&vR3CIzWdc61_A7i%(9%(!Q65gC5ZPFU0JT8(eS)=NQ7tfs;~a zBLG(Q@nZ%0VSykCT5R3tySSqqTb5`PybdnNcf&vbxzBbW`kA$C3Apu~^6A+!XD})K zL$OxuQ>__>vkYX|uqKud+OF0$brog9P9&KnV`T`kYh^Fy9UfZ%G`ecBLN}hbcU%oAIV}x8&eFpdTv6bwAOP&kbOxZtrlJMT@wAjA6QhD6u9HCG zkORX0nv9o)ts${?BTZ!76bP;J~M{hdOj(mj6oEG1Nsus+N#1BCs1?oo3q1g3l)X4^E7s|oo2`vPS z`CU2dQdRI^X@%<@GUJO2Dp+JF7}HAD-KuO!y0xV?F}i~@5674}5AyB6raS2vp-qiO zdk@*F6)9VnLtkhFMEH|}WiAJ-m3$=Zw(`25xM(Q$Q`2on@zA zPSXdxP?quvenTy z7|YGL*ziRIZ@bZFxQEX{YyKYIVMQp`Ysb^F_RUJMNQ|S5qPWDc2h@RT@K^m&7E9Cl zpX8^QqbK#>p5@^Ll%`tpP-1OYfQwptFv@n-Rs^=NP+Qvjs!tRY)o>K2(_oWO$`HWU z4L?My-lRRGNbsCRVm2&M&%{tU+Vb%oXwa85!gF$xI#GUzr{lm<8(^iJFNjCy{cDh2 z7f|b~AdeX(X7K~CE>8rx=Fq?)jlW@~!xHTOl^9d@XO;DAJ8xRDZ)zsYsnF3tsD)tgBhWP` zb30_v;v0c-KOy3~Q6}POE$nt1V!YWAo<3B$h$9Nuhk87c?tI;kUp*Y&MXQ%Zzddui zM2gq8r-@|DmP=jlW>g$wpgn|nj4zMJ14YJ=k}NBz`*HsA;{Zxt4gKF>(qmwK@C~GH zfyqe_2PIK6_zQk-ha1$7wwa@a-w%Cr0xXeK$sjb7Z%B1#>s+W!Qg@F!I)nvZ(l8rs z;*L@+kV}&UaLwQ8RrFeF*eas?1*Da-iRiCF#h~;&$UG>w-SMrp<1XOuQhEu4pMIh2 zZdW`J!&6{=2NgHP9xpsrh}k;jrVN3~p|jM~XCHFI@mwn0tMOEEhfgK|vZ>rU2YgoU zL$zWidN+*^&cH7H1uL8BI^M>d#;Gv;BXdwD#_&t2Z(KmAm8o&WI2s$*Yd>?=u)GD+ zUqq7e@C2y9vIz4;7U}8O#QHi7O+Ip`&R$W{WO`-!YoAUdoZ~j^ECU^3@7O0m_FCCRWb8~gi>*060wB7@F!wckydq0=&}5c^H+ z``b94UM_n{0zHMT>Jb`Hr*i;RjK0EYQ@`i}Kju!HEleX>C@6(z2_qQY<;?H#(QgR8 zMrC-w{`qcPCgBy;pMkbN$2Ev+_c%v8uJhH9>C1b-1C=|+&94A)5J+*Eq|@N@ZA=tp z+PuhWc#}P;mGWtjCF*y}{ka#yy_xD#!8lM;({j~B;>9zU)!f+(Kw(MX`CoyJBc_Z1s%T$+^>T`wCBDoA*$U~!P5 zjs20I0hGj1XyaxY-pF|9W9eqrw(8^YAM>?u7yLx_qaH?5b!o(((?~-EW#x9_?nLdH zzh#r`4!)p8O7=s2Z)h|$UDU+Eb}*@@X~)|n;HJ-UuyViDyXdLl=%me;Ozm*n>~ZmDR^$ z5i=Aa^>kE0Yw1SWfQxW%cR8hg_

D_NS;t$alj-?%gi;nlcCiQ~!8PmuFG*9~vcZ zLDw2boASi;>RFX9dOS<;dIV={ur$y^MmP4 zUV;;V_j+f%{bCe{Dls@}PMSbz52lW-8qUj_AV&Q;`vjNl%p+2!NR(dZ3lxFvKDCB9 zV^^XEb6}gx6SsMb!DeSIIc1WNA8A*@jb)IX>qC20E+9D1^L)A-v^01Sb&_6Qz-dHc z5R^|9Y~$zpc6X=I`uhu(&FfpV!QYGqQ7DP#oZcS?X|RN&T%0htmVa9TWmifRIyghk zonI^u=W;m$YbQ8)##U?PCjSjjZHR1<3))$Ve4 z=l>Cpj+Gt|SX8CA#e57#Mf*Ql!7k7iJ)qKQb!0TlRwE`@YW8lb;}I@XS$H%XGQO%v zH#^1v={$DXVk4&S3G9#<3P#aP8H}CQX6J}O9H`FzJWbr@X4D$Jex{tw2dxY>wh!jd z5{d(>qzHK8d!>H>tv%YPsxcFcX9^N3BL5k~%%G<%;RUf%LBL0ah-*^(i5g&>+@tUM z$)ZdV#U+jmD^BD4()6Rz z_N&i!@q&Enky9cwY3i!bkX?WFC5vlwt83Wj;_NbQ8{N8XC&_BDrzd($gqrMXxyM(e zTV{Af1THD%BM||PW37r8bcbuGhB=;i4QV7cbynOW+C7h9BwxSq(xt)=A>dm2k5~&$ z>SSe|#EOp2vbh+D2iC#ybd_OEt8ZAA2CtLAyt?$KccXeDzlzEu01KWu_wF2q!rD^j z#sz_?``P^f+TlSPxqNmZzl~ni#Hks}$lkEdwO_(QOSriukQ6${WVp9?9@uNO%6}#c zG^xfDqGk%ZzfgguH0w>am21-Up+n`txi%@lTnitXkzY{85&vlnYxvhRl}Rfz+=5fA zjdq{b1(K`DESBSgU|lz|o3&p*#!fF6JAvuIj9FH&dxVoEOUvrZf%_3A_$E7#;?!NL z`yvnflLEnWn7^wtB1QOTj0_pl8=Anv-Sn(49V>EpT1NNF(d|qUoRwNqrWj&t*Ir|W zSmk6_k3eqTYM=^8_&xiGdLuopfF`+eJscq3dz2Hn61> z0$s_460^m433L#1!9NjL_>Hyo-`{L(+kHU;C;0Zp{bDw0w29q?`iENP?qek)zt+K0 zTvhR4iE3*F$7H|Aay=R%Z&BC!Zya}-mhK@8qF%e3VUq zn*K_JItLl}Pqf8q7;&4nwC<4YaLoKwYlXR^JB<)CK_ab}2c#3h3OrMb#aC;Qal-B2 z5c^2;HS(u5Ao(B@J%&hv^Ruf#^-5028f)RYLVO00?pCUjP;wpmLMk8&Z~iq?Krm8J8|Y|3-hwTm_9(ko6HAS`XQyL~bj*|blhC)4 z_Uo&`6_wBc3?JP^@V}^)?qzy{OT`auPxQm*Q=5mWyIpgM6B#xb35~}o5XknJI!wFW zo;|Jq0j=xSnhntez%&GZ9pcTTzzpsqI?E;6fb8C)sTuiI8>(|MpfZtMkFszf4PL0xw3-wX7NBo_mFK5*!H>ETFE7ea6O!iA)j0HR?^#ij^0ds%mR=Ggx zVw{dg3;hKbfRl)$vY$wJULe%h`TE8DsoNakq~Lx^IEQn9HF)g%PVi9F5|>`@O(L0kqJK@|2Q*eL+39(u4M=BCoT^4U6aN}%K2htK`tf= z8g)0cXPE*e-b=8CgCUc0h}uwGkdjb^hJ>HZ5?cQ+XEp$NZf%B}Z2DkG>@t^^;nWN( zV^f3;lT&%Z#B>@&oo+~~J3tbYvR10SNMzZVIu@4+ddN0AcMr844CrNTF7G9i-Ut^@ zHA0^+XLc**Q^=D)tFd=E1t#kGc|PsBGa6;#vJKZJGxlboU;*o#|)&J+=3?9uEs)6T)B%p2i7u4_rYF)>LzmCKAAMnr6738RD ze_e52e(7?av&ld#B91n?X=}+aNba>L6wD@SfOQ^a3|65!s9K|BH5HrKFF&lLBstoA zUD#$E5ASKg$aCSL#CRb!+N7zG)6W>`cHZ%ANp52eiUW1LZl>L+a#}3yly(Du%Ou6f zVZ2u(0m*?D(P|Ewf2Hsi$CTH!rQb7XiwcZ3d=GA(2ibeN8q%Ia4A?C0>CJ2>Bn*C8Fl0D7PbKSrlHpL!g8XA7qb5>UbnmEYB((b_Hf zSfkh8M)BQj=`Y8z&9{2Z{dz|@n3*i9S0d}ErOA&VP?gnBm7Me(=#`3e7iH)Y+X;6p zcXz7-fa$CxkH<2ITOzUPkM&0;( zqOF1E_C0;)tem-SXOTEr!_)Un2L{)b?>`BL1p^BcWV5Xf8ik1O=u<9f^O$Wd!13n` z`CJ4ZmOWP5U;Yph{ZI9w*Waltt7g4$eB5sSXMLJ`9aoz_O%MQhB{+9HKi^XWs{kP; zTP#C6kL3}+Yg}10 zMeUYfym{2d;?nXn3YYF7kPIL~M^Bf#g)VZCuOvPkS?F=2!t}VniF>sK;ZEl>H!XXF zc~`5Q*`&dz`fOfcIn~z&s4K`SK*{_p@9WkPlZkNj*@VZF5ZYsT5h^=EWN&P;XUmyGWR$)))#Vi2EP(zNN+f0XnH!{!J<&Z z$W_==8}FhvZy9PCv3ys$8Xi|XO1paDR!MZxW9Ax@1O2!w;eYkq`(F{`N#WF!vDPBL z5mIIkgWwmd@OEEvtyOG1-BDSj>Pq8q(fW+yeIM?H+5_4;aLkd35#hT_5G9{$l&BlG z9#Xp!REq9A7)>xSoQ`YT@9{HKwwdq^hcKfq0gJKK@iuX?A$EZI)@t4CT|3D zODog6V@KMW9#;lMb6n}exaImW8HQ1MwCozmtXXM0go4g#5`o|Kwpr^AY${<<|BOd= z6Rpn?4k_Ewd_a{CVX}qHf8KO~d?g~%L4YX*-NB`z&9$TN^&s--=Wy->zoQ?&@=R@q z#^!g%DXZxnSfK*9Ftj(RR%8}`{6bBDAkg9Pu%CCYw(JRg*+zf?RA}?ECmz*RPE9Sp zcVM~U^g1TY(ZeV$17b)N>lB9-cMB_asa4vR+>Y#6l`vgK>h5GikB3KI8pwrg^3V;M zJ}HkYv4%X@AaA%Eqg&cN{NG(Jin`f5qh8a-ZDZM%%xHemGLZ|gDPq) z)_4u*zfTgNhS=x7c1w!-)7wmEEyS_`^qSU0_~MAK9~hBcaW)0YgV9GoqFj`<_`1BS zF#psVNK*zc9f&>gR~OLh3c9sorzB)oS^(%;5%Z?SWVpltYj43|PLtiTv1kjP0-V(L zdmlX$@zCg}bD<1}91RC1sOb-}oweY5Iak2`6^f^k7*Y{UBE=K%z`0Rzmo<70$L}?P z=6PVc8A_#L&m!Pu*e|BuoTwywlTC*FK(pktF&;7APM1YBrxzThb{M#5y?tN|;*WU& zy2oM&bxzP4LtDH(H@GQ^vl%|Wt4R%1trs&(>|wu8l@-u+TdmWS))FwrplXZmrO%5! 
z_dGanUiSjKDU>l3Ul@O`EX{Ki%{NTd6(;SR@o*vZ@xvi|H_z_c7iC)GAN349My>_P zkeoUu4@Wy9O}&z`$i6^{Bo5(=UCW%Y+d&Wq5ytbQHJ!WMAzwmi3X*@(VRm(MYE`TC zCkh_(`5cS$%mrd+qs>oFna_bjL2<66Sxw+|+euzjH*b3VtFDv>^Hj$`_ZfhZ0e;iq zl6x`O>6f#Y8jTguWuuOv_sbOamzu7C!n5razGNH)sV?&GM-s1@uk_C9 z*gpmwZ0kFYmQJGnm(x5SS|CF8p@(~Uzh6B~R!-tiD>Qm02`UfX$ z*s7VBE0s2cGhYthXZY;=;?``er)?+Q!?G5EI-9K#oH|Qv{c0--bY*?1u)`ccCQkF?P9R#?0|f4^>e&5*mh z34)#(3NJQ-_7fsoBkEiH3sd_&vzl!QhF7BgQ z)YMi~Dbp(ozKvPGGHj_%w?r5pVhdu1zY*Q1axm&@qyubV{l|Dc&ifriRBrT9>W4o- zg@DPaXOo{}N}SW8@UHHb)BM>qG;rh;u}X1-Nm?GSVDlTRYA?8zJWj97io*UM7#Qg^ zAZPDM1!*sSt`{B<5(00&KuAW-8>vSpq)pnA^LNSy14a!YMv~A7dE^%;CkL1Hd^45-g^skY02r8~w)fSP$W86xqv{nMyI+Fn2;dzg3}+ZrEPWjd9)o87Z3P#X}Z)JWq3KM+fCNI*Dq=hA5nX& zNUWQ$A^)Ky?uf@6E7Iye>k}n=mc?#v)44@)N?oiiVPlhQRqr}m4V0EQfA{9`Q4>R? zo~R~&X4}ciJo7YBS6%he4}(>IP@3@#_0QSy?WfW(+iKoc-Q?gPD65p~yBDgy9CfO1 z?k>mbC6ZWQjTaE#kf7j0D_>veHph@JQ%#OApyZ5+CY%H!0_snB<1@8ryH^-b7PdT2 zr*Hg4YSwXxU~45++wT;c^>osNsZEkW_ueYXqv2ZVs;3!ZYe#?mLFr&WTW!oTAlpr6 z#I@2#vI77ffOLttGxWmqfpvTD05!tl@byQXqxkKg_?E%eke7VKjsmaEzpH#HG)fTm z;Mx0LBjt2jWTHR`caEK21fXD`KJ}Dm26W{)(_-c(@(?c4gDjj%zn%i8uu(bTq;Kgr z0tXS{Zo}O5B5Zk4*ZY{vSGA7kNTOE9HXvu05Q6BZm8U*7n^MW;n*;Ovn`Ne+lLnMy zB&jQXRrlwpnc*=7cWB9Wd2@|igG5>csn^{8o(4p*1_I!8;=cac?ZdVW?^8(W3Ty%M z3jh7GTE9&gpEKEUOclB!3$HyVN*a;?)gdy@B5oRJufvtOj8S-&Y2Ztz`7_|?XrP@g zY-aF9vbP@n_l<&6`U@~~%Bc5?{dx|RB^ zempu`QUkv;s{hc*>u=TuJw?7UuHHZCF>yAf_cXafHBZcJ0#Fq$cw|%v1VlVGggw z@^!+;vWPMvEfjrQ`p}$eE3Fe0!VA5RCRbTgYlB@?!;gORc?QA_nwj;%>pTtL*v*^N zyahmH9~R+LVu53D9lIqvrC`mjhnpKfE+e&!Q2+YJ^Z0&Vl{;(OncO+Av&=#MV>m3Y zHJscy*t9&Ew*$(oPZc9#Mhi+PTCG*$>TQQKoTD*gCBOR-JLNI-*YAJ$6?lNN{14En zRXW@16cnvbIha?hx|p{XP()Fw>cat8nz)X6^7S;^tqgCrc|=8lqtj&Rk9xdMPhzt! z1+ASOo3N8kpL4r+gaROLc!BPXYc$W1!a#my#hCC-S56wCORpA93*jsmJI;*R+VL^X zK#U%DU6sOhFF({s(m3lBLxW z8IyY!F7=Uc9N$hOmaCs=lm-U36`siTJdGcse-)JfAZ@Uq7EPK<(Nbknvr8pL0Yqn+ zIAAjzGPc;487I{J=09!!qw;w=mcS!jtlz*8ce^LCy1C1+$Z37^t;dK5Zmjd|`POCU zXg5b9AMVaFo;EX2qFVM~-S%D~byEX0#y6o^f#-t_`AKDmAAWw;2&-Q5SJ7xXC`su$p5ySIerKo*N=|1%} z?JUR3q`Po4i2pw%`}gfNIU8M7{uBWT8Ezh7;U)9rj4le9yF2`687MK)clh9waeKcJ zDYnL|QRx-l;gM(atCJx8V(^$5N}0tyTPOq#*Tt?5)cESi$YH9tAI@R#pm95C?p=AK z#%WCo51Ml{^#FBMo9Eq>Xp629ILT#kjVarP=mhbqNs$V-TwxhNo%qLPHwnm<%xSU_ ztf(ThC&ThhliBqZk28&}aPDY$2qF+u`_6$z3Rs_*MsZ7Ft-fBZWG1VQdb-O@Dmu-l z`MItFh8$Zo*UDdgb7Vj)F`jEl=9S2zU%Vr%kou+j`P5Bi`Om)MA8<~Y&Xzrp5ceCT zIl|1zJZ0zP7bYh)q}&a6W>LJ&J}3zmR@V*^DZS&oqMq@vPV(76hIFS9UNwsFe@k5k z&=okv5Fx9Ajyb|n_QHZ7Fm65zJBQS9zU(3mX@1i@)ryAdR(~+;@$WM=B4r^lHGc2Y zD3J?J@$7$7!#gxE*vwNg%@>l-q*J=p)nlT0m^7VY0<}{W935(0@|T~cZTCJXCbtb- zfjRNwtE;PZ(ed`y+F3Du+kdwfv88{+faN3~`~MvIf3Jaq+Us(XyN>-)gotFLefuCx#nb3-xXTD1P?O%?vGK;zB@;->9x1XQTdVC_5 zv&|xFYDOO%4!;EOmWF>#4bU38Q$1`1aE{h={#*@B?;xT&K>fTeCYpoc0v_tUta zKYh##sAB!He&~TE$FR&{crCEGYO_KC8LF#jgH*~XEDZ<0H+q2Uc^rm?aH+mJ={I^K z;&^Acto^C|q$gZj8zF{T=9~a|{IvW~v8VpS*q11lirEXVmKBE6Wyj@*grc<)A9;F3|AQ_ba zf{g%@2R*u+b1y^abKBdE-}o(DnY>>F`F!o_!OvVA!^BYo_EPplHg|X}Hpa+H9FiHv$!T8Rp~jY0R7o@5}Dt zN2(Q{U&9_)lCL@6cqUxKTf8ncMhh1zsS85&9CWF+>ZP?Id*r>8f2HkB%v3_v*-Wx7 zYr}glWLHRe1)2DMXbu3W{cxrUud<@{F;}WGI5RAM$@lBFP#LmM*Zs#f@vaDOlC zhWeC7G2F-Ivv$$aMYSRW7w|`@-Zp5sY8#g{4N4(tE{M|v$E*DsN7B^H+)I+4wv`2M zY);IF>r+dlX`G}oH>73QdRE95C~~b0h=sIuQVj(sLUhJdR`VWD1Up;?dpTNnmcLL) zE9y8<=g7gn0PHa2fx^jRCX<`YdDnK}kj>`aR9a^5ij|h6O%~`=JPk*fUVp-5vc%M+ zDG!|(0ONf1A5v#fF%*x8f148*M9ngX@|CINJP0%^`(zjj+6Jz*Zs%$t6lGDD)jWBK zo_iEo>>-8OYLe2I+(cUK5|bQ`KVTo<2{ajXoSx(Ma-nebRQM0|C|bIlFFRbwaE$%KCCWt**ZKK2co3O8oDR|0S|~yTEbn=_RaR_n zaUnO?AbW;jp96~yus_=mCR|QO2`SE9mquZVaUuf=TcH}Ngan(wH6gX~5o^XXm@AYH z4vLqv&lO6Zg`L-U4i7hd3fuE2piB8>l~&ii%f6Vv?C>f-O&cr(V 
zTr`K;{-jP&pi4Cxoeqcj2ZMq{FKnU{vez|3*_n+2+9O)y(t?;^6ac+IK(ST|6_}m8$^S>hQ~HR44aw*9a_cLj`e7ROb!B#*4UPl zt3MOrG3rvW6kZ!EhliUN`B)F0`Y14%f|Z*-EbiY*<<~?s4Sv?L64+`?qr=o2VObWQqPJKLpFuuYwCL#{v$bI^?m^L?tz1L z&q^6g8-=}juL%;#lkwP0H%bbXGbPA52k)p~af&o!JMP^!{3r&fI{I;>^9+ zf5+o_DvJ@s*4Qo5jH-)ZICCHyy>#Rpa`F^};{&lx_?MJj{GlWE`Gc512+5y^E9gDF zg2!Sx?rkt3fR?yeF?}O(2{yr)k2}m6NF09e(I2$jBM#yQ$Lro~!>2qC6Wf4J#e|+t z9d^CDxrHepdNi5AH8Hhi$JXa!0u*H%=7>$K*ZjRO=P)$gV=F=%{x-A9Pa(I)jK1Y7 z_y)`K^l}MOXQ$CiERm*J@6p3ev-PmH2&P7zL;$B;Q+y1pFB$xDP%F&XNbr;+YioMG@GxY48rt>={ zYHFWaWcSR+!h)GFoF9YwJj3x5FkLyTKFE%$q)=;()H5kCQnT;f6Eu&!DmVMI`PbJ^ z^)e)gB3$@k{8DBni`Uqq^ot}MgZRfys;yYeNBW9ic*0p&U_8nr<4oYO!UzTj29{o< zf7URykK=VdhB&l!yRmAZT6&reCTTx}@y~l~tJN&2XT(?_Oo>*Be+=uWHv&r=?SQa>cF)N{e}tZY1QE&0v%2deWuLp)I84XHfI zO6yC`dZW<@uO)Ga&vVHUTs?b0;JLeFPbgT%Vu^9En*J>vhB(i#qBHE%=1(@)%@8dS zrdR7>y>`ZWjn}l3Jv$EHHcb!5v}1P>ofj+-O=?P9j7kVB~ zaU!czY%jZd$p*)7{ydz%yP5ndX9@nc2-s|*E6g8_Q(C70XHRyNjge(iVi;kRv?~A| zKku7hTdvrWHFb#*Tue&njg1VxW1EOna*n|@ZNN(9LA^mP^TxhKBKW8CohBE zK9$SXWngO59S}z6kn`xXHp!*B$(qALsf7WptJX|sa5V0EpZU~Ru?%xK1z03@?=Hk_ zX*j|i-(>UgfxF4at9^3L=}^!^n>{2BN3^ZNmN#$|MgR@xTjm}kSCe7-4`@5` z_nkoAv_#_EHTIRwz1{dcML)F(z)OFkDWmS%F?7F)iIpI0chg|dqL`V_?Ez4u`ec3pdJEeEx6aPc5Zm6GyQ<1 z8qv$Qnf9;*E`}ZEtRmML*NOmP7+j!Ip z=?KqgVBtyoFU3E5VNGY|bG9kp2cRsr-P{5VLnc+}yVKLrR_teziJR7%beokup*X~y zm)6bTBZIDrlJlph0VOKK&uZV3i@Zilluo~A6MlAro1LOAG6nCA$BCS)2eay9*B2)% z0|Wm3Mi#ngQC|*kudrq?8K74x*zeXopELQX&`3HZe;YEQd%y4lX@0#;8-&XI!+JE; zv=Wr5LbeJbw6<6@c-7>yO0Oz}i2L?p75~wyU!`6)&mpffFRIbZ@AVkcV<}F{SIl5PNr$O zJRYWVrxK2G)c1!9rf(%m;po2-RdB<4IyOf7(%F!vve9&cWXubbOCFgV9|r@Vp{^nm z8uz>Ugkw-*zeg?^6GlI94xVe+Nx79?wOVr7>>^OF#06<-5pJzm;thS88z-aNqU$ai z0%j4nryDU6f#A+8?%#xy4cw0*aKo7Hw&-Lk3?7J6k`-^Bl~HG_z9P&^hO@`$A`=i; zXZ&}xZqId4sb&ns5D6N#>6xorYJC}Mc-+d{+*cZkZJVS_uq_Z~qp zX_l@N+q>2qmHu!m`%2Bj57LyQvQr4{;A3sO$Z|`h70B|3t+5(LTjMd6G&ZHR#E_(l zu=^4WpuIoK01IwBYBa5g>ltIbGi)MK}jkQ7%WhQ+M=%Hqf5Q)}OJC)zgsxe^e!$ ztj$)kF$j^8Ac36$60SLfIwvhJ;GMSKhuj4I=YRiynyMPnLU_^jUn8k9GR}TMP=W_o zy7XLQz?ILi8QgW5Xni760J<6^z12FL>!=fK_!~TwU#Ia$jj;Qxyr)hVfR@xY+e;J| zxmx!OEcf?uWpdw?U!rVMoVpxit17E#l?P0vC|mLN!KhdbpwRk) zQ)EQ-m0)57^Yncb+K=dhZEC;~GXEN8+GH)C>WkTPj^J{uwMxRI$>6fYRKE&8rddu0 z$}9no4a^KF9LBstL;5s+!oUz%ltqD%dj7yK1ggJ05A|>h?-vFEzYF1IvbMV2zc9{N z0SkbAbv9E7t;ULeo!+`z+Xe!Gkp-(WkX(S+Wt6Vhh`k7ag8LYm@-)%xaV0(-c61|b zNIZLf9z-sbbm6|opY;vgRw1tG8sGA0{N~fFSRpeA7$$vvR&Rc>;ukWY_S7b^5e0Mj zEXAFL>6tD^rHn+ayNUd*!g30Q)!1EnVXp9D?$G-`TglYT{;Oq2|}W^_(CMos4?>VavCKC0fcP9H85?c&Ul^ zq^wIy)otqeVK~=!u~I|1)`=buuOyaBCmQ1JGQp1Sqk5vUdiX@v`?UcN6RBbY)oWPN zvLCBYaJ{!OipbyPyl(zJ)8SN%oF#0Qw44!fwrvL627H+<*TVqwuvcp7FvN7^ZBWTY z@nhCbF=>QdtybMaeG;_{h-xLM?b>_vxkc@mMW0l}vJc_c-{6%Xvb@o|>4WhH>zVTI z!AYTgHjVwCXZgLipg1`??? 
zSQ0No^sil(XclM}C<_>3;~{<6>~Ct5lqK7(K?3~iD|wYvU5FvWm(4#)0coHe7_Qrt zi!a!v^`E$T#p3#QfbsnF@t?2X>Ld6*3P#Wm1%{KSL;;_-^Zd+=_t(pDfh2dw zxuP(*VV=q$luLmWE=BPkO|gy~_IR3qj3p@Sa$AZw-&745s+k)0&G zBz;vUN}Cxx!`0KIO9N`a=VgSZni}Qf#@K?pHN9LKNof3iJUvZIH?_AAC@}&zs*c)9 z^_c$m%aFuGZuhzoQ>U`|K-S$Z?7;%2Tmo@$ zztP(s?1kg8ud!;ot3fuLM0pmh*?`hPDow97NmXzXGn1yW(c5)#!Gv!(jOj*ElZs;wIn6LI}hk>OOyGH~oDKqjwx%$s0~J-ad`XiJq+9H=W2cv1U(r z@cWU@!pSB!UN2?#7-e}wsp*|89*f2B`9)u(fwR_gG*q?pz$_i8n{A({$fzD%P<-B-SBBo`vH8f;|eIP zOR5ifdv`|Hvk1ffrN<}%rLC_+*@94#3 z*({dR2$R{Rxikx9c>8u7yN7}rGs+&*Gn%6cj#zoiD)c7jp}rfm8R`qfR$z0e9F*wQ ziu5b0RdT=3*YoIIWzYx~UN{!c+U5PLFFS!<`2zrs=xrhcg)doDMJ-(W0IL zPVs1;X%_z)V$A|juJ|)PgB~{BaQ;BRt(GEpUYAX#okfR|duZVCmix4x5s~?O%RjOS z;R>woZknKLptbIQEZurb+(-vyjjWq zJKtp#KFb2X=!P#)?xZp~DP;&0G!e_VG_aCHnxt`~%p=?Umi7$K0pH29F9nD7MM}4UNo6 zFuBILrAj@=A^WRnUGDbMnyAD-$imFh%!;5jt7N6uoxdPj{Mv9Fi9Mv)1N$0{kfCwNcSBTbURr}guD5GNQmTz)S;Oc5arPAe zO{Sb_9sM)vh2%YJifZI=7pyS(I@XNsDHCN`R6b9~Nd$ol&Du`Vq%*-hhm(=~^~_yE z@@c2A-`^1nhX<@@rU!jF(vc;)+G#?=3-6EmZ%7cDTxJ$R%ULH9TcGOv(dha#uEb|( zzi`1@w+^6a@PLp{9pZbKeA<}eeK|gkT6WO$LGgTqZeq}RKTvUuvDbL8VO2pr`CNAs%VX{uu3Vl$}IRh^`;`?UyB+ZwE262E%&tSPTP(=$9qXpl`MX zT!q>$v?d8ssZN}aS?d~0#&{OYy+)vK@5hRMAIyiCk-esjLZ?B89)X3}9>9UKXR~!2 z@yoqToAlg{;#oYq50_+C=(e;KN}@hhUtX!|3ERzC@EYjkyUL1&?lRL4tp?B$C-<$W z-}w7e0D%OUTBNq$>o0$9rB1W+nmFg+XpyagcElcJE&tDQ?Yfx_t_?OM10m=pRdzzo zEIKSv>uY=Ap5Gb}>_Cif+h6ZMeF5MXLk2e%Yf$SqSZSLUZ|~o*7InU$o#Kw;sb6S3 z51_G&Z9)9jAgPz){EV7R8e6xqxvo*p_G%jiW-f&?cXoBGar4A#4PJn3!3yBn@acT0?%#g0zK&=3LyidX#9hS<++dwsxmy(Q1&J9u4=pMSE{$hnq&0R^U^Q zo@Pun`M@5~L`Xbd>#;0xu3?EC5;RfDUwEsK2o~E1-N72FL#hb5_B{^e^J@TSPGQGF zDT0j0Tw@H(k74v{}GO}0He zK=>^7HPAO&y5oz=*1EqQ0eDJjov-SrE#A`tQYL5xcso(~MP1%@CX$PB#XqLg`7+)v zZ$4|`Hyw_wS#0AvDE8%RfXomeNKY{fD<6)Rh`#{}PWVn>1DQLxYveA|2Et)rLlb-F zGP93{%|rdYk7XKpM8}{T5IGbEHAw%+gz#y23(C=O(?cj8U_Uh@jVuN`MA|@AM+~sX z;Yfte^KjW?l5(6+r9WWMVwd#IcIB`wAZ??J*Q@rSAsVl$8v zZD@Kx#KX$qD%vwW?5ZD3l^_hLbDfX9)AcZ3c9IRlc&wX`+92nq5>+xy9``^y|6{b^ zz499`|Jt|UxwPI{GG=vsoXwzt4@V?_qG-tLRuHUE@>oB&72KdRFS6Ms-_;ultTqgb zPD)e~flRVkXx&TWd$!St_oeb7)Rn>5@sJAjq$?BI>%qk~+OMDz85?V#DD^HRwFVM7 z>F%vzVg=KXH4$Jc@HGnx&i4R$w0Hd!I1wMy4+U zVtW$$uw4ymKyfy#j2RfO-hQcxx6{J!9ZL9*!@1>=f3Q^}Hu?UtL>rQ3Ok!=h%OZ7B z+cE2cQ<1;Ke57?f{ch6@;)$YYe^~{$9QOvmQL~xOEi27Gx;3(GN>`*+_tanCFj*-l z>1CyF*fYti$KJVGa_VAJZ9CgNV#?XpN=$71xc59F!_%g3O+mb^Nq9@P3t0+|&_cd0 z>UnhRTPK7g^BeeK->XF!teMq+1o#NLcgW&uC}P_rpqDuAJj{bOB6}Z*JG4ID!I@(0 zso)VK^H3&IGnRfjgLHK=AzF#9D%mpNbmG>SBbqy4xs8qN%)r%ipo>G@I;k*m*gVx} zz4X=&HEJi<6(KBm4*f4JP5P=aT{6m=q~Dtqgzf_%32ruZXbr+Mb(lR1Lp(%?y@HCUrxpe$286lNDgBHud3y>P&z zHXtOr_t(2eH1p&fhNq2*JnZK+zYIMt5||cM7Ylr2gEV&QAl5yJ5EnTa1Kg2SRkBC&Fj2I|ayib2 zn_x1ujyQ}Pj=xjr@9au^+#2LY?>N+Zm*uTBcf-z|?8R_3VNu)e_K8bCY zX#a>RpsvuJDrPTDyL}~-S85octDC%zLzxeVVTtc+c%lbAb^yE@thn${B@(}5nmy%u zJ)|i5zxg$5TVK&rW79Y45gjya~acTZ?vLI6h1}m3|%Aun=3ob084GJ^p;x ziw4>bE?hmv_cc0m1=_}SGi{6rq+~9!Jil2I`cypaIybxbbxotgzTKMY7j;tyf%EXLh4fnPP&TH25IV`dm4?I2dTqh7c`)>IKa)I_yn9_&N!c6Bx1HC(Z zBQAGf+bsZv#^rHG^;L(=y!l3fo-j+nJ1G^ST~^pbeua9D{F(;kA&6CBW- z;rW?srVxUp^UIhP>#sG^W6*c!Akk`dy--&A*U`~?^dnH7yuT*f$ppQvg-vdxaMW=P zrNvZrTIa~!9vXL~0B27#^CLVg`9J@Bb{rk}Q2ZIRqx;HS7?7H8mvC3l0G-R3FUGjA zM(Ht4C^(?!Yo~5^XmF_-OX>WHCLY1Y-DE@%jpc26QktaqVD+Y2;?zU&xGFL`A46l` zZj594*dL0v68Z`iD(O?UH>FP7)|wC3jz-F%KC2K$bm4ECIu6BuP>l055k`@3s43_k z+G$7;1ym0u5hg@s-f#Z(Inj7(l!dIMyFRHCn`4HWOHrXcrm}_?w^AsE#Ls(qFX`r+ z&J3}#Lebk_V{Hv?T8S84cpu@Bv72wl^2XL) z!J%=yH#s^oPCd<(_1HN4k@CT|`NS5Jkq;zoOOVm#yen8Mw@_cTBcje{TA8j314^rKaKP0Wg3oyGMV8eU^`mlyAe8W zny*V6Ex;?3z;!(G=Wcz57_OFy^5%Izz9;yU?GRIjUOyF#?WkXdX%cc9STho^i`Ki+#vyqXKldG9ZdeBtlP=nmNBi 
z;NQ2O3nBxh>MPcNG45LF*Cqq*d`;Dvx6gs_g z>tvznzH0veOy1j4mbB|5yhvS~`rH3)I8yJch@0c|oq&od`IXL9d0ez^wAhR|j>vYDkJ*=A4xMDoBt+5AGExcI0iWJV1*qvgc8}d~w?-faN zNEPfM{um74rUv~Q_SEdcp45LS;cnvXwUvuysFXxc%Nk&6+$P8PWlWS_VeSCS`R?Rf zP5J0~E1%OQi2wv9myRU$AxXd#FfDd?y@N+0DW3b>B{B~tN^Vz(ZB7g^qP~hqnD}XN z4A{)I!Grol>Jvfyx7 zzSZmg)`ip%$mx{u!q*}|i|&4`Tw=Ec9>{^a&3>6QR32Jc752D{2*++5$n{d%5E1Q2 zP1X%&p``|a1XYQFmfraj5R)8TDtY+;()^zxU^ zeZ}V)rlk#1remX-nfto!*jfa4lU*#PUR{gA(y?3f?RBVi-R1O+*x_l}QS#eu7VX2==8J}Z5Ji%u#OHX*+e4IdGmK0rLjpOZ8C9PMYrW!gQtG6FP1#KN$ z7AB>}26QgXNz8ocd>i`9<6LKJqZSbcqqS8msFo9pHhAv20%!v>+2D(dZbS#xv<^CP zoURl!*2F+KI8lEdT&;vbkgD(}BNdEtu~q~jH3pwzlKq|QYCb(pVC6f<4I$5dlN1+~ z*!!m;I}2S?j4M)YEBhzhOB!ygPLR`Kt;=hp$qq@bz6o|oeN3&m_8if~wRYR~R&I`% zC8Ei1yQcM4aUx59xW1^noMkDs0$T653x9#r4~^xIbdG){#*WD9lRA5GI4o#;W%tyX zbKGBM%FDI^Hu*e$#^YXP+QKHZro6(cwCm(e#o^>>crUH>I_;4EsMk}MN}O}9rck3j z&OU|Oljh0WeB%FN0R<1uz$6%(4D`XLp77U4vSe-n-NR|tfM&wM+m>_vNV^BKV)<6r z{kLs$oxAT*J(sA7tsp!#)}XkI{Je&22o;WR4y-ros>5#O%`T|qP_OvUL27Jd(>PPe zv%2i4VGAj8$!IV;H`+4NLYx_E0c`pTAvn~_ly_8&&*pm7!#*|tLhi0XERwz(QaNRr zpCDiiCx+1WwtwQ0?1bmY5hzy-^2g^wY5cp`Gw<)z+f&0Qc};aVx+;UZ{R!J{18WEqb4oXN)0*7(U1*Aa^1b>){h? z1;d$daWTDc=5XvmYmD#Q1OjwEM2m6t9j_hrCTbEiAd^}aRHorQ+I=M&#R`^zy2kOp6GZQxVkd#t&y>zSMR-Fa>A0?+(!hROWNT{Bi=Y4I4HC?7_>4oS0O3OrS0>RsRL0L_7k{Z(`^M*$k& z$8;YF1Y+(K>Q4$74Qv7U1JERT2x+3R>4eq4+y)W2e!`a9_=csMyWLQEbHur5swx4dRc zA)vr7uR}yzM?!2osbv$WcuAab>!837&166@%84nculvyHoM-PAh}LO*ZjAw^giD?& zWofEKZ^QY7VF*9?(C96V#2p^f;aBwqE*^Q6% zj|6M8NR5X3yqj=k+loWySD*oQ@*KjQ_Of{&g_?WQdZrzLTTPHai4@`C7TcOB0XVg( z12mIj_$ROj1_k^mA z{Fk_wGq>h^WJA`D%W22=gW~@9q7Q2B>U}H}B%RC?PsRY_4s1n>c1vs0(ZxZ60j8v> zU>Br@|1%ysNu2fX#)E}a-CaG*VD*Jn)vHn!PTwg!p>OO5KU7#&<(jcGE!7_G?(%zbXz=6uPgz{B^kJdUHrX@AhPpXncgOQ(pO&=)+#lv+y4bi? zJRN;1TIA0I5r4$xNJKABDR=FVps;Nf*Wy2y1Be82dPF{mfy-sswKk1yxrDP3nao04 zbvYd{N@GOXhQk5Ma%QSHSLASD_x3Srj5jx_J~T)bMEbl7zyk_Aj9gHAfj~7PlQ{tW z2%j$dX^6bVGOg{ToM;2JF_4*c$h!rRqsO2wWBsNnTC=trY^6virs!qQdJ6f?_jtO^ z9<&?3{U$i7`8A_*j&fJz?%T#=30RhJTd{9KH8chBM@@7bl+_}Bk2b&^}@be@Sh z9Y(jC@c4^P|1m`ply*Q~NI|BiQ>e=%bZLT65kcs342Wh1w1o0FYv&XP!F(j!I85UW z`;?%l_VKuD6Mft#bp@r@zioQ*gT`pdmvA(yd+Rw#mP0f z*R!^Xmacgs++6GD2mynV*ghT4AyXSI<%mHcrZKhI#6YX>@>qZL9deX+cP533r!S@2 z-G9CUaq8}=^>i(4(m((#&gWJ{`kE)r=DlB5l#v+PmRtco5`kabg#%rO@VianMmsm5 z2O12LP5LXYD1}sXxZnJAVEWM7JvCr2ArqU33Y!TnJ@(d)?ctV;wj0lqxBZnAw!V!U zjVYz^YQ5#tSMC$~hBc)fJ6WU>1gU5<_FU~@qkI5!XErIN4mj*mj@ZXV>3tiTGTrK7 zG7Pa-`FA-VtWT&tFA3Ew9o)FbM0Raj*Og@@B)9q72ny@+0VzV7Yh;ZG3RW_!F$7jo zWsS&PUvrB6bp1{Wg-Qc2;#5%&CyZ}%;{_tEVatRe8E^Cme68>0F$rtZ7fEtdr;OjI zh_o!HbygH#_m))Hg3~!HbC`oz%lsMD*yBQvMu)5 zNVitI{Y2^;yko1Dgr7!l=0VGbYoUoo_?LD!W(T_zq^42PJbYKn=eIZ34lod1&d1An zaW1MS@f2D^X?ex-#>65C=Ged-YD%+OOMZ@Noe5Xx8_*E53|E$hTB{ql)NJfRgUp-+ z-84SU+0MBzE3HWk`0~fOKGvGiB^J$nEKCWKWD81C^b9=$CLcA>5>&>?k^XwVVQi1% zjx{uy@sn@!dX>$UE|q9KGEb<>`+icbsgtm#3lN={eppZtiwTb=++s{Ot7XcwMOXL+ z_t6JPM@RAmLLi<9p4)j8a>;HIl|7K1^I7&iV=(fD!fa=L$sex&)jL`7ckv0m_8h1C z<8ti=1~^lKhoYlu*jRwgvPqmDFcB1Mhmm5~zYkX|?QP*`ma1gWx61Wdw5X1JJkfDI>43(@RPLc5l#7XQr3aFYBr%Q_8 zq``ojc(&^71+^j@5pe9oc$BNY^Mgm^tbT?z?TB`=nYd_E9^Ffz9`NoWx*STq^eRa8k=TovuOHm$}5s zeG2O~0E2a}eNHajy6vW1-M6iJCHX0m$~HDo5BR64ieud|W~?|OK6iD;wgWY98|GVr z7;9Vg1g{3YuSrY>j;*U_=2P$6G%x2qe`#L9(ciP6Y>6`|M1zA;y3(iFF)ed2gS09jhcxbTxRO=t}F#6$i{f_-Pf#=~p{99+lr{KUOzhJ-+7$P^O z#vego@6AmlXdONce~Bshk9tF1(V-;~aFTjW80grVIGR+UX+J_ACL9ekGjW$1_XR*7 zWJ`wcnF-^Y>`@%e8`Q^8S~*=NsVgi@;XARmXoylSNK|jl)SDVX4MKt(Jq^cmgBr@1 zfscE2EUY(vOXG1oA6bN)Mk)+BZd3bib`j7`q_k%Qrt*=^zAuI3uD4zNYZsDu_f7FZ zKfpiMN0v?P(lWTW>xtG=vWOhs&`X8F2bSWvge??mNYE1yMOT=#LuYt)Hi0kbzLzCL 
zF}62Qk^zK+6CIY|iR(7gu0~JhK>a?HT8`1)r%iXRFi~F_8FeP)UPp}^%=L#rI!qnt zh!G_+cm$PvL=6LgK2hSAJ4CQvu1Rom0}PGV`aQduUbANCO3e8@7r~XA{t{FOC{NiM z45>K&!9>CO+nUz&z;f=wPILU$u8ZSt>yx+>O+E28=Vgrw#VomT9dWqYj4OhvL1PZ4 zo|UB^kO_0R$41~-Njq%*)G3b$gUq+Ly`gd9Io6_9n^$CA++H3!TQ#)022k^rcf$3s_9bhXWlVxi z)(ELUgByoq*HoHoesk`CECvW^;)7OhP{cdWyHG2_jUV3V0lP}k*L;}!RVpDoIuAz} z8c*LxY_ybbPZ0QJ7bYdlthZ^HSG3x0qP8+XN2TRBDxRj)&D|aK?EZWL)sruPPi;#P z^VoQI_5TsH<3K?`Y~&8y{x6{R&&MyK5N9vm$hVriGw6PU*!8!_X-JQ#o1m%ha7YqP zUZksd03|+4uf_#w^3Q7sWJ#6tPA+X+kK->jo}J|Deq7C;+rf)G1q);Cu$8J>A@XzR zeMNNiM4Ro5wlwMdUZ>8ixu||jFY`$jTg)}yMypMAN_=(g3{T&cG3%L}8G4-l^-PS7 zIXL_AL=46blv5oeAm&)vqjWMUgwp(|F<79^^iHEH9&X1QRMu+U6t$OdHw)EHOjbY0 z4qvyQu&rq*ZzJC0^JPg+_vIL@$o=$8N?8y)?rAZ<>Og4DLYiLf!4@fwMzD0 zmzZ1WaIg6f_@?7-A5u%G+6r22qNcXvcnY!LNFY4^>}~Z@iDc}n@fyuOOoh*{O{P#I zu)vEi(c}4HJxz;}Y!f7)o2dbh`u@Z%sVizYGpnTO_%~GI6l6Y@wv{}@vyPRI(LTNH zxu;EP4$boH3~`;nuLO?|0v?4H*=oJOUDAYAN5J-LQP6+Z06%#-$Z{k5kn1AZ5vevL*krAUcel-JQC;o;QGJ-3;emR>hr;h@cnS%^KOqHCxrH^n=ZERpiE%mIzz zC8V#&x`WSIG>*6cy69(+6<#eutC00E7<~;zYvSE!Ivv$C1nL9VHHRg0>JZQMzR31S z9()W@JQ6L_m)0cFDngChWt@>*?^3Qz>G*VN9tLG~KWi*L5)F}I(`J&*z za*z&ce8G4+CApL|tT#5!>E?^pPtA;2D|K(xIuDR%?25+TO);gbp5tmvcz7lWUlq5@C z^&;9GH!5wKp3rUoOqy{~B?XWtCeP17FukIj3gC-<+Cr9#R}+p_@7%Qa6#J5f^*do? zkNh8EQVunb{IvPwe5sL&|MWF{nNMP)7~t_iS=vD6l!0OX{i~Uwo|Cuj()qbisl3_G z2zIdzmmRjfOX$tG132#qW`5gWJ6Ei+b1zSgyXI{scxRh-nSpdhV|G`o6qJh7X{g67 zPT3V3@VWEF2=1b1u+r+TM@xMrxPMaI;@zD_i3VDdn{^9vR0}+u>$BA>bvJn&$#}Be zZ?|1lOHUG;#Ub|))Cs4|7TtzpT)j%*UmmhX?p5k_*r-Vu!d13W=hzT!){+?iho-I_ zkZtz|EL97ai^YymBdw#LZ{uoVOj<<9fNG!wDZ337j4U4)7DC9(w|Pwb7`w5Mq=!Wp zT*zn`w>M8OUVg&aX?Hl#PK}i0qg|a1L;pdu*LtgONa9Z;nJL(l0VVosu~*4>4Cj@$ z3fI=e^_7|h`LK{r@1O6M3xv>MYjRBlo_%fOJ4~VS1Jj=QbKnIxH;F_;Ul3C(I)1JG5ge)%qBj2Y^?no;#gE%nnWxUCcpOiQ*2)I=Y{4|%Y!47=h9fISKz z)sQmLj}JQR(hY!wnWXj$8+YV2!wRQ)SGqx=?776GCH3n9D~KSCJSXNeVV~yy0~GNO zH&P2~XHa@bDb1Q@!Yi;+Bz_&NH|q>bUSTy#90(p(#)$p<;OMzZ;xmgXGUn2;f%ttS zfVX8Lohq$0Vs9DI|MS29U*BpZ|5gTn<0o1mD9JWsKt3T%kIc7xrz6P!Ei7=u(zCyr zIhLgWkb1y9&UfN^Qf3zR5g-6okra|7K(n0-?g#rejXM5qZSc?oIJ9~3x(1Pc?X|?j z%x(r)pVF{rsU;*L-r?7qSEw$cN`-^+U9$Rh1b$WS*omndgWD9TGAP0&dzRe))abd&BiY{aVzDt@U1E<>7H#@^ey@ik4W_I7d(Qv zN*#F^|D^T@8q(`AEY|6?l7^!6cK=dhv(N&;2=g0_``(6w$_0c$HcQ~%(Ue8N<5<7- zmtz_Sr{O?T4nH+CDmc@Ky-M z@V;7`#4c@F%25j0^qbHCqm_6=3b}sGX=lv++mwRy^N;KdCe_#!%tHzA{fF(iCN}7XDig@vpr19-9nwX{W7vEbPF*hkfPZah+{r1dcOFa z_Y(kmM=N%mEZ-!t8#Y~QO4qAr{!Q)g`WF(dOdZVqcW>UjH_wGK7yLjgM(Ne*&6Cuu z;&pO^W1F@BkJr-BWZL3*O!)+&E$^k$B!eDS{OGvdXXI)-;IRhaa{rTg&h)m&{T2VtOPN9+r9UlP&0(y0yG^U zQ{M!oZOP3j!^;$kT#VZ6Lxe=gdBvqngqfJWwcKV;?6Xr9DgWXtQu5}JJOl`&;AE25 zRoC%C_Mbsr?uRoeP>HjVSb|0mngeKOeXaRTeHtg=H~4&q)4{%>cXyAq9n${?Nnh-|a0WUyYCO`xAu z4+U=4znej)-*0N!rI5}x1=^A6t(CL~ZDbDHetVkF`(VSw4kz#mX*!W}7)@F|g^t2r zMsCH`{EBjSur)Z9*wgG7|8rdq*RDZ`PSZSk^N$36b3VN9J(}8Z4HGU5(3mUjqzJseRy1aVjiN zumiANod@HQ2J$FNC=JwbH~$NTTOr@KsQ5+o%$~*he`N@5h2-O4T@xBB{mLuk?@tT- z(7LoD#G>rGHY(8YtgV^Z^I&oIqb-(L%i2imUoR)ELZ`pD&SA%cm6H*=%li`^Nw#LI z|4fq6uk$-aU%vw-Qy=u#ZSDfqo%_+K{&w!?vn3$PbAKHA5BM`)I9O^Ud)I_}W-#@T z1B>-HY*0j;qq^IVw2|RdX`@}=D*22w6;2{x`ed9-V`Jx*v^=%q zyv`t?!v5tdO(w^ia}HmbN1Fj_V&ItzYA<{UYy)q0Dc{bbg%LUu+6XkM(>w+I14Rtw z&BEof%aD`w_vsmwEbjMg2nIKuV>Am%WWVKUPqHCCfD73)7IuCaP!4i2D_SM@tDs1W z^2xSx?y2VtjW5yi4_~ddT-s`IWb|R7y5X-u#WQWC!2y8y5nNXF$<-M-HBfK-aTPF z8j>1I9CET~n?n5k@f7Zsqi4wf@K#?&O+TakF$G^2`j{)G(mJ9*n zDiB0Bo21~|z-<|u@?79u?G~Amuu}Ua&qmDKH=(N*iM+#wI62-&_A(c0L;$?u)3|wb z79QSg_z_s+OwZ}Vq4i$=@&3W{3DyEngQ$8OsFlZkTqj2c$Rh)Jm4{QLMUxp|1f%sC zCGN_JXXMPB!Xd^*ymv&G-cvnlro9Qs^;`c1SF-ky(%39=55ZM^tVJw7B+8XXV|hc% 
zCTy+GDsKc7daF_LGOijt4aFgy&gacHbeOGHjta|g4^8hA**f5u3fdDFbcWat;1c>2 zHe<$I1tefR`gAN+4+rgE3DdVm+?Vq85E}`tZoS!P#@{OPbUVpk7kY=*qD)r0?mrIB z&l{1<=kKwocF}8Y+I>h5=C{~eHk=OZ#9!~T?~|CUM;4UJ*(FC;lawhfw5Ra7fyC6J zJ{O_9f2Z-_tBKZ)N8fSh%cIl0tjMNL0V?M$IdsE05-YWh(xzkSXUi5)4QG)1&7X1M zEOuGyh4ndjfbB*%-$rkWA{#}g>&bm(+A=E z&h*e7M7)Z!QVW;=Uut-(`|Fz_C|Z|{Y=2%f&9994f&=F(r?AYlHc3PL&B$qn-_ zx?G!$<2BWUv=bE@f*-If9#(9z&3Z;wJwkTVc)=PcG3D~c3S~S>|4~L0+f|9d#t9V< z@EpE2*oSUzEjT}%Ae}1oqZn1HLH2@N_J(}RGcn@hjZqg` zAS0Hzzg17EwC2-k6#9lWNTQm)hItJ_V|$?i#)ee6%0D}l_7NB$he*+Udb6~-O*b-1ny$|-8yfFt6rS!h;~0F zF~86WoR*#t|G4s8GYqUAZu{K>@zA){&O5R+)O0Le^+Uv<<%E$ZwxmM%>gFAt4`Yn@ zudknkybJ&)sytur7h0mT80@Oy-5_{$Hu*t=EXKyDPIemEs${9^gfOhiTs6BQIojLv zFnEiuW@R(L&HaS*EJL2KAVQ+D%RkSj&`v8hfPB36>VV2pQuEAi7eZRR!T5}T5Sk<$ zAajIjEwUrhB@npJlv~ySj-;jJEgC}@_ul#^iC=yClnz86!C|s2{KG zxCim^;?6~=4%U6JAEra%U}|7d#H>M@S+7zdt_E54)}JDg)B2eFEV^+TAyWs71%*MT zg$Qp;55Ra?W&Bix^?vhj_%tw|=5sHS z^}w(36RMwaRR{9L!16AG_EyA#5TA3iEhm-40l#oaVDa5+EyW!{&G?i-0qYFt;5Ofmv>!!iCit^ASMM7E>*zJSv?r zO_}{NaR{3{ZyCIcqW*w+2Ue0WJExhc$b+K3qEwjB^NfxoHp3se_yix}Rz1`+x}Fz{ zyFJmx=X#QH#vp1JoAC9YgIOYD_vbI0Uy!^>(Ww@!<3lD?=!WBS>a~4|m79>s+@62p zam#)@TVf4kM@Y#>CB%$z;n!9_gdE$_!P(jt_og15ZptNA`3nDGwmrkyB|u0Y(ft(@ z0ygXdIo6W0>{jw^G#eD&*KhRqJrPBX#DgOU9~kxFT-*Sf&Pfdmsfu*!3_ah#c$+;i z!|%v28^rO`C}9Ko!ybWm&}-vu)iu(k%AEVqE@&Ut45he)cc)keKLMJe9v-(N;QK^x zVaE9jV%YKJ{MxvG#SVcCf$bTI5!`_f>|E4lxT~j(@o8ysK5Gs`#E6Gw7RZ{k^^jSz zXs#esR51?Z?hZd82L!pxpkBs%3YIl$Wx$c|?!-0@l)fi81ABrwaLUsbl1S$b3ovonm7V^MnvJov%9}G?%lOz_?SUotz zwe%WlU08a=hfW+%P5Hm7@QsN?ZaRA(F{Y5BN$~xP$uw3gsDID!_3i7o(!qV3mc?}b zgdY;rlm36E(utwENDRdc{u9EhdXU;iLKxVLF#Bk>+tsYwr!DmlQw|@sOsur916{wO! zw-q{_361o>eQ6o-7H#rRT7Ea}@mPQLfB#kw?3gEZcJ*xC*3^Cbw$1nVqAq+`m4#QuGYINLv2_R#TXQ^n7Fo!EAwqyz)bXs-PqK`eZZr|TD zxR5sybW5P)G@>!`GaQ}+Y zOao@cCYPlP8P^`o4?#4rEs;q>?XaXQ-B{p_)JmbAVB<@^;=X?swlFj41 zAbn14nELwOP!`!^80XFKT|-)NhO~b zbA5I%J(Qpp_(!8pwp>O}j&dg-i^TIti^giW!eM-ST2b%6SzA5v_KSRF@z@-!|M=$PoN)QWU~oEHE6Q zI>pdT^7>XYyp^~6LLeganeBPiW}lW{f_#@$nzcLv*ArNuP>(-bbmLqa9z^*Sqd>xs zZBn_Ph~7;`SbF@a77T5oF`=w)?(dqn6fh)6{a_S!C# zu(+Hss%O~Js+R>9(D(Kq-)7t7G}&<_n$Go&$(XjD9JN|}6b%r+=W~3p3J(#ol<;Mg z#RCkN_nhB{xglIbfF5b56X-R3F1zh*sls!?6vy58JX#P*q82^#nVNQGZG3Tt{m)vB z()7dhv9=NkHVF24=o~a%EUNWk*k``rK~-z2jM!4?KgOUfz#X=U5f*lS13;`SukxSKq z{vF`Z9A7TLD&~CX+3E^`4T{pH#PMMdRkPU`+Uw1s02Y>QL^2+2v`AhFvPyO`0j5

zz+Ju9{rsl46^eq%313za(m?QmG{ZYxxz{E`lWR&BvgLpE9WUoSelkI%)Ds!%T0iZj ztn;xOpM?N_nL;D%);*-pfv|c{?I;obn9I?uKlVU9=c7LBAwW<(>Hi>zLZTsA6szp4J%82P{Xo;9kTin=7`Y$hz?a7_U8) zbhf>YLrGw^D7)6f=uVCLn{!Et=q+mfibuY^_EdD?mwByhN>kq`?U-l0^Km1&RvMVr47(Q=fPeKTsA`xg|

>}&UF~c4dDD<+)u}95AF?2P4v6^HyDT6fJXk{# z*!?ueVzC8E^Tjp?X=04mQPd^leaFHjdm|dC*@YE@SZIPUH8c2Rl&|2C9cQMHlBNCb^I6YD zhMZkx^3d6ZL5Np0kb!{@shi8yx4J>k+rAg3(l-4%>RR|4=E7DDsMX%c`?1?l&k$2o z!h|teZ7UESxHOk@1F_Hmlv|oSvexhDKrJ`$k@^w;ZGJrpP*|(%mC997b&uXAIw(s> zR9}{dySsO#)64v&lYR^_94S*DHC4XFyiS29!@mG{1U`wNsGJah~DrR>uD)%V%` z?2Dx)#@Kb6U^zL}9*n9Fge@WL4bx#P0-a3=#y4C@g~Zg-x3;n}2Qa zN~u$1@}?05-;wm9`Bu>-d)Q!IgU3MrW~c(4evHk!>2O(c@U9jx6I3>9t$vxQ@thnp zPOvahZS4Jc?f7Bdk=-r3-M70U{9v%x3^S6HD=9k))}+uXhz<|f%d)dU zLb~-dxw5%nl}cb(4lhg{}i2c%|eDtV|R zi)EgL?{j_ss+Ml>(O<|gC)z+`j7g^SH4FZCoYc{=s!7K8JX-#q2+3e)1V!iPK;y@g zgp4&eHd0F%Rju^@^GzcbI>qEdzOm&Ac~OBd%y^CTT5g)sbWJu2_g7RAxSBipY4}-rZ#x*Av_7<`4jgGw^HhyiJi+ zHlJ$09x$3^gbAAh2Yzn98uzv*_v!r?-e9E>)xyjIkE5*=l^Ks1MF@`G$?u10wBc`b z{tQ}izOXF2buINOYX$S91Iwie;&+iH=2^VYZP5GL)3A$1{qGO2T=l@UY5#ln_l0X2 zhfn&;jc4_A^dt5$lm~Uh@&F1b$vw{a|L`sv=S&YwvL0$Vq-0nL;dB)bbZN*Ib{6JT zzs@#=Czd@k+9UI>VI7TI4Wjw0(YzHaF&a;V3-UFhj}<^tAx^l#6TC*+!a{VJTOs^s zC}d z&r{DMz;omMvFlkk&6mwDi{#%2Oj`|B2$P(SP%E=Jf_pM$gNnNQ!y>_Q&{Tg-#|Ae> zc6Jj4vTagl-kUL9a#13wqmbzdOWc|W=-WLnu~#{72c#z?lC9D(uyH+0-2pI>UnLQ?%=L=KcR6AVs@zp_L)6dQ=6CU1IQB)7d!Cz`6Lu?F z6Ph{^TL6EU24Ppb!H_dGPU2wZ>z@u8jU+i!tCk|ygz3k;Zv;2@l&2K85oD(Ml zOdll|h5C?xO9AX=GsBcbnU@{;be7s_X1lsGy1={6&WUoE9PTR8f;v=tMO=rvn$o24>bOkMCYI}_o-*(A>%#^T1_C z;IDJqysGro`l-L0l4>_{3C$~ERa$IrN(&x^f}FQ(_*qk3el({!qkuK|f{CZ9%jRi` z6cdkJ`GuV%O?DT$g^<|GQ%o0HD=n_7yP|Gli>ulej!Gu0dD#fIcMto*Xu#{lJ6X1U zo9t`S!sCIYvOuje|2j%PuD#2CdDic$>-z<7h?<6Yvl#?U?4?b5`@6)>)P(HigjL3o zEE9(In>B?-$CTYQPBFlTqLVcnds4t&Ff&AUvmy}eR`oZpJ2Ke+talxD7HMu)LS=DG z6i8fis4?b1eZU5YzLd_#|LQ<_LR31DN6o)T`hBy-+zr=|pCu>+QLKsK_KszW%Qtm2 zo9v9ym@zlG6uR4ro;#ca(VG5hp|3hLxmv4q3J3$NPbaD zbCNie@HJ~mlAydrB6!wkov(;ZRQotbS?~^v^BAqe3K76;b4+R2f>HHjSU;$n+TfWU znBqFz)4JdMBa~``Hnf;6(7U|3bN4zEdcMC}GgW60V$LAj>um|o>n(+B!_b+Hw)@S$ zjr<07^1Yk_YaR(Xi-{&pxYh=*|F4`WY)l2MY-=k??vh6ci(|}JR?(56NvT&P5iOyv zTY&*qeovd}rS8Gu*qBgYVkS){l_*U&YtV@ggd)^)L&Pw1p($Kr71sOBUnU+oMn(Md z814CGcrRf_!GP2Ia;AS*AUj*5ZaV)5r7RJ~ziOJ$-Aqby)`VP7TXC8Ev~>6&taf$K zm-o(NBu;mAy5E5sn;EvxBt|jwqi#7%smRKsZ?nyaCqPtKZGE-o>8`C>LA<|iQ~7y1 zuU#lv{2>P5jqXP@mESIp`{~I7S$=z%^(bAhA5g2f;UurN$IHkBA?ytVf4kWa(Wmi|{|R-bH`Z*+(EDAM|wgP+kLzt|ahB)WX%4rOSP%&e~f> z!EggSUKo1}p$lUBN zt^6##*GIj-Z`f7e;-2amcp25LK%?nK1C{*A;S>T2Q89*%Q8%Sj15iDfkT|RN6>Pl~ z$6Y)EqM`&Ktve!sSld3qkSyd{|* zLv{n7t(xGqCpI;~p7pA01%LWRwU0oGsV$bx0uy9%Q9_j_=`zS+Xp;7Zx=ybhE+;=~ zl6VwgipVD3c*t-N>V+aUy8To)Oeq;RMds%f+znb?Xyj_N_nHLxSt|YHhpQCQWM*X9 zwStv-`F}k}i*~(v!l(Up?Gc1dKMbADU2Bwln4uaV)8aM(qw+s zUhWYk+WX6XgyeqbRT3~Hbnmw*ZmEQI++ zjT=Db(Gw0*2>4N4n2+<0^bRIGqI>WNt~DspW$Ldi-_5t{$5Ggk^invk`cJVFL+ zWi|1)%K-|fH86H~u5eB5Odk%MC@u9GzSkUmTLQh8`U*d=T^}#|RCuxw1dL)3UrrbK z1AvoTU(aO!hSDr?nZWW{?(_V7BfvONq%AlPl83xv;xY!i5_sCo-wx7(PJT>2- zrr@~dQl_Ke8*tHNo#RT=3blsp-l9ELTjBypIA(|lN70}713|Ya+>QdG3un>#H}z>A zx0W1zuVo~m^`jQNPq?&~H3CeB16F%IjCs1Aq(zays?H-uG8qi|^bsimi)BDp=#!l{ zycVSaBqsnq0L081%~}PN&K_>}?@q&eEQSC-K)}E4%7itVOU;8_c5+JdYh)dFVVZ#S z;6#?T6Fc9zlH2gYz~p|O;RSpNvRT^jICILbpu>gO2@BIN{H6(1D78CPPhF=_P&Kpj zJkYCy!o>Ade|e#y#nxZPT~T&uJ2}OE5Cx4K^>N&pJHk; zK?~&!&rkA+;g2v*wj*gny7ioF>;sW1U{& zd4y$Cn4aC8E;QO?BIZv9B=66xDDapI4+tW(#MG`|#S9t8jD3iC>@^2NkE%n#^%>8z z4cM5KOQnev_GOcfc5EQVpFQG%?e`&WV>W_7=2Q`;OAJ#O2H3xw!EHf*g?~;%@XKO% zl~gZfUb_c?Q3A0jU=6Z*R!U^_8($WcrBnrevdyp$ZQcTkwKHnnXn$vpJ z)*aj4$XP)v4ZRa%-O{pw&^EhOnwws3HpCL#rwL99 z;|_5T^ksi(TpmecA|6tb?yHw5Y(S`HmkXM9_lA)bX>;TMXC+JdlGGyhsg~Jw>Hcki3BoIlztRE6Z!5J5`My_@lK;UaAh^?bq5@04A6750P zp1L^hGg4`kZIaRIdDwcl9{q?8B_mP?2F;l8v-cJWWIeI)@V?2nly6O6Z-&qNxZ{BC zX^%TxRLC*z^8!K1bREWClSJ<5nZ*r4~Ae#rE|&X7PEjw2glifs*}3jt|E{ 
z8YygWuv2pGMeDgPI3z*d6Lx7By`lLy_;vD~c)6OXZC`{J+l6<%fN1Zd3#|99Xb=b} zI-cN`B16a$*M|ycZl}hG*kJm>W?|hE#9qhlg;p}pDIZ*);^W`=0+9=#p2M6b%(lNi8j_kd$6(3YBw&(js zV1SuYJS>zn3|+tzE@~EA_G@#NfqV<<=G#%_>F1geW8q`QelQTEnUMNst-y7gIICBj ztr5Q=twV8pgvVL)rdwjEIo-bG5%!#F41ii&8!3mA4beGXLuVl%K)^-JQd|Hz*5~Eb z$b(*e1%D*#ucxsadA(-hFxQfGObM7qfnGXwgDZoWg@5$Oao0rga_IbMYb=glYv}ss z4qev-$2aDw!ubHl;tkv`Gg7voa@yRP0kpRnRoXJ|rzZ*j_{}zd-T&!{Dxd_L5s?LEMRG=sgX4Cn-<@ma-9+Ak4dEFy$hDE!jh`=e%Ue z*!bSj-g08bIl66nI)0{AkUfw0ZVIaAwy6Q#7<8a4dp_mhN$}oF-9#x7Bz1OP7he(+ zA*}j;p^T#kyG?@K3thJnLKyXOmp8?S8^A9|OcyW)_^QO*{*h`s()nZB)ec=7fDNl{ zVYR=%nB*{QtRR1@OV{;EttcfELY|l5#(ppMuQeg-^EYSFW?+Y5KZSO}8*=m5=i3)a5~7A>Zo$+DrFkfpZL@6T?v39EUwHQ2UZUhLldpY}0)voy`(; z9vWGFFC3toc7$_G^!Rd!%Ftf}M_m;p+Rw+&&Oc&_Hl#Ju_rsu(#&m)b+BE)%vw@D? z*g60g645^$f6Q=HjZ?-zvCZi+9k-2(SnukRL_pj%88(k6%?ib%Va8fBkjr_h7uvb$ zGI2|}y)-<0xeO;AHAOAq2J;i1PRf4l_xUINR-F+&WXq=tjt?MAxJ4=3y_affJ=X&c+|p zzB^_PEl6`y9p?}y1wsOp*^>}@%2!Q)5B`W(kENG!qMzK#wek23)45aOpdRMf!j-AG zGYuCmM}d#Rmk$p{W_(j(7}*NO0|h4(y)&wKQ1M05AK~qX6UKWd>il(MF|_$wvJEK5 zz8JEn?Xtb?Zs*k7nr~>MfQXSg;OI5fCl}Rxm$NNsB8Ql?D3^_kzuh!MK0`yXS+LjSNnbdzh_I9JJdMvK~>ZWRPW3eXLS+NJjGM5|oLNV%EXpzPyfkQtu zvWdo5lj9*S^;`W-pym$#&WkVs&{#f6dX>G>OLnQ!@1Zr{(Aea*fdx?ZTNMWJvA2xV z&w+gy&Z}2w{lXWr#)KY`wJd8+etQ6ER&w(7Uge*9qo0+;zuXS2<-ctHUbcL!sn!nB zVQPxz@$<7*+wF_zV$l~G;k(u}Vf4if#Fo;b;CJx)1~byILVrJN!NzONhkl|chkC20(;b;{q0Oa6`Id~VDhhZ;y(JsdqJ#T z4J+iITKJ~v5iZ5%GbnG@s)S@rZJP(=w%vl3mMH*kh2(p{9M_67O?RXLJC9nb3FKtc z4d)4%k;@l~2k+}549B9&5z=sXcSC@NP>~>dTZp+A7Jhq^N=cI0;GCGk7eK=SyZg1P z+q|ndM2h;&q$oD-+0ra0nX|7 z0jz(J{c8s3HRKok91@$XzW;`j>s->1m2GR z8EmOt7;%7(Kh{IPt%ahsFdaoQ6CSsN+eHu71uRa5f^+tSK2mIQ_5OTRz1X1?Mtn>MhdQ zL$&W^za|9aKG&3{>*B_Vze%UtR|~WcA5FXmm>+F&t#2s z>IfS_7Xt&L9%HI&E?pjjTf1GdJ}`t*%w%kB*b5CML-Nb}^xSB;SW~CqGqlLpG|=m% z>-(8WGr}+%o6fE4@dlrp^W7r_yUC9q)ZpxkceX^9v8egcslI+LxmG+v4W+jH58cV9 zpFn|hT*r(y-5+tLhLBjgP)#A6>YUnIQjN1NgdbHx3RSca@T4K~?!7Cjf+!PW;kt1m z8hi9&BH1Z_4PIfM!7QLuJx24X5W^ln&P5U_iQQR<~giGMM9ac zW0xB9?3m7J#orEn9kmGBDiZB^DLi^p0r&QKVcdPi{+*uILcL#GLtjO|a>w!X zQ~5(eI^I*Z!Qu&D^MC=OL7jxcDP)%i%D(6iMyg1GFzeW%iNOXO8=z7B4eicnj3-}G`BJKx~&K8;u4 zMB?n}Ijd7)wmhJ|FZy~MzTSGt@pfPTjtZN0Bol5lDTQqKY$VQVGE z8sH_iyTSatE<;*3|M_*=Pj8EI`d-BiB!GqoW^<$giPMhZA+42`StPZ=A{>AfVEFTw z*LqQKC`%W{5b9;ib~G2xfGadzRmIYWjsN_|VRtQmygUs{TQ~V;KD&|3{wsH84WXtcMZWtOdhIm1CYs-h z#6OE{#94$W7KCTM5g78#fWSxKWmXnYwRlsHdfcQKc=K2n?S7t1Bj3!{;z`2bf}VVf zBfa$b!^Y1GTEDfH6zomfT7Qf&c~%lQPc9th4x|t@g{!~u0G5RvbeVc$3Ltw)2GGFI?WIlY%U4`%glOOOo!8HEi zZ3#3hvt|LOv@Td1*?j@AbfE#qIBr7Kw{AAV(i!O?x%PbH;sfR*-OZ3%MkChUp#=X$ znh{<3-qM_m&nKb5riG zj~9h=KA*ulElNU^fkIOsZh%AjX)mJ;ey!7kaQ+HCWEq)smSgP#EFmEs<2O0!91L4H zyF~Nm0$7e%mmtK;uZ*+5)vM#B-QU35REJnMN#LYq)x>1kk`B+2XEBVJsT~xNXd#Y*ExV*tJ4;42xbJm$4!vjaITpK+8t@VCingVqF_op1(Jnd-~*T351 z-1u8Jn0API_@(nD)FVB1@X&%2*KGv;jIL1Rgw!rpdM5x3t7LswU4n`>OmS* z057>0GuY$SLSZ;dD$XEdSQ!HMHL_#*=9KbAK`X+K>TK~DLKvL-%C?On-W}Fq!?eds z?N`%mRe^Q7(_CagS&Gt>-YYGfwmf1di>-}#1a1^&f=b^IEeX#EJlf0hSnncg za<@W-s?}(8cNzBqxX(8dRa8(zQhiYZ!XGRwgpe#mNTN70x~J=GMEqv?&ZfoF*s%6a zh&;gK?2}shi*mS@19t`!O7!`}JEUMFi9?A;nVDXgQ0Oxvg$1@?+|6=do&w6vjCI&A za{(yg96W*NQ%9#J>;!GsH?E}uTw@wBeo6REqHdcUAe!^sjab==pRZs~Q#w6Ig-r3F))@j8Tz zm)B4k@uc>b8pon<7v`wIoVi4Pthvkg@z@K>L|ld1z4-1%z;x~+uKjqXzl>~F?$y}- zRoi|yKJ7z=MSx~>3QOq8ftp<3z(IPJnM7?L|HFee^Qu418(<0baL&qlI(zK45CuXREm3$kHm+L7+E^ zEv+f4Tn*Kt4i^%$(3GbXi%~MCOHP zYN^6lvil6rJ+--`j`(mexMT-ALQjsH$!8N<>KxjeB@2*x#2gBm%&?1WipO_2hSqZ% znq1A>M&q@5JLJuLPTO6+lFsAdcGPheCqwcteY|CZreR~`+-OP?mQ056usfRS3LLX} z-wUJ&L(*#^3UIX#AQ163$H10TeLtE)Au2sSF<3b_C94)2SxiZ+>ZO zz+z^lN4Q)tnYb+XA6`={Zqtl{pMe5JPPx 
z>8b^q^wE+dw&}9yfw#BSJ?XtNpoV@vcGa|Id$({s``sRAp%_>4YFcDConpQUV@+9T zg~zU4Ezj51^3vAs20guIXis3ct%sI*9Y+U^^~X3mo7e7UXW6?CW_YPf!G~<0UOU`% zzPv^mhjUqJc!h1szY!0fXT%OJ*PPa0$qf%RnB0<$RxL1x_hy&`57K(|c5ITaxlE%J zk=ldYJU*NwnJ2}wRX7Ts_jGKAP-PNTjT;Gl_Q~Q^1HpJ&L7Xs%OJ^r1iZ zgR4LAN9@#a{_v133#ZS&V`^9;BoORC^Ez|s)!BHrdXLPv&_Ia{0BN`Da2yCt-R0%P zwswvoSM+g(y)<)Fr%;+42K8W?Zrlh~?uLd@*usc&5c-ubDMAe6I zkRK!)OD1#XYaO2m`NEr|k2`tdP~tL?HJssD-a!N(*9@Z-z$rg?``{R1OG+a3mO4du z5$9xcj$<7mP=RU4vo^Lo(ULjj?*dn&zxFZ8J_A7Bhx1z+m*3L_XNgjF%K~UAI!RqoxAY(h4 zQ&jxD(M@kVYLten{nXLJ4^%CygT)TQFr| zqsR?=FA)kIsO!4{!h+1_ZSM%LILi|Z(7Jk5Oh2?A(vTiimVJ`hQ9>g&{3+a^zXEF6 zAS-zui7_>)BKaHLD{81i4}^f{_Ci^Sy-11ytp}oNr(;$>zRom>qIBy4RFeDKwcJWt z8-xT(Zs0Mqn8pFuv51IkG0>+y?vA+d8vP_P*t3{xQcl(MLBVzh^qAL&8 zaw0y5ObU@(J8q3ijB+_P6_2lpm<&*-Jc^ittpb#*7su7mJ!u^lB?--9%40Xms;PUZ z*=q=|HxE6Q=4u{bA^(QRM2_kIfnE~Ums)M?XjpxwcQLn)pAMYH!`#M-yiVXo>|({c zlDnIZcl+U8oxvmJ7_TiXa5=R;b+e0L;iXc{f)b%()zr(At!Q@|>C=QM?(WC5V5N}g zYSMsvi@>h2Xgph*v+LqI2iWgIo_HJ0sN#Z_G_IKatVE$wZAhAF2<T7??ZP2Ok-5x#UX{5kjQp|* zaUlfYzgjqcmX5~S>^>b}(#3|oD_talz2hI%23j2>EhM*~C~>=^UH8SvXUO zqcjv+1jjkzTi2XU442_8mlHx3vIHVr>yw{IL2pF}bE_MwJ497owN;@^Q=b9h+C=DU?1zFa+giAs)DAB!jpIuIVZN@ASuJqe* z{r?Gjx7}8fD{b&TpMtuZ0l4)5rHhSfx7($^yWEDW{werMNRcTqYm-bhliV_WHIMNR zF!OTrBop6Zt%#)L9-z^1X-i}tB37)!cj#^iB|5WwG6>Jb{8F;}UxocKzV2}4qNlZu zjgOOL%5NGA@zFNR8KeyYsubW1#4?Dq6C|`&lQx00uVUxv?TOKk8FA#3egT>bV1m(VO1Fi_b%{ zN3xUrwI#Vo9OV+ddXEd*^q|pv5?w!`jH6glS9>=u41A)!vc;*GuDm)@kk?th>E!&X zCC;;%Szbw5zc(cIsv1bk;&M(2hPX0afFxJ0a=%; ztH^>wqE)cHpR)>BcTHAWN$_hk0IJwr9AG8y+%_U(Mm9lE?DA&C%7z!+h#qq@oegiS zD#wsDyuVQ;fNaIK1&&(QPf3IvijOjict9w8rR1S^PB-j_*VLp<+$hxJ(#{6zb0hWA zGEIDlpUvZ0yQbTZ&nME&wTXDJZ2C7V8oc|Ny=o43+`7qNg`^S&FD$obS+|x$dOD>M z?4$Gq&^~8rgD(~&`gA#J2US_)*pn$>SboP+cW?b$UstIGLabE*y1eufpB|sO;17P6 z+Tg;p;W0~kW2Wl7KuT(HDNiMIJ>C5IUH&SzEDW;LO# zozxxU8p{?wMGgaH_1XPN{30SDO|_IFBqN|4?|4v^uu^;CTbCCrg`a+J%_6ONDa$pYcQTU1FB6yI3)%zs*JcZku47&5Cn!s)F8*zqrwf(66 zRQ2WDIGlH<%kwjPd1}2ivig$h?ceLqB#3=cI;=!ODi)_ohoa}k@8z%x3yCg9 zR4gLv%!ZnlRcaG?g>r5^n;1^a7a%`52}xfUODwl}wkVR*GOUNkw-$2=*f>_ZlG6ey z!R}?836|4dH>#hBgr&hs3r*0RtYe3ety)#D46!AkJZ4QE_FNK84zxnw_?}CdK^X@? 
z1MAgKbi!LIwMX8V-&2zn1TWptjZNE69QTgL@G7566ddQB|cl-4CTc6Y?hG*2P&3z8$TiwJDKW?Jw+Gp9xC|cCp`vVIueVd zu^wzn^-v|Q-%W7Qgm9#R7Hns~**-Xh-CHH`+z?BhZdfKLYXysUA23$J#sZ2_V-Utn zwy>pueJ|ka0mUkoxAH^>gmx1L|70`01ts`@zEvNWoZdPzRM8h4I4rX2L#E~;Q+Nt~ zvM#}k=r<=0=IML--bg+*1Hh>LL8yAB(+*CkK*$i7)mMkS64?*dP>04WdBg9b0Am&(9nl4p$+rm;Motm3#N0D}@E- z&3$77pVGmMXiQ&6L-)UYX(rEK7ojVhI&(a&6eyzi*ToTgmL5>WtlLn8s1s0?S+7vL z+8L<4E|9kEY3Dx0AoUgz5;OEOb=v}7mcG$DShVZZB)CVfMhU@RJ(}(J@FvU)UM|-1 zd-B<~?=^8ms?)9BO3h{VL0hYj2IKbiQ}S%flU@Z+nkn%gq>Keg--X~t$UbK$yS@?6 zX2)FijRuyB#nuBc!RF&(QABno;jd(NLG9Ui5RW+hU(0U2z=w|v(NBFyG?2DSkKT4& z3^%QYKTnCi2?EttEa)gge+Kx%!kT*tInKda^Qb1qE_)R1=vc7{+;*Kx$qSE@dvCY9 z)YbL9Ek+T1ILJFmX(uKJ6X5ApU+T-bxCYP8V z;n6R(y}7EIxO!$ZHD^Id`K8Lj#y8yMA=Ah_6k4C*{xrQ+ zn?FT(#WP!Fkg_J$b=R*rDeq%&iAS+3k&V*~mRoj-izcG=msq?j^Pv9HYfCZz)u)#U z375v7VYxc1(0a_pmtL8vN%~Ww0{p_qS&jlBvz+)f!32^fbO){CuGx$6#98&R<2&E2 z{si?qb`^Ne9{S@_TR;=hZfo_<7P$JObEi^DHBu7gwS~p=eIv~CNjFiN9Sv`$FVSk+ zdw_}xcdo95P+v|J zkg^-!>g)aZg?B629MO+?+}_ceC`y%(N4mJz+tDz^$xbtriFt^QHv{Y%(=}K@P#ZG` zKaC0?+opApaZM+`8aBk`4+Y%a=6xmro++8WpAHYbt85)BCNf5 z``O7Rt(bEB73yHBJrW?L-(EP~uH$4!8?b8GICcdkF0wowd}VdwSp1xk z?)1#4w!Jx?U24OHQmZyj;%p>RbsAoKi@I>nStXm!#ii`%YLs_!RN=5Xe2Hyx-MN-( zyXHS(VB4$lcC#-+T|pn_++kgd`hNQzPg_bf_-SAox{5!GD&HKPg90L9^qh=(E~>rO z27^x5P>+n4cuV%|lvFAEt4g%nIHYi!6xxhk-BO)K3~-;DV?}G2#G^9uxT+0LE)^OI zbm60rJ|;ITl6W0%10Jd%T|-si3YbSA&NeMH zgk=6Jc)f!eyS8^LOHHBI7zKLmy#T3W#LO_zml2Qru2BHvkz5^IFdm}4YsDC21-LVk zM$Qr+&DDqi@q?O?6fBa~#AOaoBKFLOb6&M_36eh4Rv<^P#E;IVgQS&!F83H+e(7Xn zfoM}oH=WtLd?)<9iwG7g=y1WCXq<91UOH1-w%W8ju3q-CQc*bU>XmCZ;m9s)b4bqb zo@NTnyUE$c-|ABLQU$%Ga&UH9jE3fbUqBU6$Iw= zlxL&&IGwsjb%aXr16_2@y~d@amrQJj^s|-9Ow<)`0g;dL1#MHpdKw25GFyJpN5tYG z?01Aid{?n6<7>H|o?S#k3m{NnRXsI2+k9XZ!@sV<<46p|rh!ch(+sM_G7?)(NSnL$ zcnL^DjrN{&44FlYbR(oqtJGe}Jm=`v$xvi75m6e*2RFC=2Nrv;ehvZWt=7|(LW5WV zi^SJ0qUibPdh($76~CYON_;LJs7IW`xfxEjX2;Q%Uo^$?=Dswx3{}7~I=DGhxpmQ0 z_m%`4hO-8u`Gcup!O4Im@4J-@>KofKs?c`Cs@HuePD#hJPYJ*@i$ zNHpM0Y+E50?Zs?zX)t$@)ulTlKdC+Cq{C`A#tMkiB%gntHcb-QgV7$MD)Oh@y0TCj zh%Z7L_qPxZ#D~n~w7sAeOe)MTm3zuPa|AtqtYzT$abh~nw}>V(6}qI@$(@|{9{QW z8jEBliH{V+2#7d8(I&$9Z{Bw#Q)LU^MH8hNxgcf#!xa#|LVc5wiZB{~JUy2++qWjP zt?Z3MO~|nC-J3t~5GCzX`{dtfBc_B6w)5lj0$-Sx7TmLPP2mvCh^Fdq}6A;`g<0n(O z2aN?)BX6m#35I{*Vl@%6)@FQ--BO9HB{~?r%m=*R--RB(Q{j%jf`Xmle$O z-(l7Zf$m>w7H?pbGqEMVw?PIboX@9m(`s?@MeW@N@GaZ4L1Z)>*&~JCac%N5VtK6R z>~5<}Q3ys%VY79so1w&zu7IXQCacKg@cl5xHYP2W#*0`U@Q&K{S2qy$yBrjiHefiAuD90kBw9D74K%{Q4fDY>rOmnSGmc5%$ zaa;h|BI%%yKEi!#XIWO_R{dY+v4h^H&+Up?S)7O#9ZgTh$5oEL+({C!r=+hC|6ED@ zGriM69Jvo{T%MQYJ;%)qdg8l1Fw#Yvov0=87dwyxLnE{sHR- z-aeP$7^E@gCO4;^uc^yLg0536|;tlf>4oXdWZqk$m`%7}V2$-G6^F^upfnIbZ zOw2^&k-x4SI$~GfNH5T^c35W~e;iV~s`iQzz^tQP7Jn-=u$F5#8tthE0edrWnv$;ztg=V|LZpRgT=WW??+Yf?`$A?4^#JKZ(4NE0Poir5>!i>l zYm8twM5k@eCO}nKqW01KVj_W`TTS&$5oQ1g8Yvw4m5fu4Lp32IKc;IycK~K?6L~{f z$nhwtr{M7-=&*2sA3eH8^F=tiP6;e)QGhgWD;p0T{s&fZDkhD1|84~usFu%uX)53$~%bP^jaBOCFeIt~ynrkCR3{E1=kUr_22^oF{v z(J0I<2)v#A^{8A%@u8foQp4>YIejaNuMPmrh9Z0IWlCxxlOPCXvzOwp(7EYqQsF&i z$3EOO){5!W@qU+-+Bh{k;k`mfiVL7|&brZHrF7i6PVqw*S)(3H?8>u!Sw?&&9T@~G zC8{_3C)yV)DB4U1$z$w>eZ-w?&~`!xtA6FZc@2_EQf68GYC?WjS8jy#^hzp;Ah?icIH-V_I~wm zDCLrc)tNe7en^wfb7FTG8HJ0z4)MbCZV(_IG7;zkcq_f_Jxu_d?N#) z49t&Cc84O2DnGZfO)DGMX!nE_Mn`;--rw9yMxyPDS!#lOZi>I^)n|g&wVI%G!J)^) zc0imQLVSX{_0%;Tj^}K-qP}rLO$4+8X<35*HX3%dP%3B$#LsqK;-m!$_?|S9#g+Gx zl+5=nR7r|jl>jiElmo~k!wYCT>g_j)LFb;nZ)zJz)^`uzT?eQ-`&E!k;V+1FRUf$X z;1a|o#@t{5Iqt%DfysXWrPoy+g?s4w-l~4$c>0_6o z9ms^>dD%>&4o06R131a9tvsJ{h#>q8woFIR9A_G51nLZpw(r~|%C#eI3T;?6Ij*9@337nfI4~~eTp);w(rWP0}-HZ)lbn5 
zoJ&ox8ALuq?&AGmOpkqZDQ+cHbbx;HNjf@5OUO}Kf}24Vex9gQ#O(%*DH8SRz3Sng zD~GM$XETCqD%!_u4F~pllHao-&`XjSQKs{{iF2tjl5dWgnx7vo5d$!}s2xb=b|Rd6 zwmx>zwZ<1_6QNYyj<8)zLVYR(+xczp!=A-l$+F%~Wfec9ibDw?!;5A>jj*5}EPFVW zG#pH^8$aSzBPM2ril!ZuJwQMONbt3H03FK6z||%|%Jh9y8kgw#@B*KxPX-lz`K6J~ zZcL&Z7OG>Y4>X2Jf~Kg=gapTG$8&=ik#PLxn@r*G@u;SvC+m&xvNEYs+m{p{NB9}< zci3DsLZAs4QR2qzedfej$9l3_UZkQm3_UFDX3NGoc~GwSYK10>(H+3de~mG{^#E#- z&#Og$>rFGX7P=3-(L>5|H0MT-=Ed7^1NBdj3XCjMzDkUFFAAR~{J8A^v*}7ozjDg* zJlKlm6GO0&%~@7Kl!Zo^In8~Q7H+EZ9CIB^ZVolaht-tw+8xV1bhi6F$m2bfkk+0X z50rJ^R+uM{K=Li3g$Ugp@i; zHl%fqm@A^C2BJQzk%VPjncO#;>mg#FB>tZBKq((c>EbpYQ)Y*@gGR1O?95H+)M5im zkiGj?l}FPzd&TtUC`fiLY1t*LBVDv_Eox!+W@Wp(5yQMcb61_E)QEBj7-Bi5p~-xB z%iu4M6;*thtWis?o}KmGJr9eRm_mS^5R-iSdK&48>~8crGpAdE5oGIteGWQTyoj8v z#1CK-Hj|NKFXx7CtoY?g9mzt)kck@f;tPXoE2JlSFa7D8KWaODY`aSOdi5uSF?k9yprQFo0G%S4_4Q?L zQvSy;=jq!Jy4mz<2k-!0HXU|*)U!STew=sg5dblTli-EIY*3;#7yci zqHeGiSo*`DQ}Y+}T1xDkwy!p;E-PW&%p7fnhxUEW17n-_R=RQHT`-vH@KJe`Qqhs3 zbFL8sod=1>HoP}lT9nVgb2##h4IXC0vz9jMK%yXtr5)lvaQ29Kh<53N@wFVB~2Oz9&P&3D5@iWo)9 z@w@NrGS(gMI!Pq0;6%Nc2EvwoJbOS|eeJ&m#vJ6Dz3n8mWf$RIot0EqH6{Wnn9jk7 z$*P8MbyRXwF0Cgu3&Zq+CDc|}gz=TPTw?D~gIv06rN{AYwFG(a)%Si6Hvx4ThHb4d ztE7=pfnCkdBT_Tf>WR>EMO_xs>xs4x)!pmeK(rG418Sl1dbyW953%DSTFX5pPY?nB zCn>7CH`wV^^>Trce7JpB1)8fY&32oFp%I^LZ0GN9RiR1Cqoo1dLwFpCSrCtA2asB3 zdcl@3Zoz1cvHzd?8|kIN8dUF!*V&^I!Z}95~dvuJd8f!RNmQ_%IOrdyw>^>8@4N(ykZKZNivihMOve|f)sXGy z?$kqP8>d^*yruA}KRpr1O3kW;MhCtOjIx&BX*n9{J29n4PIx9yN($L}g2!t-32<#{ zono0j)SqayU4mC(sGSx=FN1*-Ta%=LS?eG1aZkRadWrg+$7TEq%QGy>&;QJH@iB??g55mZPlanbm8IADZ6D1ZpS6U z`1J(7jyeyhiCl?0(QSknO9}lQ@nz`{WIG(H3YV9ofm{iQC_d5I|2O&ym|>Yp!%Zk8 z)^$2+7Ona_WVd!Dhy2F1+OZRYxh<_Dq6eY&=B=3tJzBJ0_;l1r>?}%$RH14guA8eU z|By4tR(jAgFyBS@yoYAF-P$s48$GyA@XG6nYPb*l0Su>)XBkR_Q+DhZ zj_Zgz_wUI2hv&$C6B2PH8;HWgc_3rg%X%N~C;s#S6X~ z_KeDa!X2SCQmc25HUNUD*Xylz9|b5_B38ydzqU@6eGj3yjI$)DM)X?g%oYe6O}ALn zZ%QN9hS_CuO9hW2bVhf2zF9Km=a(&;G3s5}#O-pjw-W}!b>DPLXE^E1W@CGQ&FTML zK(w(Qk|cEEA`aPMk!y1=Ic7(q7$X&#KnZ*_AYv~{&?haY?d&%P@lW)OA+$@LuiZw)ZPX0VINIH9EM{KZR^L) zTuPJ)#IKMQK_>u6CT%5En(*}BFB)I0m+~C=oy95FjO%7=5_TRD8z|x}n@+#zm)^?! 
zAD!E3FXd5F=ig@%9`!U10`eT-Gfg580t`;1mA*I`Lwt?6Lbg}+z_Bw(hNDZv*r+c*zT9;Sy1>98F7VP7@T z{Lq*D$_>FC%}k=jRu*M>t(sjLYEtuZ!JT0MGhL7qWj4l`5E8wK)r|&RGGu0fWf@M- zCq*^o+x(61gnop-+Xu0JjjH9Jei&+6z^m$6p*8q+7@>V6u+)eHLo3d_OB z6PNIyK2XF^6cuoipjY8-1QrRY)r^qbmrROIU)}$F)vzoe5sRLCUL+$aiu)~QKM69lQ zZvuRcRb)00{K3rl(aVRshZTBi0yYvX!bWS%)6HYq<^8GsW{1&w`R!yT{F;)DQvw&@ zL47$#2$~Fgn)sOJ{7>aIT-q^Mg=tOkuzKvYKtiK3Ew|1b&M_Z=PuNcow>m){j{lBV zRy}oe6jON1v>;9%hI9DhmKyb25EHFya3scwmr@PfEH|0L1Db}6C8Kw_kPXgQ-nW)s z>;&g|r5w$XCWSocLg&P`)EQRwTZ#K3U<6Pv(1Dl@WxAZrrpB0@nAz z))F#m>ZEeig2C^i2d=e|N8jnKmPUpR!brfAXN$*DvY1wK%3av z-2~~zmy20GWdLN|xj5lBK2w;ibhkbd7L1}|u^5^@U{Tn8;QSe_MD&8A-C7e-`vHj~7X3OjF4U_OS znl*3i)*fgH&g12on#i>`;_CZt4CfE!h!sR7nR9zr#p-sjjiS7E2xSwLezYFc_3Zs| zAf--S*#h%I-5DptB(N;sQv}$LGpKX!OOg(7r5h`N3M4@8$a{Yk>7_X|aaH~P#)uI^ zN1{m5$O6^WTos9wEm^Nt9Bylj&r`LM`gmY=@vJ;rYL>mvQ=S7}0n}+|%n<8Td7Wb4 zohB@yb|SSJ`LReVw{H0GL$6w`T(Dg=&^kLF;i$G;E}Z2F6Hs#hQE`%ft=#Ts zc_YNP%xE0fVSDxP4d()pXsWm4`op~Anf=*uW3h_1iR`k|@VpXWPo|&YO{7%XnxM^w zgYP~97T^fP`&y^4#wuYd*v@j=+0zFV7zIf$CwRWPnSUMbR_mbrc_jzY`w}uV>yn0%& zy*U>>o&zpeGP^lu-@(MMsM@8!jA8rn7P_#Q9B?EY1zVo%=2&HPj2>A!ywr8vDews# zwr%>|3%@simPIL=0hpLf>{wL4jig(tpbavKd~CCQaZOo{=-73)pdi7DTHHk2xgp$X zxwlNL`9dQ|IcVrfej=Gn-bL`VQwhJ)4U(JT-%(!pYeQ!8%h3bD&2*mEBJQnkWMa9| zh&LXk>a=CUtqr9d0G#PsV7ooND6>CXctk4tCy1yJ*S#EeTUMNi3LEUrly%?iXT>S8HTZ+ zuw&q7X}k?O{3M6D6-h;})lZd*SRVA-o8bKB#ezG*2ke*h+iRc-=UsTRu%i5Zx<*5&gswd# zhR)aQk@FLduuZ!>pX;%i&9mI>c>MyJq#AX+WHPUm6kkmQV5@zdDQ;3TSy+LODXE=Q zLBc_ahhUp|)HoNc;PIT)fSkm46Lne4s(RDJ6RoyutpJw^mfC%5YcmL}Yt1Cq;?ws@ zK4l*U+ZfQ}LbfTVaco0@0uhow9G>oMmJfISiVn%c3T9acY5(`ny?A0JecYmnG0sj; zG2Gmf@BT|le|9qN9>eTAm~Y$R;7Jyeo^GnbMK($8<~$ zY2;Iwx&`?V<9?+v%F|*wFBnGxT!`{FwO*}NDpIgUJp4b=( z1$Eym$cFcfw~g2$4~(4khD)Ry`*sYMOKc>{nY1X_F0|^hHe616V~$c;fzF~h#8DuO zztl3^k_=oWtGms<9}h`XX-BY&S&4N-hN`vDW(=V!;5;c!Igx#;4k;OJzpq(B>;~Jj zDvj&(_SG?sETD?hkl}SI6)^^P<6#si|K$=cwFXdC{1k$t{`*Y9*40y4!B4B_{Y6VX z&r1I!iSu1K#D;HkTEV|hHiVHqPb}>hibvR}Nx=%;sUUfjOnzTdE5)gbNC*@S?Nre} zQj~aR{d+?u_h3Olu+EZ_0^8vPOwug=Y!QM(B1)U{%x+jKf)~8v@^aMaB_T`5`^HW; zC|JvM*k-qcm4De?4pIX_i=Z8fGf`rli0GocTdTlDzGecmn?;@ix7wLi;D09}Gy=8{ zCAB>n(tIX244#01W>hP!L8@VrK`O6dudah|?d9$snt4x2ExOh?kZY95<=kMOY<$un z*_0>kHF}T^T-SIMQd9c4zq-ybPDbq#7?cbi@_wQ+t>E)mO%{KkRz-P0GgoK>g90I5 z=swp6rsSQ5Mb3;QFV2-*8j++i+58ibi$HuKU%69^Zmysq-gjXLYkPM)!s^bVo?-|7 zQ91_F%a)#yhq8}dq=qfIg^Pt;g_p|_h*4gVpHyANs?g zUx$H{@lSYLRP&gH8{m`92RWf~`Km|JS=zS&e3g~xQ&qeeo{hg@$WcBdpva-J3{5&P z2-lrtNEYPVX3oURh{Kb=3V^IMk9M|Mcr8^JkgtCai(6MW_>Jn}S3be3Q|Qepm3xG)0%1fY89uPHz)^p^ty{IX z7`DpYDbXLwYhdDXkx3Sk>M&w-&Y}5xl8n6e?QS&h7QV}z#@>M4+02i^f{>OkZzZe3 z>7cACn&ff5Ha>JAdu6FuGAvzJgyk4zepS2NQqWGu863C43g|6XKrfAMS2doT#Cf^? zxLc>xO?VcWMozKKRJ&j$_ll?aPO!2^(~_d@QYLLm$@Itn7ItRMAerNiO}SeBsgy4m z8qG5rhsr^hh#EsFg4_TtitOd7G{Pw*;0%NogZ@zh?ujjKy$&vn)Ei^jXh9Sq6SqyYj%7+`uUK{!UPburW_V{zd#}K}uKc--+!ZMF+H4LJs zC43s2p_o}rY?OS6%~Lf5O2(sAC}!aDw{z=^rI@wsG~XE2V0E_)JUZ4a%Jh`oZ3W`< zWJG9CkzE3vRt5$h9n#-}F8%nmQB_Kv8sm@g{loG5btN-MwU~%Y1EMQch$%LF@Da1W zY+_E8pRu%vGwNG{R-IP9&4+xoczpthaX>)zs|@n*blz5pz|T$@+5aOX=KY3W?6H3E z$m~4=F+0tvJQooFVmH*3>Rfs->T!EYmd1@Z3B11CuYSh;jc&@PU?wH5OVN7=?}CTb z`PQnyx!-*bBf~`^>C4C}?z_Kv>?N-n&-?v)?ASnFMM?l+PhBLRmMU~5`Tz6qUUrp= zxV>5D$8+fFm7C_2qQ3^d*JF-Zx%^`kZh=fjiprmJmJWm-+hbkXDUuv{xTA_`0 z2)(r3Yt!oTR{t#QxY#mvG+O@lG@O%);!)1Izt%K1aQqM1rXxK+!In`~(u(G)C&LT{=1! 
[GIT binary patch data omitted]
diff --git a/src/guidellm/data/utils.py b/src/guidellm/data/utils.py new file mode 100644 index 00000000..7d53a054 --- /dev/null +++ b/src/guidellm/data/utils.py @@ -0,0 +1,161 @@ +from __future__ import
annotations + +import contextlib +import math +from collections.abc import Iterator +from typing import Any, Literal + +from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict + +__all__ = [ + "DEFAULT_COLUMN_NAMES", + "DEFAULT_SPLITS", + "datasets_item_iterator", + "resolve_dataset_split", +] + + +DEFAULT_SPLITS: dict[Literal["train", "calib", "val", "test"], list[str]] = { + "train": [ + "train", + "training", + "train_set", + "training_set", + "train_dataset", + "training_dataset", + "train_data", + "training_data", + "pretrain", + "pretrain_set", + "pretrain_dataset", + "pretrain_data", + "pretraining", + ], + "calib": [ + "calibration", + "calib", + "cal", + "calibration_set", + "calib_set", + "cal_set", + "calibration_dataset", + "calib_dataset", + "cal_set", + "calibration_data", + "calib_data", + "cal_data", + ], + "val": [ + "validation", + "val", + "valid", + "validation_set", + "val_set", + "validation_dataset", + "val_dataset", + "validation_data", + "val_data", + "dev", + "dev_set", + "dev_dataset", + "dev_data", + ], + "test": [ + "test", + "testing", + "test_set", + "testing_set", + "test_dataset", + "testing_dataset", + "test_data", + "testing_data", + "eval", + "eval_set", + "eval_dataset", + "eval_data", + ], +} + + +DEFAULT_COLUMN_NAMES: dict[str, list[str]] = { + "prompt_tokens_count": ["prompt_tokens_count", "input_tokens_count"], + "output_tokens_count": ["output_tokens_count", "completion_tokens_count"], + "text_column": [ + "prompt", + "instruction", + "question", + "input", + "context", + "content", + "conversation", + "turn", + "text", + ], + "image_column": [ + "image", + "picture", + "photo", + "img", + ], + "video_column": [ + "video", + "clip", + "movie", + "footage", + "mp4", + "mov", + "avi", + ], + "audio_column": [ + "audio", + "sound", + "voice", + "speech", + "wav", + "mp3", + ], +} + + +def resolve_dataset_split( + dataset: Dataset | IterableDataset | DatasetDict | IterableDatasetDict, + split: str | None, +) -> Dataset | IterableDataset: + if split is not None and isinstance(dataset, (DatasetDict, IterableDatasetDict)): + if split in dataset: + return dataset[split] + + raise ValueError(f"Requested split '{split}' not found in dataset: {dataset}.") + elif split is not None: + raise ValueError( + f"Requested split '{split}' but dataset has no splits: {dataset}." + ) + + if isinstance(dataset, (Dataset, IterableDataset)): + return dataset + + for _, default_splits in DEFAULT_SPLITS.items(): + for default_split in default_splits: + if default_split in dataset: + return dataset[default_split] + + return dataset[list(dataset.keys())[0]] + + +def datasets_item_iterator( + datasets: list[Dataset | IterableDataset], + data_samples: int, +) -> Iterator[dict[Literal["items"], tuple[dict[str, Any]]]]: + dataset_iters = [iter(dataset) for dataset in datasets] + gen_count = 0 + + with contextlib.suppress(StopIteration): + while gen_count < data_samples or data_samples <= 0 or data_samples == math.inf: + yield {"items": tuple(next(dataset_iter) for dataset_iter in dataset_iters)} + gen_count += 1 + + if gen_count < data_samples and data_samples > 0 and data_samples != math.inf: + raise ValueError( + f"Requested {data_samples} samples, but only {gen_count} available " + "from the provided datasets." 
+ ) diff --git a/src/guidellm/dataset/__init__.py b/src/guidellm/dataset/__init__.py deleted file mode 100644 index b90b72ff..00000000 --- a/src/guidellm/dataset/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -from .creator import ColumnInputTypes, DatasetCreator -from .entrypoints import load_dataset -from .file import FileDatasetCreator -from .hf_datasets import HFDatasetsCreator -from .in_memory import InMemoryDatasetCreator -from .synthetic import ( - SyntheticDatasetConfig, - SyntheticDatasetCreator, - SyntheticTextItemsGenerator, -) - -__all__ = [ - "ColumnInputTypes", - "DatasetCreator", - "FileDatasetCreator", - "HFDatasetsCreator", - "InMemoryDatasetCreator", - "SyntheticDatasetConfig", - "SyntheticDatasetCreator", - "SyntheticTextItemsGenerator", - "load_dataset", -] diff --git a/src/guidellm/dataset/creator.py b/src/guidellm/dataset/creator.py deleted file mode 100644 index a74ec8c0..00000000 --- a/src/guidellm/dataset/creator.py +++ /dev/null @@ -1,213 +0,0 @@ -from abc import ABC, abstractmethod -from pathlib import Path -from typing import Any, Literal, Optional, Union - -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict -from transformers import PreTrainedTokenizerBase # type: ignore[import] - -__all__ = ["ColumnInputTypes", "DatasetCreator"] - -ColumnInputTypes = Literal[ - "prompt_column", - "text_column", - "prompt_tokens_count_column", - "output_tokens_count_column", -] - - -class DatasetCreator(ABC): - DEFAULT_SPLITS_TRAIN = [ - "train", - "training", - "train_set", - "training_set", - "train_dataset", - "training_dataset", - "train_data", - "training_data", - "pretrain", - "pretrain_set", - "pretrain_dataset", - "pretrain_data", - "pretraining", - ] - DEFAULT_SPLITS_CALIB = [ - "calibration", - "calib", - "cal", - "calibration_set", - "calib_set", - "cal_set", - "calibration_dataset", - "calib_dataset", - "cal_set", - "calibration_data", - "calib_data", - "cal_data", - ] - DEFAULT_SPLITS_VAL = [ - "validation", - "val", - "valid", - "validation_set", - "val_set", - "validation_dataset", - "val_dataset", - "validation_data", - "val_data", - "dev", - "dev_set", - "dev_dataset", - "dev_data", - ] - DEFAULT_SPLITS_TEST = [ - "test", - "testing", - "test_set", - "testing_set", - "test_dataset", - "testing_dataset", - "test_data", - "testing_data", - "eval", - "eval_set", - "eval_dataset", - "eval_data", - ] - DEFAULT_SPLITS_DATASET: dict[str, str] = {} - - @classmethod - def create( - cls, - data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], - random_seed: int = 42, - split_pref_order: Optional[list[str]] = None, - ) -> tuple[Union[Dataset, IterableDataset], dict[ColumnInputTypes, str]]: - if not cls.is_supported(data, data_args): - raise ValueError(f"Unsupported data type: {type(data)} given for {data}. ") - - split = cls.extract_args_split(data_args) - column_mappings = cls.extract_args_column_mappings(data_args) - dataset = cls.handle_create( - data, data_args, processor, processor_args, random_seed - ) - - if isinstance(dataset, (DatasetDict, IterableDatasetDict)): - dataset = cls.extract_dataset_split(dataset, split, split_pref_order) - - if not isinstance(dataset, (Dataset, IterableDataset)): - raise ValueError( - f"Unsupported data type: {type(dataset)} given for {dataset}." 
- ) - - return dataset, column_mappings - - @classmethod - def extract_args_split(cls, data_args: Optional[dict[str, Any]]) -> str: - split = "auto" - - if data_args and "split" in data_args: - split = data_args["split"] - del data_args["split"] - - return split - - @classmethod - def extract_args_column_mappings( - cls, - data_args: Optional[dict[str, Any]], - ) -> dict[ColumnInputTypes, str]: - columns: dict[ColumnInputTypes, str] = {} - - if data_args: - if "prompt_column" in data_args: - columns["prompt_column"] = data_args["prompt_column"] - del data_args["prompt_column"] - - if "prompt_tokens_count_column" in data_args: - columns["prompt_tokens_count_column"] = data_args[ - "prompt_tokens_count_column" - ] - del data_args["prompt_tokens_count_column"] - - if "output_tokens_count_column" in data_args: - columns["output_tokens_count_column"] = data_args[ - "output_tokens_count_column" - ] - del data_args["output_tokens_count_column"] - - return columns - - @classmethod - def extract_dataset_name( - cls, dataset: Union[Dataset, IterableDataset, DatasetDict, IterableDatasetDict] - ) -> Optional[str]: - if isinstance(dataset, (DatasetDict, IterableDatasetDict)): - dataset = dataset[list(dataset.keys())[0]] - - if isinstance(dataset, (Dataset, IterableDataset)): - if not hasattr(dataset, "info") or not hasattr( - dataset.info, "dataset_name" - ): - return None - - return dataset.info.dataset_name - - raise ValueError(f"Unsupported data type: {type(dataset)} given for {dataset}.") - - @classmethod - def extract_dataset_split( - cls, - dataset: Union[DatasetDict, IterableDatasetDict], - specified_split: Union[Literal["auto"], str] = "auto", - split_pref_order: Optional[Union[Literal["auto"], list[str]]] = "auto", - ) -> Union[Dataset, IterableDataset]: - if not isinstance(dataset, (DatasetDict, IterableDatasetDict)): - raise ValueError( - f"Unsupported data type: {type(dataset)} given for {dataset}." - ) - - if specified_split != "auto": - if specified_split not in dataset: - raise ValueError( - f"Split {specified_split} not found in dataset {dataset}." - ) - - return dataset[specified_split] - - dataset_name = cls.extract_dataset_name(dataset) - - if dataset_name and dataset_name in cls.DEFAULT_SPLITS_DATASET: - return dataset[cls.DEFAULT_SPLITS_DATASET[dataset_name]] - - if split_pref_order == "auto": - split_pref_order = [ - *cls.DEFAULT_SPLITS_TEST, - *cls.DEFAULT_SPLITS_VAL, - *cls.DEFAULT_SPLITS_CALIB, - *cls.DEFAULT_SPLITS_TRAIN, - ] - - for test_split in split_pref_order or []: - if test_split in dataset: - return dataset[test_split] - - return dataset[list(dataset.keys())[0]] - - @classmethod - @abstractmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: ... - - @classmethod - @abstractmethod - def handle_create( - cls, - data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], - random_seed: int, - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: ... 
diff --git a/src/guidellm/dataset/entrypoints.py b/src/guidellm/dataset/entrypoints.py deleted file mode 100644 index cf689956..00000000 --- a/src/guidellm/dataset/entrypoints.py +++ /dev/null @@ -1,42 +0,0 @@ -from pathlib import Path -from typing import Any, Optional, Union - -from datasets import Dataset, IterableDataset -from transformers import PreTrainedTokenizerBase # type: ignore[import] - -from guidellm.dataset.creator import ColumnInputTypes -from guidellm.dataset.file import FileDatasetCreator -from guidellm.dataset.hf_datasets import HFDatasetsCreator -from guidellm.dataset.in_memory import InMemoryDatasetCreator -from guidellm.dataset.synthetic import SyntheticDatasetCreator - -__all__ = ["load_dataset"] - - -def load_dataset( - data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], - random_seed: int = 42, - split_pref_order: Optional[list[str]] = None, -) -> tuple[Union[Dataset, IterableDataset], dict[ColumnInputTypes, str]]: - creators = [ - InMemoryDatasetCreator, - SyntheticDatasetCreator, - FileDatasetCreator, - HFDatasetsCreator, - ] - - for creator in creators: - if creator.is_supported(data, data_args): - return creator.create( - data, - data_args, - processor, - processor_args, - random_seed, - split_pref_order, - ) - - raise ValueError(f"Unsupported data type: {type(data)} given for {data}. ") diff --git a/src/guidellm/dataset/file.py b/src/guidellm/dataset/file.py deleted file mode 100644 index 5d6df1d9..00000000 --- a/src/guidellm/dataset/file.py +++ /dev/null @@ -1,92 +0,0 @@ -from pathlib import Path -from typing import Any, Optional, Union - -import pandas as pd # type: ignore[import] -from datasets import ( - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, - load_dataset, -) -from transformers import PreTrainedTokenizerBase # type: ignore[import] - -from guidellm.dataset.creator import DatasetCreator - -__all__ = ["FileDatasetCreator"] - - -class FileDatasetCreator(DatasetCreator): - SUPPORTED_TYPES = { - ".txt", - ".text", - ".csv", - ".json", - ".jsonl", - ".parquet", - ".arrow", - ".hdf5", - ".tar", - } - - @classmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # noqa: ARG003 - if isinstance(data, (str, Path)) and (path := Path(data)).exists(): - # local folder or py file, assume supported - return path.suffix.lower() in cls.SUPPORTED_TYPES - - return False - - @classmethod - def handle_create( - cls, - data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], # noqa: ARG003 - processor_args: Optional[dict[str, Any]], # noqa: ARG003 - random_seed: int, # noqa: ARG003 - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: - if not isinstance(data, (str, Path)): - raise ValueError(f"Unsupported data type: {type(data)} given for {data}. ") - - path = Path(data) - if not path.exists(): - raise FileNotFoundError(f"File not found: {path}") - - if not path.is_file(): - raise ValueError(f"Unsupported data type: {path} given for {path}. ") - - if path.suffix.lower() not in cls.SUPPORTED_TYPES: - raise ValueError(f"Unsupported file type: {path.suffix} given for {path}. 
") - - return cls.load_dataset(path, data_args) - - @classmethod - def load_dataset( - cls, path: Path, data_args: Optional[dict[str, Any]] - ) -> Union[Dataset, IterableDataset]: - if path.suffix.lower() in {".txt", ".text"}: - with path.open("r") as file: - items = file.readlines() - - dataset = Dataset.from_dict({"text": items}, **(data_args or {})) - elif path.suffix.lower() == ".csv": - dataset = load_dataset("csv", data_files=str(path), **(data_args or {})) - elif path.suffix.lower() in {".json", ".jsonl"}: - dataset = load_dataset("json", data_files=str(path), **(data_args or {})) - elif path.suffix.lower() == ".parquet": - dataset = load_dataset("parquet", data_files=str(path), **(data_args or {})) - elif path.suffix.lower() == ".arrow": - dataset = load_dataset("arrow", data_files=str(path), **(data_args or {})) - elif path.suffix.lower() == ".hdf5": - dataset = Dataset.from_pandas(pd.read_hdf(str(path)), **(data_args or {})) - elif path.suffix.lower() == ".db": - dataset = Dataset.from_sql(con=str(path), **(data_args or {})) - elif path.suffix.lower() == ".tar": - dataset = load_dataset( - "webdataset", data_files=str(path), **(data_args or {}) - ) - else: - raise ValueError(f"Unsupported file type: {path.suffix} given for {path}. ") - - return dataset diff --git a/src/guidellm/dataset/hf_datasets.py b/src/guidellm/dataset/hf_datasets.py deleted file mode 100644 index 7f91facd..00000000 --- a/src/guidellm/dataset/hf_datasets.py +++ /dev/null @@ -1,62 +0,0 @@ -from pathlib import Path -from typing import Any, Optional, Union - -from datasets import ( - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, - get_dataset_config_info, - load_dataset, -) -from transformers import PreTrainedTokenizerBase # type: ignore[import] - -from guidellm.dataset.creator import DatasetCreator - -__all__ = ["HFDatasetsCreator"] - - -class HFDatasetsCreator(DatasetCreator): - @classmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # noqa: ARG003 - if isinstance( - data, (Dataset, DatasetDict, IterableDataset, IterableDatasetDict) - ): - # base type is supported - return True - - if isinstance(data, (str, Path)) and (path := Path(data)).exists(): - # local folder or py file, assume supported - return path.is_dir() or path.suffix == ".py" - - if isinstance(data, (str, Path)): - try: - # try to load dataset - return get_dataset_config_info(data) is not None - except Exception: # noqa: BLE001, S110 - pass - - return False - - @classmethod - def handle_create( - cls, - data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], # noqa: ARG003 - processor_args: Optional[dict[str, Any]], # noqa: ARG003 - random_seed: int, # noqa: ARG003 - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: - if isinstance(data, (str, Path)): - data = load_dataset(data, **(data_args or {})) - elif data_args: - raise ValueError( - f"data_args should not be provided when data is a {type(data)}" - ) - - if isinstance( - data, (Dataset, DatasetDict, IterableDataset, IterableDatasetDict) - ): - return data - - raise ValueError(f"Unsupported data type: {type(data)} given for {data}. 
") diff --git a/src/guidellm/dataset/in_memory.py b/src/guidellm/dataset/in_memory.py deleted file mode 100644 index af84f658..00000000 --- a/src/guidellm/dataset/in_memory.py +++ /dev/null @@ -1,132 +0,0 @@ -from collections.abc import Iterable -from pathlib import Path -from typing import Any, Optional, Union - -from datasets import ( - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, -) -from transformers import PreTrainedTokenizerBase # type: ignore[import] - -from guidellm.dataset.creator import DatasetCreator - -__all__ = ["InMemoryDatasetCreator"] - - -class InMemoryDatasetCreator(DatasetCreator): - @classmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # noqa: ARG003 - return isinstance(data, Iterable) and not isinstance(data, str) - - @classmethod - def handle_create( - cls, - data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], # noqa: ARG003 - processor_args: Optional[dict[str, Any]], # noqa: ARG003 - random_seed: int, # noqa: ARG003 - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: - if not isinstance(data, Iterable): - raise TypeError( - f"Unsupported data format. Expected Iterable[Any], got {type(data)}" - ) - - if not data: - raise ValueError("Data is empty") - - if isinstance(data, dict): - # assume data is a dictionary of columns and values: {"c1": ["i1", "i2"]} - data_dict = cls.format_data_dict(data) - elif isinstance(data[0], dict): # type: ignore[index] - # assume data is a list of dictionaries: [{"c1": "i1"}, {"c1": "i2"}] - data_dict = cls.format_data_iterable_dicts(data) - else: - # assume data is a list of items with no columns: ["i1", "i2"] - data_dict = cls.format_data_iterable_values(data) - - return Dataset.from_dict(data_dict, **(data_args or {})) - - @classmethod - def format_data_dict(cls, data: dict[Any, Any]) -> dict[str, Any]: - if not isinstance(data, dict): - raise TypeError( - f"Unsupported data format. Expected Dict[str, Iterable[Any]], " - f"got {type(data)}" - ) - - if not all( - isinstance(key, str) and isinstance(val, Iterable) - for key, val in data.items() - ): - raise TypeError( - "Unsupported data format. Expected Dict[str, Iterable[Any]], " - f"got {type(data)}" - ) - - samples = len(list(data.values())[0]) - if not all(len(val) == samples for val in data.values()): - raise ValueError( - "Unsupported data format. Not all columns have the same number samples " - f"for {data}" - ) - - return data - - @classmethod - def format_data_iterable_dicts( - cls, data: Iterable[dict[Any, Any]] - ) -> dict[str, Any]: - if not isinstance(data, Iterable): - raise TypeError( - f"Unsupported data format. Expected Iterable[Dict[str, Any]], " - f"got {type(data)}" - ) - - if not all(isinstance(item, dict) for item in data): - raise TypeError( - f"Unsupported data format. Expected Iterable[Dict[str, Any]], " - f"got {type(data)}" - ) - - if not all(isinstance(key, str) for key in data[0]): # type: ignore[index] - raise TypeError( - "Unsupported data format. Expected Dict[str, Any], " - f"but one of the items had a non string column for {data}" - ) - - columns = list(data[0].keys()) # type: ignore[index] - if not all( - len(item) == len(columns) and all(key in item for key in columns) - for item in data - ): - raise ValueError( - "Unsupported data format. 
Not all items have the same columns " - f"for {data}" - ) - - data_dict: dict[str, Any] = {key: [] for key in columns} - for item in data: - for key, value in item.items(): - data_dict[key].append(value) - - return data_dict - - @classmethod - def format_data_iterable_values(cls, data: Iterable[Any]) -> dict[str, Any]: - if not isinstance(data, Iterable): - raise TypeError( - f"Unsupported data format. Expected Iterable[Iterable[Any]], " - f"got {type(data)}" - ) - - first_item = next(iter(data), None) - first_type = type(first_item) - if not all(isinstance(item, first_type) for item in data): - raise TypeError( - f"Unsupported data format. Not all types are the same for {data}" - ) - - return {"data": list(data)} diff --git a/src/guidellm/dataset/synthetic.py b/src/guidellm/dataset/synthetic.py deleted file mode 100644 index 8c30f0f7..00000000 --- a/src/guidellm/dataset/synthetic.py +++ /dev/null @@ -1,287 +0,0 @@ -import json -import random -from collections.abc import Iterable, Iterator -from itertools import cycle -from pathlib import Path -from typing import Any, Literal, Optional, Union - -import yaml -from datasets import ( - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, -) -from pydantic import BaseModel, Field -from transformers import PreTrainedTokenizerBase # type: ignore[import] - -from guidellm.dataset.creator import ColumnInputTypes, DatasetCreator -from guidellm.utils import EndlessTextCreator, IntegerRangeSampler, check_load_processor - -__all__ = [ - "SyntheticDatasetConfig", - "SyntheticDatasetCreator", - "SyntheticTextItemsGenerator", -] - - -class SyntheticDatasetConfig(BaseModel): - prefix_tokens: int = Field( - description="The number of shared prefix tokens to prepend to each prompt.", - ge=0, - default=0, - ) - prompt_tokens: int = Field( - description="The average number of text tokens generated for prompts.", - gt=0, - ) - prompt_tokens_stdev: Optional[int] = Field( - description="The standard deviation of the tokens generated for prompts.", - gt=0, - default=None, - ) - prompt_tokens_min: Optional[int] = Field( - description="The minimum number of text tokens generated for prompts.", - gt=0, - default=None, - ) - prompt_tokens_max: Optional[int] = Field( - description="The maximum number of text tokens generated for prompts.", - gt=0, - default=None, - ) - output_tokens: int = Field( - description="The average number of text tokens generated for outputs.", - gt=0, - ) - output_tokens_stdev: Optional[int] = Field( - description="The standard deviation of the tokens generated for outputs.", - gt=0, - default=None, - ) - output_tokens_min: Optional[int] = Field( - description="The minimum number of text tokens generated for outputs.", - gt=0, - default=None, - ) - output_tokens_max: Optional[int] = Field( - description="The maximum number of text tokens generated for outputs.", - gt=0, - default=None, - ) - samples: int = Field( - description="The number of samples to generate for the dataset.", - gt=0, - default=1000, - ) - source: str = Field( - description="The source of the text data to be used for generation.", - default="data:prideandprejudice.txt.gz", - ) - - @staticmethod - def parse_str(data: Union[str, Path]) -> "SyntheticDatasetConfig": - if ( - isinstance(data, Path) - or data.strip().endswith(".config") - or data.strip().endswith(".yaml") - ): - return SyntheticDatasetConfig.parse_config_file(data) - - if data.strip().startswith("{"): - return SyntheticDatasetConfig.parse_json(data) - - if data.count("=") > 1: - return 
SyntheticDatasetConfig.parse_key_value_pairs(data) - - raise ValueError( - f"Unsupported data format. Expected JSON or key-value pairs, got {data}" - ) - - @staticmethod - def parse_json(data: str) -> "SyntheticDatasetConfig": - config_dict = json.loads(data.strip()) - - return SyntheticDatasetConfig(**config_dict) - - @staticmethod - def parse_key_value_pairs(data: str) -> "SyntheticDatasetConfig": - config_dict = {} - items = data.strip().split(",") - for item in items: - key, value = item.split("=") - config_dict[key.strip()] = ( - int(value.strip()) if value.strip().isnumeric() else value.strip() - ) - - return SyntheticDatasetConfig(**config_dict) # type: ignore[arg-type] - - @staticmethod - def parse_config_file(data: Union[str, Path]) -> "SyntheticDatasetConfig": - with Path(data).open("r") as file: - config_dict = yaml.safe_load(file) - - return SyntheticDatasetConfig(**config_dict) - - -class SyntheticTextItemsGenerator( - Iterable[ - dict[ - Literal["prompt", "prompt_tokens_count", "output_tokens_count"], - Union[str, int], - ] - ] -): - def __init__( - self, - config: SyntheticDatasetConfig, - processor: PreTrainedTokenizerBase, - random_seed: int, - ): - self.config = config - self.processor = processor - self.random_seed = random_seed - self.text_creator = EndlessTextCreator( - data=config.source, - ) - - def __iter__( - self, - ) -> Iterator[ - dict[ - Literal["prompt", "prompt_tokens_count", "output_tokens_count"], - Union[str, int], - ] - ]: - prompt_tokens_sampler = IntegerRangeSampler( - average=self.config.prompt_tokens, - variance=self.config.prompt_tokens_stdev, - min_value=self.config.prompt_tokens_min, - max_value=self.config.prompt_tokens_max, - random_seed=self.random_seed, - ) - output_tokens_sampler = IntegerRangeSampler( - average=self.config.output_tokens, - variance=self.config.output_tokens_stdev, - min_value=self.config.output_tokens_min, - max_value=self.config.output_tokens_max, - random_seed=self.random_seed + 1, # ensure diff dist from prompts - ) - # ensure diff distribution from output tokens - rand = random.Random(self.random_seed + 2) # noqa: S311 - unique_prefix_iter = cycle(self.processor.get_vocab().values()) - - prefix_index = rand.randint(0, len(self.text_creator.words)) - prefix_tokens = self._create_prompt(self.config.prefix_tokens, prefix_index) - - for _, prompt_tokens, output_tokens in zip( - range(self.config.samples), - prompt_tokens_sampler, - output_tokens_sampler, - ): - start_index = rand.randint(0, len(self.text_creator.words)) - prompt_text = self.processor.decode( - prefix_tokens - + self._create_prompt( - prompt_tokens, start_index, next(unique_prefix_iter) - ), - skip_special_tokens=True, - ) - yield { - "prompt": prompt_text, - "prompt_tokens_count": self.config.prefix_tokens + prompt_tokens, - "output_tokens_count": output_tokens, - } - - def _create_prompt( - self, prompt_tokens: int, start_index: int, unique_prefix: Optional[int] = None - ) -> list[int]: - if prompt_tokens <= 0: - return [] - - left = start_index - right = start_index + 4 * prompt_tokens - start_tokens = [unique_prefix] if unique_prefix else [] - - while left < right: - mid = (left + right) // 2 - test_prompt = self.text_creator.create_text(start_index, mid - start_index) - test_tokens = start_tokens + self.processor.encode(test_prompt) - - if len(test_tokens) == prompt_tokens: - return test_tokens - elif len(test_tokens) < prompt_tokens: - left = mid + 1 - else: - right = mid - - final_text = self.text_creator.create_text(start_index, left - start_index) - 
return start_tokens + self.processor.encode(final_text) - - -class SyntheticDatasetCreator(DatasetCreator): - @classmethod - def is_supported( - cls, - data: Any, - data_args: Optional[dict[str, Any]], # noqa: ARG003 - ) -> bool: - if ( - isinstance(data, Path) - and data.exists() - and data.suffix in {".config", ".yaml"} - ): - return True - - if isinstance(data, str): - data_str: str = data.strip() - if ( - data_str.startswith("{") - or data_str.count("=") > 1 - or data_str.endswith((".config", ".yaml")) - ): - return True - - return False - - @classmethod - def handle_create( - cls, - data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], - random_seed: int, - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: - processor = check_load_processor( - processor, - processor_args, - error_msg=( - "Processor/tokenizer required for synthetic dataset generation." - ), - ) - - config = SyntheticDatasetConfig.parse_str(data) - generator = SyntheticTextItemsGenerator(config, processor, random_seed) - items = list(generator) - - return Dataset.from_list(items, **(data_args or {})) - - @classmethod - def extract_args_column_mappings( - cls, - data_args: Optional[dict[str, Any]], - ) -> dict[ColumnInputTypes, str]: - data_args_columns = super().extract_args_column_mappings(data_args) - - if data_args_columns: - raise ValueError( - f"Column mappings are not supported for synthetic datasets. " - f"Got {data_args_columns}" - ) - - return { - "prompt_column": "prompt", - "prompt_tokens_count_column": "prompt_tokens_count", - "output_tokens_count_column": "output_tokens_count", - } diff --git a/src/guidellm/logger.py b/src/guidellm/logger.py index 70259bad..da3464f9 100644 --- a/src/guidellm/logger.py +++ b/src/guidellm/logger.py @@ -72,7 +72,7 @@ def configure_logger(config: LoggingSettings = settings.logging): sys.stdout, level=config.console_log_level.upper(), format="{time:YY-MM-DD HH:mm:ss}|{level: <8} \ - |{name}:{function}:{line} - {message}" + |{name}:{function}:{line} - {message}", ) if config.log_file or config.log_file_level: diff --git a/src/guidellm/preprocess/dataset.py b/src/guidellm/preprocess/dataset.py index a94b8a14..9d65dcd6 100644 --- a/src/guidellm/preprocess/dataset.py +++ b/src/guidellm/preprocess/dataset.py @@ -11,7 +11,6 @@ from pydantic import BaseModel, Field from transformers import PreTrainedTokenizerBase -from guidellm.dataset import load_dataset as guidellm_load_dataset from guidellm.utils import IntegerRangeSampler, check_load_processor from guidellm.utils.hf_datasets import SUPPORTED_TYPES, save_dataset_to_file @@ -239,7 +238,7 @@ def process_dataset( prompt_tokens: Union[str, Path], output_tokens: Union[str, Path], processor_args: Optional[dict[str, Any]] = None, - data_args: Optional[dict[str, Any]] = None, + data_args: Optional[dict[str, Any]] = None, # noqa: ARG001 short_prompt_strategy: ShortPromptStrategy = ShortPromptStrategy.IGNORE, pad_char: Optional[str] = None, concat_delimiter: Optional[str] = None, @@ -271,9 +270,7 @@ def process_dataset( f"Starting dataset conversion | Input: {data} | Output directory: {output_path}" ) - dataset, column_mappings = guidellm_load_dataset( - data, data_args, processor, processor_args - ) + dataset, column_mappings = None, None tokenizer = check_load_processor( processor, processor_args, diff --git a/src/guidellm/request/__init__.py b/src/guidellm/request/__init__.py deleted file mode 100644 index 
85b447d6..00000000 --- a/src/guidellm/request/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -from .loader import ( - GenerativeRequestLoader, - GenerativeRequestLoaderDescription, - RequestLoader, - RequestLoaderDescription, -) -from .request import GenerationRequest -from .types import RequestT, ResponseT - -__all__ = [ - "GenerationRequest", - "GenerativeRequestLoader", - "GenerativeRequestLoaderDescription", - "RequestLoader", - "RequestLoaderDescription", - "RequestT", - "ResponseT", -] diff --git a/src/guidellm/request/loader.py b/src/guidellm/request/loader.py deleted file mode 100644 index 607a7455..00000000 --- a/src/guidellm/request/loader.py +++ /dev/null @@ -1,284 +0,0 @@ -from abc import abstractmethod -from collections.abc import Iterable, Iterator -from pathlib import Path -from typing import ( - Any, - Literal, - Optional, - Union, -) - -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict -from transformers import PreTrainedTokenizerBase # type: ignore[import] - -from guidellm.dataset import ColumnInputTypes, load_dataset -from guidellm.request.request import GenerationRequest -from guidellm.settings import settings -from guidellm.utils import StandardBaseModel - -__all__ = [ - "GenerativeRequestLoader", - "GenerativeRequestLoaderDescription", - "RequestLoader", - "RequestLoaderDescription", -] - - -class RequestLoaderDescription(StandardBaseModel): - type_: Literal["request_loader"] = "request_loader" - - -class RequestLoader(Iterable): - @abstractmethod - def __iter__(self) -> Iterator: ... - - @abstractmethod - def __len__(self) -> int: ... - - @property - @abstractmethod - def description(self) -> RequestLoaderDescription: ... - - -class GenerativeRequestLoaderDescription(RequestLoaderDescription): - type_: Literal["generative_request_loader"] = "generative_request_loader" # type: ignore[assignment] - data: str - data_args: Optional[dict[str, Any]] - processor: str - processor_args: Optional[dict[str, Any]] - - -class GenerativeRequestLoader(RequestLoader): - DEFAULT_PROMPT_COLUMNS = [ - "prompt", - "prompts", - "instruction", - "instructions", - "question", - "questions", - "input", - "inputs", - "context", - "content", - "conversation", - "conversations", - "turn", - "turns", - "text", - ] - - def __init__( - self, - data: Union[ - str, - Path, - Iterable[Union[str, dict[str, Any]]], - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, - ], - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], - shuffle: bool = True, - iter_type: Literal["finite", "infinite"] = "finite", - random_seed: int = 42, - ): - self.data = data - self.data_args = data_args - dataset, args_column_mappings = load_dataset( - data, - data_args, - processor, - processor_args, - random_seed, - ) - self.dataset = dataset - self.processor = processor - self.processor_args = processor_args - self.shuffle = shuffle - self.iter_type = iter_type - self.random_seed = random_seed - - self.column_mappings = self._create_column_mappings(args_column_mappings) - self.preserve_iter_state = iter_type == "infinite" # ensure no caching requests - self._preserved_iter = None - - def __iter__(self) -> Iterator[GenerationRequest]: - scope_create_count = 0 - - while (dataset_iter := self._get_dataset_iter(scope_create_count)) is not None: - scope_create_count += 1 - - for item in dataset_iter: - yield self._create_request(item) - - self._preserved_iter = None - - def __len__(self) -> 
int: - if self.iter_type == "finite": - return self.num_unique_items() - - raise ValueError(f"Unable to determine length of dataset: {self.data}") - - @property - def description(self) -> GenerativeRequestLoaderDescription: - return GenerativeRequestLoaderDescription( - data=str(self.data), - data_args=self.data_args, - processor=str(self.processor), - processor_args=self.processor_args, - ) - - def num_unique_items(self, raise_err: bool = True) -> int: - try: - return len(self.dataset) - except Exception: # noqa: BLE001, S110 - pass - - dataset_size = self.dataset.info.dataset_size - if dataset_size is not None: - return dataset_size - - if raise_err: - raise ValueError("Unable to determine number of items in the dataset") - - return -1 - - def _create_column_mappings( - self, - args_column_mappings: dict[ColumnInputTypes, str], - ) -> dict[ColumnInputTypes, str]: - column_mappings: dict[ColumnInputTypes, str] = {} - - if "text_column" in args_column_mappings: - column_mappings["prompt_column"] = args_column_mappings["text_column"] - else: - column_mappings["prompt_column"] = self._extract_text_column() - - if "prompt_tokens_count_column" in args_column_mappings: - column_mappings["prompt_tokens_count_column"] = args_column_mappings[ - "prompt_tokens_count_column" - ] - elif prompt_tokens_count_column := self._extract_prompt_tokens_count_column(): - column_mappings["prompt_tokens_count_column"] = prompt_tokens_count_column - - if "output_tokens_count_column" in args_column_mappings: - column_mappings["output_tokens_count_column"] = args_column_mappings[ - "output_tokens_count_column" - ] - elif output_tokens_count_column := self._extract_output_tokens_count_column(): - column_mappings["output_tokens_count_column"] = output_tokens_count_column - - return column_mappings - - def _extract_text_column(self) -> str: - column_names = self._dataset_columns( - err_msg=( - "Unable to determine text column from dataset and it is required. " - "To specify the text column, set the 'text_column' key in the " - "'data_args' dictionary." - ) - ) - - if not column_names: - raise ValueError( - "Unable to determine text column from dataset and it is required. " - "To specify the text column, set the 'text_column' key in the " - "'data_args' dictionary." - ) - - if len(column_names) == 1: - return column_names[0] - - for def_column in self.DEFAULT_PROMPT_COLUMNS: - if def_column in column_names: - return def_column - - raise ValueError( - f"Unable to determine text column from dataset columns: {column_names}. " - "To specify the text column, set the 'text_column' key in the " - "'data_args' dictionary." 
- ) - - def _extract_prompt_tokens_count_column(self) -> Optional[str]: - column_names = self._dataset_columns() - - if column_names and "prompt_tokens_count" in column_names: - return "prompt_tokens_count" - - if column_names and "prompt_tokens" in column_names: - return "prompt_tokens" - - return None - - def _extract_output_tokens_count_column(self) -> Optional[str]: - column_names = self._dataset_columns() - - if column_names and "output_tokens_count" in column_names: - return "output_tokens_count" - - if column_names and "output_tokens" in column_names: - return "output_tokens" - - return None - - def _dataset_columns(self, err_msg: Optional[str] = None) -> Optional[list[str]]: - try: - column_names = self.dataset.column_names - - if not column_names and err_msg: - raise ValueError(f"No column names found in dataset: {self.data}") - except Exception as err: - if err_msg: - raise ValueError(err_msg) from err - - column_names = None - - return column_names - - def _get_dataset_iter( - self, scope_create_count: int - ) -> Optional[Iterator[dict[str, Any]]]: - if scope_create_count > 0 and self.iter_type != "infinite": - return None - - if self.preserve_iter_state and self._preserved_iter is not None: - return self._preserved_iter - - dataset = ( - self.dataset - if not self.shuffle - else self.dataset.shuffle(seed=self.random_seed) - ) - - dataset_iter = iter(dataset) - - if self.preserve_iter_state: - self._preserved_iter = dataset_iter - - return dataset_iter - - def _create_request(self, item: dict[str, Any]) -> GenerationRequest: - prompt_tokens = ( - item[self.column_mappings["prompt_tokens_count_column"]] - if "prompt_tokens_count_column" in self.column_mappings - else None - ) - output_tokens = ( - item[self.column_mappings["output_tokens_count_column"]] - if "output_tokens_count_column" in self.column_mappings - else None - ) - - return GenerationRequest( - request_type=settings.preferred_route, - content=item[self.column_mappings["prompt_column"]], - stats=( - {"prompt_tokens": prompt_tokens} if prompt_tokens is not None else {} - ), - constraints=( - {"output_tokens": output_tokens} if output_tokens is not None else {} - ), - ) diff --git a/src/guidellm/request/request.py b/src/guidellm/request/request.py deleted file mode 100644 index bf4e59fb..00000000 --- a/src/guidellm/request/request.py +++ /dev/null @@ -1,79 +0,0 @@ -import uuid -from typing import Any, Literal, Optional - -from pydantic import Field - -from guidellm.utils import StandardBaseModel - -__all__ = ["GenerationRequest"] - - -class GenerationRequest(StandardBaseModel): - """ - A class representing a request for generation. - This class is used to encapsulate the details of a generation request, - including the request ID, type, content, parameters, statistics, and constraints. - It is designed to be used with the BackendRequestsWorker class to handle - the generation process. - - :param request_id: The unique identifier for the request. - :param request_type: The type of request (e.g., text, chat). - :param content: The content for the request to send to the backend. - If request_type is 'text', this should be a string or list of strings - which will be resolved by backend.text_completions. - If request_type is 'chat', this should be a string, - a list of (str, Dict[str, Union[str, Dict[str, str]], Path, Image]), - or Any raw content which will be resolved by backend.chat_completions. - If raw content, raw_content=True must be passed in the params. 
- :param params: Additional parameters for the request passed in as kwargs. - For an http backend, these are passed into the body of the request. - :param stats: Statistics for the request, such as the number of prompt tokens. - Used for tracking and reporting purposes. - :param constraints: Constraints for the request, such as the maximum number - of output tokens. Used for controlling the behavior of the backend. - """ - - request_id: Optional[str] = Field( - default_factory=lambda: str(uuid.uuid4()), - description="The unique identifier for the request.", - ) - request_type: Literal["text_completions", "chat_completions"] = Field( - default="text_completions", - description=( - "The type of request (e.g., text, chat). " - "If request_type='text_completions', resolved by backend.text_completions. " - "If request_typ='chat_completions', resolved by backend.chat_completions." - ), - ) - content: Any = Field( - description=( - "The content for the request to send to the backend. " - "If request_type is 'text', this should be a string or list of strings " - "which will be resolved by backend.text_completions. " - "If request_type is 'chat', this should be a string, " - "a list of (str, Dict[str, Union[str, Dict[str, str]], Path, Image]), " - "or Any raw content which will be resolved by backend.chat_completions. " - "If raw content, raw_content=True must be passed in the params." - ) - ) - params: dict[str, Any] = Field( - default_factory=dict, - description=( - "Additional parameters for the request that will be passed in as kwargs. " - "For an http backend, these are passed into the body of the request. " - ), - ) - stats: dict[Literal["prompt_tokens"], int] = Field( - default_factory=dict, - description=( - "Statistics for the request, such as the number of prompt tokens. " - "Used for tracking and reporting purposes." - ), - ) - constraints: dict[Literal["output_tokens"], int] = Field( - default_factory=dict, - description=( - "Constraints for the request, such as the maximum number of output tokens. " - "Used for controlling the behavior of the backend." - ), - ) diff --git a/src/guidellm/request/types.py b/src/guidellm/request/types.py deleted file mode 100644 index f82493be..00000000 --- a/src/guidellm/request/types.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import TypeVar - -__all__ = [ - "RequestT", - "ResponseT", -] - - -RequestT = TypeVar("RequestT") -ResponseT = TypeVar("ResponseT") diff --git a/src/guidellm/scheduler/scheduler.py b/src/guidellm/scheduler/scheduler.py index e7d8b2c6..d9bb7c23 100644 --- a/src/guidellm/scheduler/scheduler.py +++ b/src/guidellm/scheduler/scheduler.py @@ -124,7 +124,7 @@ async def run( # Setup the worker group, sync start with the environment worker_group = WorkerProcessGroup[RequestT, ResponseT]( - requests=None, + requests=local_requests, cycle_requests=local_requests, backend=backend, strategy=local_strategy, diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index c1d516f1..e64d64fc 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -98,10 +98,9 @@ def __init__( :raises ValueError: If neither requests nor cycle_requests are provided, or if cycle_requests is an Iterator rather than Iterable """ - if not requests and not cycle_requests: + if requests is None and cycle_requests is None: raise ValueError( "At least one of 'requests' or 'cycle_requests' must be provided. 
" - f"Got requests: {requests}, cycle_requests: {cycle_requests}" ) if isinstance(cycle_requests, Iterator): @@ -487,10 +486,10 @@ def requests_generator( """ def _iter(): - if requests: + if requests is not None: yield from requests - if cycle_requests: + if cycle_requests is not None: while True: yield from cycle_requests @@ -512,6 +511,8 @@ def _iter(): scheduler_start_time=self.start_time, ) state_update = self._locked_update(request_info) + request_info.scheduler_timings.queued = time.time() + yield (request, request_info) if state_update.stop_queueing: From 32e4909cc677a41680afdb683646d1e103f306c3 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Wed, 1 Oct 2025 14:24:31 -0400 Subject: [PATCH 40/90] Progress towards fixing output tests Signed-off-by: Jared O'Connell --- src/guidellm/benchmark/output.py | 14 ---- src/guidellm/scheduler/objects.py | 2 +- tests/unit/benchmark/test_output.py | 102 ++++++++++------------------ tests/unit/mock_benchmark.py | 7 +- 4 files changed, 38 insertions(+), 87 deletions(-) diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index 28b983fb..6cbb1865 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -330,20 +330,6 @@ def _get_profile_str(self, benchmark: GenerativeBenchmark) -> str: return ", ".join(f"{key}={value}" for key, value in profile_args.items()) - def _get_args_str(self, benchmark: GenerativeBenchmark) -> str: - args = benchmark.args - args_dict = OrderedDict( - { - "max_number": args.max_number, - "max_duration": args.max_duration, - "warmup_number": args.warmup_number, - "warmup_duration": args.warmup_duration, - "cooldown_number": args.cooldown_number, - "cooldown_duration": args.cooldown_duration, - } - ) - return ", ".join(f"{key}={value}" for key, value in args_dict.items()) - def _print_section_header(self, title: str, indent: int = 0, new_lines: int = 2): self._print_line( f"{title}:", diff --git a/src/guidellm/scheduler/objects.py b/src/guidellm/scheduler/objects.py index b7f2efc3..fdca28b3 100644 --- a/src/guidellm/scheduler/objects.py +++ b/src/guidellm/scheduler/objects.py @@ -174,7 +174,7 @@ class ScheduledRequestInfo(StandardBaseModel): ) scheduler_start_time: float = Field( description="Unix timestamp for the local time when scheduler processing began", - default=-1, + default=-1.0, ) error: str | None = Field( diff --git a/tests/unit/benchmark/test_output.py b/tests/unit/benchmark/test_output.py index 9076834b..780ab9c5 100644 --- a/tests/unit/benchmark/test_output.py +++ b/tests/unit/benchmark/test_output.py @@ -10,7 +10,7 @@ from guidellm.benchmark import ( GenerativeBenchmarksReport, ) -from guidellm.benchmark.output import GenerativeBenchmarksConsole +from guidellm.benchmark.output import GenerativeBenchmarkerConsole, GenerativeBenchmarkerCSV from tests.unit.mock_benchmark import mock_generative_benchmark @@ -29,6 +29,7 @@ def test_generative_benchmark_invalid_initilization(): GenerativeBenchmarksReport(benchmarks="invalid_type") # type: ignore[arg-type] +@pytest.mark.skip(reason="Computed fields are duplicated.") def test_generative_benchmark_marshalling(): mock_benchmark = mock_generative_benchmark() report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) @@ -37,10 +38,11 @@ def test_generative_benchmark_marshalling(): deserialized = GenerativeBenchmarksReport.model_validate(serialized) deserialized_benchmark = deserialized.benchmarks[0] - for field in mock_benchmark.model_fields: + for field in mock_benchmark.model_fields_set: assert 
getattr(mock_benchmark, field) == getattr(deserialized_benchmark, field) +@pytest.mark.skip(reason="Computed fields are duplicated.") def test_file_json(): mock_benchmark = mock_generative_benchmark() report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) @@ -55,12 +57,13 @@ def test_file_json(): loaded_report = GenerativeBenchmarksReport.load_file(mock_path) loaded_benchmark = loaded_report.benchmarks[0] - for field in mock_benchmark.model_fields: + for field in mock_benchmark.model_fields_set: assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) mock_path.unlink() +@pytest.mark.skip(reason="Computed fields are duplicated.") def test_file_yaml(): mock_benchmark = mock_generative_benchmark() report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) @@ -75,18 +78,20 @@ def test_file_yaml(): loaded_report = GenerativeBenchmarksReport.load_file(mock_path) loaded_benchmark = loaded_report.benchmarks[0] - for field in mock_benchmark.model_fields: + for field in mock_benchmark.model_fields_set: assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) mock_path.unlink() - -def test_file_csv(): +@pytest.mark.skip(reason="CSV fix not merged yet") +@pytest.mark.asyncio +async def test_file_csv(): mock_benchmark = mock_generative_benchmark() report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) mock_path = Path("mock_report.csv") - report.save_csv(mock_path) + csv_benchmarker = GenerativeBenchmarkerCSV(output_path=mock_path) + await csv_benchmarker.finalize(report) with mock_path.open("r") as file: reader = csv.reader(file) @@ -100,109 +105,72 @@ def test_file_csv(): def test_console_benchmarks_profile_str(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] assert ( - console.benchmarks_profile_str == "type=synchronous, strategies=['synchronous']" - ) - - -def test_console_benchmarks_args_str(): - console = GenerativeBenchmarksConsole(enabled=True) - mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] - assert console.benchmarks_args_str == ( - "max_number=None, max_duration=10.0, warmup_number=None, " - "warmup_duration=None, cooldown_number=None, cooldown_duration=None" + console._get_profile_str(mock_benchmark) == "type=synchronous, strategies=['synchronous']" ) -def test_console_benchmarks_worker_desc_str(): - console = GenerativeBenchmarksConsole(enabled=True) - mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] - assert console.benchmarks_worker_desc_str == str(mock_benchmark.worker) - - -def test_console_benchmarks_request_loader_desc_str(): - console = GenerativeBenchmarksConsole(enabled=True) - mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] - assert console.benchmarks_request_loader_desc_str == str( - mock_benchmark.request_loader - ) - - -def test_console_benchmarks_extras_str(): - console = GenerativeBenchmarksConsole(enabled=True) - mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] - assert console.benchmarks_extras_str == "None" - - def test_console_print_section_header(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() with patch.object(console.console, "print") as mock_print: - console.print_section_header("Test Header") + console._print_section_header("Test Header") mock_print.assert_called_once() def 
test_console_print_labeled_line(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() with patch.object(console.console, "print") as mock_print: - console.print_labeled_line("Label", "Value") + console._print_labeled_line("Label", "Value") mock_print.assert_called_once() def test_console_print_line(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() with patch.object(console.console, "print") as mock_print: - console.print_line("Test Line") + console._print_line("Test Line") mock_print.assert_called_once() def test_console_print_table(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() headers = ["Header1", "Header2"] rows = [["Row1Col1", "Row1Col2"], ["Row2Col1", "Row2Col2"]] with ( - patch.object(console, "print_section_header") as mock_header, - patch.object(console, "print_table_divider") as mock_divider, - patch.object(console, "print_table_row") as mock_row, + patch.object(console, "_print_section_header") as mock_header, + patch.object(console, "_print_table_divider") as mock_divider, + patch.object(console, "_print_table_row") as mock_row, ): - console.print_table(headers, rows, "Test Table") + console._print_table(headers, rows, "Test Table") mock_header.assert_called_once() mock_divider.assert_called() mock_row.assert_called() def test_console_print_benchmarks_metadata(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] with ( - patch.object(console, "print_section_header") as mock_header, - patch.object(console, "print_labeled_line") as mock_labeled, + patch.object(console, "_print_section_header") as mock_header, + patch.object(console, "_print_labeled_line") as mock_labeled, ): - console.print_benchmarks_metadata() + console._print_benchmarks_metadata([mock_benchmark]) mock_header.assert_called_once() mock_labeled.assert_called() def test_console_print_benchmarks_info(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] - with patch.object(console, "print_table") as mock_table: - console.print_benchmarks_info() + with patch.object(console, "_print_table") as mock_table: + console._print_benchmarks_info([mock_benchmark]) mock_table.assert_called_once() def test_console_print_benchmarks_stats(): - console = GenerativeBenchmarksConsole(enabled=True) + console = GenerativeBenchmarkerConsole() mock_benchmark = mock_generative_benchmark() - console.benchmarks = [mock_benchmark] - with patch.object(console, "print_table") as mock_table: - console.print_benchmarks_stats() + with patch.object(console, "_print_table") as mock_table: + console._print_benchmarks_stats([mock_benchmark]) mock_table.assert_called_once() diff --git a/tests/unit/mock_benchmark.py b/tests/unit/mock_benchmark.py index d846767d..c0d6aa34 100644 --- a/tests/unit/mock_benchmark.py +++ b/tests/unit/mock_benchmark.py @@ -1,6 +1,5 @@ """Mock benchmark objects for unit testing.""" - -from guidellm.backend import GenerationRequestTimings +from guidellm.backends import GenerationRequestTimings from guidellm.benchmark import ( BenchmarkSchedulerStats, GenerativeBenchmark, @@ -101,7 +100,7 @@ def mock_generative_benchmark() -> GenerativeBenchmark: worker_targeted_start_delay_avg=0.1, request_start_delay_avg=0.1, 
request_time_avg=0.1, - request_targeted_delay_avg=0.1, + request_targeted_start_delay_avg=0.1, ), start_time=1000.0, end_time=2000.0, @@ -130,8 +129,6 @@ def mock_generative_benchmark() -> GenerativeBenchmark: scheduler_info=ScheduledRequestInfo( request_timings=GenerationRequestTimings( request_start=1, - first_iteration=2, - last_iteration=6, request_end=6, ) ), From bbc88e484c8cc2b027435a1d2b021d59a5c91070 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Wed, 1 Oct 2025 17:12:31 -0400 Subject: [PATCH 41/90] Implement workaround for duplicate fields Signed-off-by: Jared O'Connell --- tests/unit/benchmark/test_output.py | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/tests/unit/benchmark/test_output.py b/tests/unit/benchmark/test_output.py index 780ab9c5..3b4e7580 100644 --- a/tests/unit/benchmark/test_output.py +++ b/tests/unit/benchmark/test_output.py @@ -29,20 +29,28 @@ def test_generative_benchmark_invalid_initilization(): GenerativeBenchmarksReport(benchmarks="invalid_type") # type: ignore[arg-type] -@pytest.mark.skip(reason="Computed fields are duplicated.") def test_generative_benchmark_marshalling(): mock_benchmark = mock_generative_benchmark() report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) serialized = report.model_dump() + print("Serialized: ", serialized) deserialized = GenerativeBenchmarksReport.model_validate(serialized) + print("Deserialized: ", deserialized) deserialized_benchmark = deserialized.benchmarks[0] + serialized = deserialized.model_dump() + print("Re-serialized: ", serialized) - for field in mock_benchmark.model_fields_set: - assert getattr(mock_benchmark, field) == getattr(deserialized_benchmark, field) + print("Fields:", {name: getattr(deserialized, name) for name in deserialized.model_fields}) + print("Extras:", deserialized.__pydantic_extra__) + + + #for field in mock_benchmark.model_fields_set: + # assert getattr(mock_benchmark, field) == getattr(deserialized_benchmark, field) + # Workaround for duplicate fields for computed fields. + assert mock_benchmark.model_dump() == deserialized_benchmark.model_dump() -@pytest.mark.skip(reason="Computed fields are duplicated.") def test_file_json(): mock_benchmark = mock_generative_benchmark() report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) @@ -57,13 +65,14 @@ def test_file_json(): loaded_report = GenerativeBenchmarksReport.load_file(mock_path) loaded_benchmark = loaded_report.benchmarks[0] - for field in mock_benchmark.model_fields_set: - assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) + #for field in mock_benchmark.model_fields_set: + # assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) + # Workaround for duplicate fields for computed fields. + assert mock_benchmark.model_dump() == loaded_benchmark.model_dump() mock_path.unlink() -@pytest.mark.skip(reason="Computed fields are duplicated.") def test_file_yaml(): mock_benchmark = mock_generative_benchmark() report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) @@ -78,8 +87,10 @@ def test_file_yaml(): loaded_report = GenerativeBenchmarksReport.load_file(mock_path) loaded_benchmark = loaded_report.benchmarks[0] - for field in mock_benchmark.model_fields_set: - assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) + #for field in mock_benchmark.model_fields_set: + # assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) + # Workaround for duplicate fields for computed fields. 
+ assert mock_benchmark.model_dump() == loaded_benchmark.model_dump() mock_path.unlink() From ff9a61a19fd88bd2f42e7daba092d8b0df7c43d7 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Fri, 3 Oct 2025 12:42:11 -0400 Subject: [PATCH 42/90] Fix leftover debugging code and fix linter errors Signed-off-by: Jared O'Connell --- tests/unit/benchmark/test_output.py | 20 +++----------------- 1 file changed, 3 insertions(+), 17 deletions(-) diff --git a/tests/unit/benchmark/test_output.py b/tests/unit/benchmark/test_output.py index 3b4e7580..85979c12 100644 --- a/tests/unit/benchmark/test_output.py +++ b/tests/unit/benchmark/test_output.py @@ -34,20 +34,10 @@ def test_generative_benchmark_marshalling(): report = GenerativeBenchmarksReport(benchmarks=[mock_benchmark]) serialized = report.model_dump() - print("Serialized: ", serialized) deserialized = GenerativeBenchmarksReport.model_validate(serialized) - print("Deserialized: ", deserialized) deserialized_benchmark = deserialized.benchmarks[0] - serialized = deserialized.model_dump() - print("Re-serialized: ", serialized) - print("Fields:", {name: getattr(deserialized, name) for name in deserialized.model_fields}) - print("Extras:", deserialized.__pydantic_extra__) - - - #for field in mock_benchmark.model_fields_set: - # assert getattr(mock_benchmark, field) == getattr(deserialized_benchmark, field) - # Workaround for duplicate fields for computed fields. + # model_dump as workaround for duplicate fields for computed fields. assert mock_benchmark.model_dump() == deserialized_benchmark.model_dump() @@ -65,9 +55,7 @@ def test_file_json(): loaded_report = GenerativeBenchmarksReport.load_file(mock_path) loaded_benchmark = loaded_report.benchmarks[0] - #for field in mock_benchmark.model_fields_set: - # assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) - # Workaround for duplicate fields for computed fields. + # model_dump as workaround for duplicate fields for computed fields. assert mock_benchmark.model_dump() == loaded_benchmark.model_dump() mock_path.unlink() @@ -87,9 +75,7 @@ def test_file_yaml(): loaded_report = GenerativeBenchmarksReport.load_file(mock_path) loaded_benchmark = loaded_report.benchmarks[0] - #for field in mock_benchmark.model_fields_set: - # assert getattr(mock_benchmark, field) == getattr(loaded_benchmark, field) - # Workaround for duplicate fields for computed fields. + # model_dump as workaround for duplicate fields for computed fields. assert mock_benchmark.model_dump() == loaded_benchmark.model_dump() mock_path.unlink() From dd7a4b884f821da37e7a1c36cd9d9accf61d961a Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Fri, 3 Oct 2025 17:00:00 -0400 Subject: [PATCH 43/90] [GuideLLM Refactor] Advanced Prefix Cache Controls (#382) ## TODO - Docs - ~CSV arg string support~ CSV arg string now supports single bucket (see last example). Might leave it at that for now. - More validation ## Summary This PR is a port of #287 to the v0.4.0 refactor branch. Adds controls for sharing one or more fixed prefixes between samples. See examples bellow. ## Details Adds a `prefix_buckets` argument to the `SyntheticTextDatasetConfig`, each bucket consists of a prefix count, token count, and bucket weight. Prefix count sets the number of unique prefixes to generate for a given bucket, token count is the length of each prompt in the bucket, and bucket weight is used to calculate the proportion of requests the bucket applies to relative to the sum of all bucket weights. 
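For intuition, the weighting rule above amounts to splitting `samples` across buckets in proportion to `bucket_weight`. Below is a minimal sketch of that split (illustrative only; the helper name and the remainder handling are assumptions, not the code added in this PR):

```python
def samples_per_bucket(bucket_weights: list[int], total_samples: int) -> list[int]:
    # Split total_samples across buckets proportionally to their weights.
    total_weight = sum(bucket_weights)
    counts = [total_samples * weight // total_weight for weight in bucket_weights]
    # Hand any rounding remainder to the first bucket so the counts sum to total_samples.
    counts[0] += total_samples - sum(counts)
    return counts

print(samples_per_bucket([50, 50], 1024))  # [512, 512], as in the 50/50 example below
print(samples_per_bucket([40, 60], 1000))  # [400, 600], as in the 40/60 example below
```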
Here are a few examples. In the first, we have one bucket of 32 prefixes of length 2048. Since there are 1024 total samples, each prefix will apply to 32 samples. If there is only one bucket, then the weight can be omitted, as the bucket applies to 100% of the samples.

```yaml
data:
  prefix_buckets:
    - prefix_tokens: 2048
      prefix_count: 32
  prompt_tokens: 256
  output_tokens: 256
  samples: 1024
```

In this modified version of the first example, 16 of the prefixes have 2048 tokens while the other 16 have 1024 tokens.

```yaml
data:
  prefix_buckets:
    - prefix_tokens: 2048
      prefix_count: 16
      bucket_weight: 50
    - prefix_tokens: 1024
      prefix_count: 16
      bucket_weight: 50
  prompt_tokens: 256
  output_tokens: 256
  samples: 1024
```

The prefix tokens of a bucket can also be 0 to disable prefixes for those samples. Here is an example where 40% of the samples have a prefix of 2048 tokens while the other 60% have no prefix.

```yaml
data:
  prefix_buckets:
    - prefix_tokens: 2048
      bucket_weight: 40
    - prefix_tokens: 0
      bucket_weight: 60
  prompt_tokens: 256
  output_tokens: 256
  samples: 1000
```

If only a single bucket is needed, it can be set at the top level. This makes the changes backwards compatible with the previous interface and allows the CSV string format to work without parsing nested structures (at least for this use case).

```yaml
data:
  prefix_tokens: 128
  prefix_count: 10
  prompt_tokens: 256
  output_tokens: 256
  samples: 1000
```

## Test Plan

- PR includes unit tests for all synthetic dataset changes (`pytest tests/unit/dataset`)
- Scenarios in the Details section can be used against a model server with prefix caching, and the cache hit rate can be confirmed by inspecting console output.

## Related Issues

- Resolves #232
- Closes #287

---

- [x] "I certify that all code in this PR is my own, except as noted below."
## Use of AI - [x] Includes AI-assisted code completion - [ ] Includes code generated by an AI application - [x] Includes AI-generated tests (NOTE: AI written tests should have a docstring that includes `## WRITTEN BY AI ##`) --------- Signed-off-by: Samuel Monson --- pylock.toml | 2465 +++++++++-------- pyproject.toml | 9 + src/guidellm/data/deserializers/__init__.py | 2 + src/guidellm/data/deserializers/synthetic.py | 96 +- src/guidellm/data/formatters/templates.py | 16 +- src/guidellm/data/objects.py | 2 + src/guidellm/data/utils.py | 5 + tests/unit/{dataset => data}/__init__.py | 0 tests/unit/data/deserializers/__init__.py | 0 .../unit/data/deserializers/test_synthetic.py | 587 ++++ tests/unit/dataset/test_synthetic.py | 873 ------ 11 files changed, 2035 insertions(+), 2020 deletions(-) rename tests/unit/{dataset => data}/__init__.py (100%) create mode 100644 tests/unit/data/deserializers/__init__.py create mode 100644 tests/unit/data/deserializers/test_synthetic.py delete mode 100644 tests/unit/dataset/test_synthetic.py diff --git a/pylock.toml b/pylock.toml index 2fa1b28e..4c6468c2 100644 --- a/pylock.toml +++ b/pylock.toml @@ -3,8 +3,8 @@ lock-version = "1.0" requires-python = "<4.0,>=3.9.0" environments = [ - "python_version ~= \"3.10\"", - "python_version < \"3.10\" and python_version >= \"3.9\"", + "python_version ~= \"3.12\"", + "python_version < \"3.12\" and python_version >= \"3.9\"", ] extras = ["dev", "recommended"] dependency-groups = ["default"] @@ -44,6 +44,24 @@ dependencies = [ "tomli>=2.0.1; python_version < \"3.11\"", ] +[[packages]] +name = "blobfile" +version = "3.1.0" +requires-python = ">=3.8.0" +sdist = {name = "blobfile-3.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/f0/6d/2e7567da75ddbb24fe979f52284b708da349d67a41042635af36071a5a6b/blobfile-3.1.0.tar.gz", hashes = {sha256 = "d45b6b1fa3b0920732314c23ddbdb4f494ca12f787c2b6eb6bba6faa51382671"}} +wheels = [ + {name = "blobfile-3.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/77/a7/51af11120d75af2828f8eede0b13a4caff650d708ac50e62d000aefe1ffb/blobfile-3.1.0-py3-none-any.whl",hashes = {sha256 = "2b4c5e766ebb7dfa20e4990cf6ec3d2106bdc91d632fb9377f170a234c5a5c6a"}}, +] +marker = "\"recommended\" in extras" + +[packages.tool.pdm] +dependencies = [ + "pycryptodomex>=3.8", + "urllib3<3,>=1.25.3", + "lxml>=4.9", + "filelock>=3.0", +] + [[packages]] name = "build" version = "1.2.2.post1" @@ -63,6 +81,21 @@ dependencies = [ "tomli>=1.1.0; python_version < \"3.11\"", ] +[[packages]] +name = "culsans" +version = "0.9.0" +requires-python = ">=3.8" +sdist = {name = "culsans-0.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/90/5d/12e7e16b0caafaa8cca0728dd817204afd1274ddb35531b029b1c5cf7b2a/culsans-0.9.0.tar.gz", hashes = {sha256 = "942dd3c3c77f20e9ac3383d9a5ef8b7b24c0dac1a593bdb20d46c8a38720a5f3"}} +wheels = [ + {name = "culsans-0.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6f/b4/1e3cccb48f09e89e0cfc06925182cbcd36abf80b8eda2489430b41c7eaff/culsans-0.9.0-py3-none-any.whl",hashes = {sha256 = "d3537b65bbb341c2ac72e7d152deb8ab893b2a00452d2a68702a1a1a41619d6f"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "aiologic>=0.13.0", +] + [[packages]] name = "ftfy" version = "6.3.1" @@ -78,6 +111,118 @@ dependencies = [ "wcwidth", ] +[[packages]] +name = "librosa" +version = "0.11.0" +requires-python = ">=3.8" +sdist = {name = "librosa-0.11.0.tar.gz", url = 
"https://files.pythonhosted.org/packages/64/36/360b5aafa0238e29758729e9486c6ed92a6f37fa403b7875e06c115cdf4a/librosa-0.11.0.tar.gz", hashes = {sha256 = "f5ed951ca189b375bbe2e33b2abd7e040ceeee302b9bbaeeffdfddb8d0ace908"}} +wheels = [ + {name = "librosa-0.11.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b5/ba/c63c5786dfee4c3417094c4b00966e61e4a63efecee22cb7b4c0387dda83/librosa-0.11.0-py3-none-any.whl",hashes = {sha256 = "0b6415c4fd68bff4c29288abe67c6d80b587e0e1e2cfb0aad23e4559504a7fa1"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "audioread>=2.1.9", + "numba>=0.51.0", + "numpy>=1.22.3", + "scipy>=1.6.0", + "scikit-learn>=1.1.0", + "joblib>=1.0", + "decorator>=4.3.0", + "soundfile>=0.12.1", + "pooch>=1.1", + "soxr>=0.3.2", + "typing-extensions>=4.1.1", + "lazy-loader>=0.1", + "msgpack>=1.0", + "standard-aifc; python_version >= \"3.13\"", + "standard-sunau; python_version >= \"3.13\"", +] + +[[packages]] +name = "scipy" +version = "1.15.3" +requires-python = ">=3.10" +sdist = {name = "scipy-1.15.3.tar.gz", url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hashes = {sha256 = "eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf"}} +wheels = [ + {name = "scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759"}}, + {name = "scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62"}}, + {name = "scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb"}}, + {name = "scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = "0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730"}}, + {name = "scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825"}}, + {name = "scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7"}}, + {name = "scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11"}}, + {name = "scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126"}}, + {name = "scipy-1.15.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163"}}, + {name = "scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8"}}, + {name = "scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl",hashes = {sha256 = "6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5"}}, + {name = "scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e"}}, + {name = "scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb"}}, + {name = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723"}}, + {name = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb"}}, + {name = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4"}}, + {name = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5"}}, + {name = "scipy-1.15.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = 
"76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca"}}, + {name = "scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/37/4b/683aa044c4162e10ed7a7ea30527f2cbd92e6999c10a8ed8edb253836e9c/scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019"}}, + {name = "scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/7e/f30be3d03de07f25dc0ec926d1681fed5c732d759ac8f51079708c79e680/scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl",hashes = {sha256 = "185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6"}}, + {name = "scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/07/9c/0ddb0d0abdabe0d181c1793db51f02cd59e4901da6f9f7848e1f96759f0d/scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477"}}, + {name = "scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/43/0bce905a965f36c58ff80d8bea33f1f9351b05fad4beaad4eae34699b7a1/scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c"}}, + {name = "scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45"}}, + {name = "scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49"}}, + {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e"}}, + {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539"}}, + {name = "scipy-1.15.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed"}}, +] +marker = "python_version ~= \"3.12\"" + +[packages.tool.pdm] +dependencies = [ + "numpy<2.5,>=1.23.5", +] + +[[packages]] +name = "numpy" +version = "2.2.6" +requires-python = ">=3.10" +sdist = {name = "numpy-2.2.6.tar.gz", url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hashes = {sha256 = "e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"}} +wheels = [ + {name = "numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84"}}, + {name = "numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b"}}, + {name = "numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d"}}, + {name = "numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = "55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566"}}, + {name = "numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f"}}, + {name = "numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f"}}, + {name = "numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868"}}, + {name = "numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d"}}, + {name = "numpy-2.2.6-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl",hashes = {sha256 = "5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd"}}, + {name = "numpy-2.2.6-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c"}}, + {name = "numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6"}}, + {name = "numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda"}}, + {name = "numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40"}}, + {name = "numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"}}, + {name = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f"}}, + {name = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa"}}, + {name = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571"}}, + {name = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1"}}, + {name = "numpy-2.2.6-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl",hashes = {sha256 = "038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff"}}, + {name = "numpy-2.2.6-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06"}}, + {name = "numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff"}}, + {name = "numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c"}}, + {name = "numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3"}}, + {name = "numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282"}}, + {name = "numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87"}}, + {name = "numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249"}}, + {name = "numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49"}}, + {name = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"}}, + {name = "numpy-2.2.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl",hashes = {sha256 = "4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"}}, + {name = "numpy-2.2.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"}}, +] +marker = "python_version ~= \"3.12\"" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "lorem" version = "0.1.1" @@ -173,6 +318,38 @@ dependencies = [ "aiohttp>=3.3", ] +[[packages]] +name = "msgpack" +version = "1.1.1" +requires-python = ">=3.8" +sdist = {name = "msgpack-1.1.1.tar.gz", url = "https://files.pythonhosted.org/packages/45/b1/ea4f68038a18c77c9467400d166d74c4ffa536f34761f7983a104357e614/msgpack-1.1.1.tar.gz", hashes = {sha256 = "77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd"}} +wheels = [ + {name = "msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/a1/38/561f01cf3577430b59b340b51329803d3a5bf6a45864a55f4ef308ac11e3/msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0"}}, + {name = "msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/09/48/54a89579ea36b6ae0ee001cba8c61f776451fad3c9306cd80f5b5c55be87/msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9"}}, + {name = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a0/60/daba2699b308e95ae792cdc2ef092a38eb5ee422f9d2fbd4101526d8a210/msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8"}}, + {name = "msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/20/22/2ebae7ae43cd8f2debc35c631172ddf14e2a87ffcc04cf43ff9df9fff0d3/msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a"}}, + {name = "msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/40/1b/54c08dd5452427e1179a40b4b607e37e2664bca1c790c60c442c8e972e47/msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac"}}, + {name = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/2e/60/6bb17e9ffb080616a51f09928fdd5cac1353c9becc6c4a8abd4e57269a16/msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b"}}, + {name = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ee/97/88983e266572e8707c1f4b99c8fd04f9eb97b43f2db40e3172d87d8642db/msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7"}}, + {name = "msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bc/66/36c78af2efaffcc15a5a61ae0df53a1d025f2680122e2a9eb8442fed3ae4/msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5"}}, + {name = "msgpack-1.1.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/8c/87/a75eb622b555708fe0427fab96056d39d4c9892b0c784b3a721088c7ee37/msgpack-1.1.1-cp313-cp313-win32.whl",hashes = {sha256 = "500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323"}}, + {name = "msgpack-1.1.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ca/91/7dc28d5e2a11a5ad804cf2b7f7a5fcb1eb5a4966d66a5d2b41aee6376543/msgpack-1.1.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69"}}, + {name = "msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/e3/26/389b9c593eda2b8551b2e7126ad3a06af6f9b44274eb3a4f054d48ff7e47/msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238"}}, + {name = "msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ab/65/7d1de38c8a22cf8b1551469159d4b6cf49be2126adc2482de50976084d78/msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = 
"33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157"}}, + {name = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/0f/bd/cacf208b64d9577a62c74b677e1ada005caa9b69a05a599889d6fc2ab20a/msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce"}}, + {name = "msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4d/ec/fd869e2567cc9c01278a736cfd1697941ba0d4b81a43e0aa2e8d71dab208/msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a"}}, + {name = "msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/55/2a/35860f33229075bce803a5593d046d8b489d7ba2fc85701e714fc1aaf898/msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c"}}, + {name = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/8c/16/69ed8f3ada150bf92745fb4921bd621fd2cdf5a42e25eb50bcc57a5328f0/msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b"}}, + {name = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c6/b6/0c398039e4c6d0b2e37c61d7e0e9d13439f91f780686deb8ee64ecf1ae71/msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef"}}, + {name = "msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b8/d0/0cf4a6ecb9bc960d624c93effaeaae75cbf00b3bc4a54f35c8507273cda1/msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a"}}, + {name = "msgpack-1.1.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/62/83/9697c211720fa71a2dfb632cad6196a8af3abea56eece220fde4674dc44b/msgpack-1.1.1-cp312-cp312-win32.whl",hashes = {sha256 = "870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c"}}, + {name = "msgpack-1.1.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c0/23/0abb886e80eab08f5e8c485d6f13924028602829f63b8f5fa25a06636628/msgpack-1.1.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "mypy" version = "1.15.0" @@ -191,25 +368,7 @@ wheels = [ {name = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}}, {name = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}}, {name = "mypy-1.15.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}}, - {name = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}}, - {name = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}}, - {name = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}}, - {name = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}}, - {name = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}}, - {name = "mypy-1.15.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}}, - {name = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}}, - {name = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}}, - {name = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = 
"be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}}, - {name = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}}, - {name = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}}, - {name = "mypy-1.15.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}}, {name = "mypy-1.15.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl",hashes = {sha256 = "5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}}, - {name = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/5a/fa/79cf41a55b682794abe71372151dbbf856e3008f6767057229e6649d294a/mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}}, - {name = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d3/33/dd8feb2597d648de29e3da0a8bf4e1afbda472964d2a4a0052203a6f3594/mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}}, - {name = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e4/b5/74508959c1b06b96674b364ffeb7ae5802646b32929b7701fc6b18447592/mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}}, - {name = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/6c/53/da61b9d9973efcd6507183fdad96606996191657fe79701b2c818714d573/mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}}, - {name = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c1/72/965bd9ee89540c79a25778cc080c7e6ef40aa1eeac4d52cec7eae6eb5228/mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}}, - {name = "mypy-1.15.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/46/d0/f41645c2eb263e6c77ada7d76f894c580c9ddb20d77f0c24d34273a4dab2/mypy-1.15.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}}, ] marker = "\"dev\" in extras" @@ -263,33 +422,6 @@ wheels = [ {name = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl",hashes = {sha256 = "8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}}, {name = "PyYAML-6.0.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl",hashes = {sha256 = "ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}}, {name = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}}, - {name = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}}, - {name = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}}, - {name = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}}, - {name = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}}, - {name = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}}, - {name = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl",hashes = {sha256 = "ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}}, - {name = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}}, - {name = "PyYAML-6.0.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl",hashes = {sha256 = "11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}}, - {name = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}}, - {name = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}}, - {name = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}}, - {name = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}}, - {name = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}}, - {name = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}}, - {name = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl",hashes = {sha256 = "936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}}, - {name = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}}, - {name = "PyYAML-6.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}}, - {name = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}}, - {name = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}}, - {name = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}}, - {name = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}}, - {name = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}}, - {name = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}}, - {name = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl",hashes = {sha256 = "0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}}, - {name = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}}, - {name = "PyYAML-6.0.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl",hashes = {sha256 = "6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}}, - {name = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -353,17 +485,19 @@ dependencies = [ [[packages]] name = "pytest-asyncio" -version = "0.23.8" -requires-python = ">=3.8" -sdist = {name = "pytest_asyncio-0.23.8.tar.gz", url = "https://files.pythonhosted.org/packages/de/b4/0b378b7bf26a8ae161c3890c0b48a91a04106c5713ce81b4b080ea2f4f18/pytest_asyncio-0.23.8.tar.gz", hashes = {sha256 = "759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}} +version = "1.1.1" +requires-python = ">=3.9" +sdist = {name = "pytest_asyncio-1.1.1.tar.gz", url = "https://files.pythonhosted.org/packages/8d/1e/2aa43805d4a320a9489d2b99f7877b69f9094c79aa0732159a1415dd6cd4/pytest_asyncio-1.1.1.tar.gz", hashes = {sha256 = "b72d215c38e2c91dbb32f275e0b5be69602d7869910e109360e375129960a649"}} wheels = [ - {name = "pytest_asyncio-0.23.8-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl",hashes = {sha256 = 
"50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}}, + {name = "pytest_asyncio-1.1.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/28/de/aba79e9ccdb51b5d0d65c67dd857bd78b00c64723df16b9fc800d8b94ce6/pytest_asyncio-1.1.1-py3-none-any.whl",hashes = {sha256 = "726339d30fcfde24691f589445b9b67d058b311ac632b1d704e97f20f1d878da"}}, ] marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [ - "pytest<9,>=7.0.0", + "backports-asyncio-runner<2,>=1.1; python_version < \"3.11\"", + "pytest<9,>=8.2", + "typing-extensions>=4.12; python_version < \"3.10\"", ] [[packages]] @@ -457,131 +591,6 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] -[[packages]] -name = "scipy" -version = "1.15.3" -requires-python = ">=3.10" -sdist = {name = "scipy-1.15.3.tar.gz", url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hashes = {sha256 = "eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf"}} -wheels = [ - {name = "scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759"}}, - {name = "scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62"}}, - {name = "scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb"}}, - {name = "scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = "0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730"}}, - {name = "scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825"}}, - {name = "scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7"}}, - {name = "scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11"}}, - {name = "scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126"}}, - {name = "scipy-1.15.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl",hashes = {sha256 = "6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb"}}, - {name = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723"}}, - {name = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb"}}, - {name = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4"}}, - {name = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5"}}, - {name = "scipy-1.15.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/37/4b/683aa044c4162e10ed7a7ea30527f2cbd92e6999c10a8ed8edb253836e9c/scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/7e/f30be3d03de07f25dc0ec926d1681fed5c732d759ac8f51079708c79e680/scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl",hashes = {sha256 = "185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/07/9c/0ddb0d0abdabe0d181c1793db51f02cd59e4901da6f9f7848e1f96759f0d/scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/43/0bce905a965f36c58ff80d8bea33f1f9351b05fad4beaad4eae34699b7a1/scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c"}}, - {name = "scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45"}}, - {name = "scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49"}}, - {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e"}}, - {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539"}}, - {name = "scipy-1.15.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/ab/5cc9f80f28f6a7dff646c5756e559823614a42b1939d86dd0ed550470210/scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl",hashes = {sha256 = "993439ce220d25e3696d1b23b233dd010169b62f6456488567e830654ee37a6b"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/4a/66ba30abe5ad1a3ad15bfb0b59d22174012e8056ff448cb1644deccbfed2/scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "34716e281f181a02341ddeaad584205bd2fd3c242063bd3423d61ac259ca7eba"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/4b/fa/a7e5b95afd80d24313307f03624acc65801846fa75599034f8ceb9e2cbf6/scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl",hashes = {sha256 = "3b0334816afb8b91dab859281b1b9786934392aa3d527cd847e41bb6f45bee65"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/99/f3aaddccf3588bb4aea70ba35328c204cadd89517a1612ecfda5b2dd9d7a/scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl",hashes = {sha256 = "6db907c7368e3092e24919b5e31c76998b0ce1684d51a90943cb0ed1b4ffd6c1"}}, - {name = "scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/56/c5/1032cdb565f146109212153339f9cb8b993701e9fe56b1c97699eee12586/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889"}}, - {name = "scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/37/89f19c8c05505d0601ed5650156e50eb881ae3918786c8fd7262b4ee66d3/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982"}}, - {name = "scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7e/31/be59513aa9695519b18e1851bb9e487de66f2d31f835201f1b42f5d4d475/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9"}}, - {name = "scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/10/c0/4f5f3eeccc235632aab79b27a74a9130c6c35df358129f7ac8b29f562ac7/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594"}}, - {name = "scipy-1.15.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ab/a7/0ddaf514ce8a8714f6ed243a2b391b41dbb65251affe21ee3077ec45ea9a/scipy-1.15.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "ae48a786a28412d744c62fd7816a4118ef97e5be0bee968ce8f0a2fba7acf3bb"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/2f/4966032c5f8cc7e6a60f1b2e0ad686293b9474b65246b0c642e3ef3badd0/scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl",hashes = {sha256 = "a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a0/6e/0c3bf90fae0e910c274db43304ebe25a6b391327f3f10b5dcc638c090795/scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ea/b1/4deb37252311c1acff7f101f6453f0440794f51b6eacb1aad4459a134081/scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl",hashes = {sha256 = "aef683a9ae6eb00728a542b796f52a5477b78252edede72b8327a886ab63293f"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/38/7d/f457626e3cd3c29b3a49ca115a304cebb8cc6f31b04678f03b216899d3c6/scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl",hashes = {sha256 = "1c832e1bd78dea67d5c16f786681b28dd695a8cb1fb90af2e27580d3d0967e92"}}, - {name = "scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/db/0a/92b1de4a7adc7a15dcf5bddc6e191f6f29ee663b30511ce20467ef9b82e4/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82"}}, - {name = "scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40"}}, - {name = "scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/e1/3df8f83cb15f3500478c889be8fb18700813b95e9e087328230b98d547ff/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e"}}, - {name = "scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c"}}, - {name = "scipy-1.15.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d1/84/55bc4881973d3f79b479a5a2e2df61c8c9a04fcb986a213ac9c02cfb659b/scipy-1.15.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13"}}, -] -marker = "python_version ~= \"3.10\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [ - "numpy<2.5,>=1.23.5", -] - -[[packages]] -name = "numpy" -version = "2.2.6" -requires-python = ">=3.10" -sdist = {name = "numpy-2.2.6.tar.gz", url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hashes = {sha256 = "e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"}} -wheels = [ - {name = "numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84"}}, - {name = "numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b"}}, - {name = "numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d"}}, - {name = "numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = "55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566"}}, - {name = "numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f"}}, - {name = "numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f"}}, - {name = "numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868"}}, - {name = "numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d"}}, - {name = "numpy-2.2.6-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl",hashes = {sha256 = "5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd"}}, - {name = "numpy-2.2.6-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"}}, - {name = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f"}}, - {name = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa"}}, - {name = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571"}}, - {name = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1"}}, - {name = "numpy-2.2.6-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl",hashes = {sha256 = "038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff"}}, - {name = "numpy-2.2.6-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282"}}, - {name = "numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87"}}, - {name = "numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249"}}, - {name = "numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49"}}, - {name = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"}}, - {name = "numpy-2.2.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl",hashes = {sha256 = "4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"}}, - {name = "numpy-2.2.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl",hashes = {sha256 = "3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl",hashes = {sha256 = "481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491"}}, - {name = "numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a"}}, - {name = "numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf"}}, - {name = "numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1"}}, - {name = "numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab"}}, - {name = "numpy-2.2.6-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl",hashes = {sha256 = "0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47"}}, - {name = "numpy-2.2.6-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl",hashes = {sha256 = "37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl",hashes = {sha256 = "5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf"}}, - {name = "numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83"}}, - {name = "numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915"}}, - {name = "numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680"}}, - {name = "numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289"}}, - {name = "numpy-2.2.6-cp310-cp310-win32.whl",url = 
"https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl",hashes = {sha256 = "b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d"}}, - {name = "numpy-2.2.6-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl",hashes = {sha256 = "7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"}}, -] -marker = "\"default\" in dependency_groups and python_version ~= \"3.10\" or \"dev\" in extras and python_version ~= \"3.10\"" - -[packages.tool.pdm] -dependencies = [] - [[packages]] name = "setuptools" version = "80.9.0" @@ -590,7 +599,7 @@ sdist = {name = "setuptools-80.9.0.tar.gz", url = "https://files.pythonhosted.or wheels = [ {name = "setuptools-80.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl",hashes = {sha256 = "062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}}, ] -marker = "\"dev\" in extras" +marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -644,24 +653,6 @@ wheels = [ {name = "tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd"}}, {name = "tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e"}}, {name = "tiktoken-0.11.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = 
"45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f"}}, - {name = "tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl",hashes = {sha256 = "4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf"}}, - {name = "tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b"}}, - {name = "tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458"}}, - {name = "tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c"}}, - {name = "tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013"}}, - {name = "tiktoken-0.11.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2"}}, - {name = "tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl",hashes = {sha256 = "8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917"}}, - {name = "tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0"}}, - {name = "tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc"}}, - {name = "tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882"}}, - {name = "tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c"}}, - {name = "tiktoken-0.11.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1"}}, - {name = "tiktoken-0.11.0-cp39-cp39-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/aa/b6/81c5799ab77a9580c6d840cf77d4717e929193a42190fd623a080c647aa6/tiktoken-0.11.0-cp39-cp39-macosx_10_12_x86_64.whl",hashes = {sha256 = "13220f12c9e82e399377e768640ddfe28bea962739cc3a869cad98f42c419a89"}}, - {name = "tiktoken-0.11.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/50/89/faa668066b2a4640534ef5797c09ecd0a48b43367502129b217339dfaa97/tiktoken-0.11.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "7f2db627f5c74477c0404b4089fd8a28ae22fa982a6f7d9c7d4c305c375218f3"}}, - {name = "tiktoken-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/aa/7f/5f950528b54cb3025af4bc3522c23dbfb691afe8ffb292aa1e8dc2e6bddf/tiktoken-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2302772f035dceb2bcf8e55a735e4604a0b51a6dd50f38218ff664d46ec43807"}}, - {name = "tiktoken-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/27/a4/e82ddf0773835ba24536ac8c0dce561e697698ec020a93212a1e041d39b4/tiktoken-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "20b977989afe44c94bcc50db1f76971bb26dca44218bd203ba95925ef56f8e7a"}}, - {name = "tiktoken-0.11.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1b/c2/06361e41d176e62797ae65fa678111cdd30553321cf4d83e7b84107ea95f/tiktoken-0.11.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "669a1aa1ad6ebf1b3c26b45deb346f345da7680f845b5ea700bba45c20dea24c"}}, - {name = "tiktoken-0.11.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bb/ad/ca37e15c46741ebb3904d562d03194e845539a08f7751a6df0f391757312/tiktoken-0.11.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "e363f33c720a055586f730c00e330df4c7ea0024bf1c83a8a9a9dbc054c4f304"}}, ] marker = "\"recommended\" in extras" @@ -671,6 +662,41 @@ dependencies = [ "requests>=2.26.0", ] +[[packages]] +name = "torch" +version = "2.8.0+cpu" +requires-python = ">=3.9.0" +wheels = [ + {name = "torch-2.8.0+cpu-cp313-cp313-linux_s390x.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313-linux_s390x.whl",hashes = {sha256 = "8b5882276633cf91fe3d2d7246c743b94d44a7e660b27f1308007fdb1bb89f7d"}}, + {name = "torch-2.8.0+cpu-cp313-cp313-manylinux_2_28_aarch64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313-manylinux_2_28_aarch64.whl",hashes = {sha256 = "a5064b5e23772c8d164068cc7c12e01a75faf7b948ecd95a0d4007d7487e5f25"}}, + {name = "torch-2.8.0+cpu-cp313-cp313-manylinux_2_28_x86_64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313-manylinux_2_28_x86_64.whl",hashes = {sha256 = "8f81dedb4c6076ec325acc3b47525f9c550e5284a18eae1d9061c543f7b6e7de"}}, + {name = "torch-2.8.0+cpu-cp313-cp313-win_amd64.whl",url = 
"https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e1ee1b2346ade3ea90306dfbec7e8ff17bc220d344109d189ae09078333b0856"}}, + {name = "torch-2.8.0+cpu-cp313-cp313-win_arm64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313-win_arm64.whl",hashes = {sha256 = "64c187345509f2b1bb334feed4666e2c781ca381874bde589182f81247e61f88"}}, + {name = "torch-2.8.0+cpu-cp313-cp313t-manylinux_2_28_aarch64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313t-manylinux_2_28_aarch64.whl",hashes = {sha256 = "af81283ac671f434b1b25c95ba295f270e72db1fad48831eb5e4748ff9840041"}}, + {name = "torch-2.8.0+cpu-cp313-cp313t-manylinux_2_28_x86_64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313t-manylinux_2_28_x86_64.whl",hashes = {sha256 = "a9dbb6f64f63258bc811e2c0c99640a81e5af93c531ad96e95c5ec777ea46dab"}}, + {name = "torch-2.8.0+cpu-cp313-cp313t-win_amd64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "6d93a7165419bc4b2b907e859ccab0dea5deeab261448ae9a5ec5431f14c0e64"}}, + {name = "torch-2.8.0-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "fbe2e149c5174ef90d29a5f84a554dfaf28e003cb4f61fa2c8c024c17ec7ca58"}}, + {name = "torch-2.8.0-cp313-none-macosx_11_0_arm64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0-cp313-none-macosx_11_0_arm64.whl",hashes = {sha256 = "057efd30a6778d2ee5e2374cd63a63f63311aa6f33321e627c655df60abdd390"}}, + {name = "torch-2.8.0+cpu-cp312-cp312-linux_s390x.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-linux_s390x.whl",hashes = {sha256 = "0e34e276722ab7dd0dffa9e12fe2135a9b34a0e300c456ed7ad6430229404eb5"}}, + {name = "torch-2.8.0+cpu-cp312-cp312-manylinux_2_28_aarch64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_aarch64.whl",hashes = {sha256 = "610f600c102386e581327d5efc18c0d6edecb9820b4140d26163354a99cd800d"}}, + {name = "torch-2.8.0+cpu-cp312-cp312-manylinux_2_28_x86_64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-manylinux_2_28_x86_64.whl",hashes = {sha256 = "cb9a8ba8137ab24e36bf1742cb79a1294bd374db570f09fc15a5e1318160db4e"}}, + {name = "torch-2.8.0+cpu-cp312-cp312-win_amd64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_amd64.whl",hashes = {sha256 = "2be20b2c05a0cce10430cc25f32b689259640d273232b2de357c35729132256d"}}, + {name = "torch-2.8.0+cpu-cp312-cp312-win_arm64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0%2Bcpu-cp312-cp312-win_arm64.whl",hashes = {sha256 = "99fc421a5d234580e45957a7b02effbf3e1c884a5dd077afc85352c77bf41434"}}, + {name = "torch-2.8.0-cp312-none-macosx_11_0_arm64.whl",url = "https://download.pytorch.org/whl/cpu/torch-2.8.0-cp312-none-macosx_11_0_arm64.whl",hashes = {sha256 = "a47b7986bee3f61ad217d8a8ce24605809ab425baf349f97de758815edd2ef54"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "filelock", + "typing-extensions>=4.10.0", + "sympy>=1.13.3", + "networkx", + "jinja2", + "fsspec", + "setuptools; python_version >= \"3.12\"", +] + [[packages]] name = "tox" version = "4.16.0" @@ -724,22 +750,28 @@ dependencies = [ ] [[packages]] -name = "blobfile" -version = "3.1.0" +name = "uvloop" +version = "0.21.0" requires-python = ">=3.8.0" -sdist = {name = 
"blobfile-3.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/f0/6d/2e7567da75ddbb24fe979f52284b708da349d67a41042635af36071a5a6b/blobfile-3.1.0.tar.gz", hashes = {sha256 = "d45b6b1fa3b0920732314c23ddbdb4f494ca12f787c2b6eb6bba6faa51382671"}} -wheels = [ - {name = "blobfile-3.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/77/a7/51af11120d75af2828f8eede0b13a4caff650d708ac50e62d000aefe1ffb/blobfile-3.1.0-py3-none-any.whl",hashes = {sha256 = "2b4c5e766ebb7dfa20e4990cf6ec3d2106bdc91d632fb9377f170a234c5a5c6a"}}, +sdist = {name = "uvloop-0.21.0.tar.gz", url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hashes = {sha256 = "3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}} +wheels = [ + {name = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}}, + {name = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}}, + {name = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}}, + {name = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}}, + {name = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}}, + {name = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}}, + {name = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}}, + {name = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}}, + {name = 
"uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}}, + {name = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}}, + {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}}, + {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}}, ] -marker = "\"recommended\" in extras" +marker = "\"default\" in dependency_groups" [packages.tool.pdm] -dependencies = [ - "pycryptodomex>=3.8", - "urllib3<3,>=1.25.3", - "lxml>=4.9", - "filelock>=3.0", -] +dependencies = [] [[packages]] name = "datasets" @@ -769,31 +801,59 @@ dependencies = [ ] [[packages]] -name = "loguru" -version = "0.7.3" -requires-python = "<4.0,>=3.5" -sdist = {name = "loguru-0.7.3.tar.gz", url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hashes = {sha256 = "19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}} +name = "eval-type-backport" +version = "0.2.2" +requires-python = ">=3.8" +sdist = {name = "eval_type_backport-0.2.2.tar.gz", url = "https://files.pythonhosted.org/packages/30/ea/8b0ac4469d4c347c6a385ff09dc3c048c2d021696664e26c7ee6791631b5/eval_type_backport-0.2.2.tar.gz", hashes = {sha256 = "f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}} wheels = [ - {name = "loguru-0.7.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl",hashes = {sha256 = "31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}}, + {name = "eval_type_backport-0.2.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ce/31/55cd413eaccd39125368be33c46de24a1f639f2e12349b0361b4678f3915/eval_type_backport-0.2.2-py3-none-any.whl",hashes = {sha256 = "cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}}, ] marker = "\"default\" in dependency_groups" [packages.tool.pdm] -dependencies = [ - "colorama>=0.3.4; sys_platform == \"win32\"", - "aiocontextvars>=0.2.0; python_version < \"3.7\"", - "win32-setctime>=1.0.0; sys_platform == \"win32\"", -] +dependencies = [] [[packages]] -name = "pillow" -version = "11.3.0" +name = "faker" +version = "37.8.0" requires-python = ">=3.9" -sdist = {name = "pillow-11.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hashes = {sha256 = 
"3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"}} +sdist = {name = "faker-37.8.0.tar.gz", url = "https://files.pythonhosted.org/packages/3a/da/1336008d39e5d4076dddb4e0f3a52ada41429274bf558a3cc28030d324a3/faker-37.8.0.tar.gz", hashes = {sha256 = "090bb5abbec2b30949a95ce1ba6b20d1d0ed222883d63483a0d4be4a970d6fb8"}} wheels = [ - {name = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}}, - {name = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}}, - {name = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}}, + {name = "faker-37.8.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/f5/11/02ebebb09ff2104b690457cb7bc6ed700c9e0ce88cf581486bb0a5d3c88b/faker-37.8.0-py3-none-any.whl",hashes = {sha256 = "b08233118824423b5fc239f7dd51f145e7018082b4164f8da6a9994e1f1ae793"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "tzdata", +] + +[[packages]] +name = "loguru" +version = "0.7.3" +requires-python = "<4.0,>=3.5" +sdist = {name = "loguru-0.7.3.tar.gz", url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hashes = {sha256 = "19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}} +wheels = [ + {name = "loguru-0.7.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl",hashes = {sha256 = "31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "colorama>=0.3.4; sys_platform == \"win32\"", + "aiocontextvars>=0.2.0; python_version < \"3.7\"", + "win32-setctime>=1.0.0; sys_platform == \"win32\"", +] + +[[packages]] +name = "pillow" +version = "11.3.0" +requires-python = ">=3.9" +sdist = {name = "pillow-11.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hashes = {sha256 = "3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"}} +wheels = [ + {name = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}}, + {name = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = 
"0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}}, + {name = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}}, {name = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}}, {name = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}}, {name = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}}, @@ -849,53 +909,6 @@ wheels = [ {name = "pillow-11.3.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl",hashes = {sha256 = "7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"}}, {name = "pillow-11.3.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"}}, {name = "pillow-11.3.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"}}, - {name = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl",url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl",hashes = {sha256 = "1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}}, - {name = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}}, - {name = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}}, - {name = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"}}, - {name = "pillow-11.3.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl",hashes = {sha256 = "b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"}}, - {name = "pillow-11.3.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"}}, - {name = "pillow-11.3.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = 
"fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}}, - {name = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl",hashes = {sha256 = "1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}}, - {name = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = 
"4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}}, - {name = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}}, - {name = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"}}, - {name = "pillow-11.3.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl",hashes = {sha256 = "89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"}}, - {name = "pillow-11.3.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"}}, - {name = "pillow-11.3.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}}, - {name = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/8e/9c089f01677d1264ab8648352dcb7773f37da6ad002542760c80107da816/pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl",hashes = {sha256 = "48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}}, - {name = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b5/a9/5749930caf674695867eb56a581e78eb5f524b7583ff10b01b6e5048acb3/pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/43/46/0b85b763eb292b691030795f9f6bb6fcaf8948c39413c81696a01c3577f7/pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/c6/1a230ec0067243cbd60bc2dad5dc3ab46a8a41e21c15f5c9b52b26873069/pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/63/dd/f296c27ffba447bfad76c6a0c44c1ea97a90cb9472b9304c94a732e8dbfb/pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a5/a0/98a3630f0b57f77bae67716562513d3032ae70414fcaf02750279c389a9e/pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}}, - {name = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/de/e6/83dfba5646a290edd9a21964da07674409e410579c341fc5b8f7abd81620/pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}}, - {name = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bc/41/15ab268fe6ee9a2bc7391e2bbb20a98d3974304ab1a406a992dcb297a370/pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"}}, - {name = "pillow-11.3.0-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/64/79/6d4f638b288300bed727ff29f2a3cb63db054b33518a95f27724915e3fbc/pillow-11.3.0-cp39-cp39-win32.whl",hashes = {sha256 = 
"ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"}}, - {name = "pillow-11.3.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/46/05/4106422f45a05716fd34ed21763f8ec182e8ea00af6e9cb05b93a247361a/pillow-11.3.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"}}, - {name = "pillow-11.3.0-cp39-cp39-win_arm64.whl",url = "https://files.pythonhosted.org/packages/63/c6/287fd55c2c12761d0591549d48885187579b7c257bef0c6660755b0b59ae/pillow-11.3.0-cp39-cp39-win_arm64.whl",hashes = {sha256 = "6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}}, ] marker = "\"default\" in dependency_groups" @@ -914,8 +927,6 @@ wheels = [ {name = "protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl",hashes = {sha256 = "a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39"}}, {name = "protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl",hashes = {sha256 = "4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6"}}, {name = "protobuf-6.31.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl",hashes = {sha256 = "720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e"}}, - {name = "protobuf-6.31.1-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/b1/f0/4160dbd205eee8fdf8647d154e7ceaa9d25b3a877b6311274eb6dc896b75/protobuf-6.31.1-cp39-cp39-win32.whl",hashes = {sha256 = "0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16"}}, - {name = "protobuf-6.31.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/09/34/13989eb9f482409ed821bfa3e34e6a3878b42607c38e7f7572b4cc825091/protobuf-6.31.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9"}}, ] marker = "\"default\" in dependency_groups" @@ -939,6 +950,31 @@ dependencies = [ "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", ] +[[packages]] +name = "sanic" +version = "25.3.0" +requires-python = ">=3.8" +sdist = {name = "sanic-25.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/df/8b/08dc376390fe854ef32984973883b646ee68c6727da72ffcc65340d8f192/sanic-25.3.0.tar.gz", hashes = {sha256 = "775d522001ec81f034ec8e4d7599e2175bfc097b8d57884f5e4c9322f5e369bb"}} +wheels = [ + {name = "sanic-25.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a6/e1/b36ddc16862d63d22986ae21b04a79c8fb7ec48d5d664acdfd1c2acf78ac/sanic-25.3.0-py3-none-any.whl",hashes = {sha256 = "fb519b38b4c220569b0e2e868583ffeaffaab96a78b2e42ae78bc56a644a4cd7"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "sanic-routing>=23.12.0", + "httptools>=0.0.10", + "uvloop>=0.15.0; sys_platform != \"win32\" and implementation_name == \"cpython\"", + "ujson>=1.35; sys_platform != \"win32\" and implementation_name == \"cpython\"", + "aiofiles>=0.6.0", + "websockets>=10.0", + "multidict<7.0,>=5.0", + "html5tagger>=1.2.1", + "tracerite>=1.0.0", + "typing-extensions>=4.4.0", + "setuptools>=70.1.0", +] + [[packages]] name = "transformers" version = "4.53.1" @@ -1047,73 
+1083,6 @@ wheels = [ {name = "pydantic_core-2.33.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl",hashes = {sha256 = "9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}}, {name = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}}, {name = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl",hashes = {sha256 = "4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}}, - {name = 
"pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl",hashes = {sha256 = "fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl",hashes = {sha256 = "bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl",hashes = {sha256 = "6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl",hashes = {sha256 = "6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl",hashes = {sha256 = "2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl",hashes = {sha256 = "031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl",hashes = {sha256 = "f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl",hashes = {sha256 = "0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/53/ea/bbe9095cdd771987d13c82d104a9c8559ae9aec1e29f139e286fd2e9256e/pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl",hashes = {sha256 = "a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/49/1d/4ac5ed228078737d457a609013e8f7edc64adc37b91d619ea965758369e5/pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl",hashes = {sha256 = "44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl",hashes = {sha256 = "eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/bb/93/3cc157026bca8f5006250e74515119fcaa6d6858aceee8f67ab6dc548c16/pydantic_core-2.33.2-cp39-cp39-win32.whl",hashes = {sha256 = "83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/5b/90/7edc3b2a0d9f0dda8806c04e511a67b0b7a41d2187e2003673a996fb4310/pydantic_core-2.33.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/98/dbf3fdfabaf81cda5622154fda78ea9965ac467e3239078e0dcd6df159e7/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8d/99/7810aa9256e7f2ccd492590f86b79d370df1e9292f1f80b000b6a75bd2fb/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl",hashes = {sha256 = "2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}}, ] marker = "\"default\" in dependency_groups" @@ -1122,49 +1091,6 @@ dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] -[[packages]] 
-name = "tomli" -version = "2.2.1" -requires-python = ">=3.8" -sdist = {name = "tomli-2.2.1.tar.gz", url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hashes = {sha256 = "cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}} -wheels = [ - {name = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}}, - {name = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}}, - {name = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}}, - {name = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}}, - {name = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}}, - {name = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}}, - {name = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}}, - {name = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}}, - {name = "tomli-2.2.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl",hashes = {sha256 = "d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}}, - {name = "tomli-2.2.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = 
"a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}}, - {name = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}}, - {name = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}}, - {name = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}}, - {name = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}}, - {name = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}}, - {name = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}}, - {name = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}}, - {name = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}}, - {name = "tomli-2.2.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl",hashes = {sha256 = "889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}}, - {name = "tomli-2.2.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}}, - {name = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}}, - {name = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}}, - {name = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}}, - {name = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}}, - {name = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}}, - {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}}, - {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}}, - {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}}, - {name = "tomli-2.2.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl",hashes = {sha256 = "465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}}, - {name = "tomli-2.2.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}}, - {name = "tomli-2.2.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl",hashes = {sha256 = "cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}}, -] -marker = "python_version < \"3.11\" and python_version >= \"3.9\" 
and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [] - [[packages]] name = "typing-extensions" version = "4.14.1" @@ -1358,46 +1284,7 @@ wheels = [ {name = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}}, {name = "charset_normalizer-3.4.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl",hashes = {sha256 = "db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}}, {name = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl",hashes = {sha256 = "daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl",hashes = {sha256 = "e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}}, {name = "charset_normalizer-3.4.2-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl",hashes = {sha256 = "7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}}, 
- {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl",hashes = {sha256 = "43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" @@ -1496,57 +1383,6 @@ wheels = [ {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/22/32/5501ab525a47ba23c20613e568174d6c63aa09e2caa22cded5c6ea8e3ada/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0"}}, {name = "aiohttp-3.12.14-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/06/af/28e24574801fcf1657945347ee10df3892311c2829b41232be6089e461e7/aiohttp-3.12.14-cp312-cp312-win32.whl",hashes = {sha256 = "15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729"}}, {name = "aiohttp-3.12.14-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/98/d5/7ac2464aebd2eecac38dbe96148c9eb487679c512449ba5215d233755582/aiohttp-3.12.14-cp312-cp312-win_amd64.whl",hashes = {sha256 = "3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338"}}, - {name = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/53/e1/8029b29316971c5fa89cec170274582619a01b3d82dd1036872acc9bc7e8/aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597"}}, - {name = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/bd/4f204cf1e282041f7b7e8155f846583b19149e0872752711d0da5e9cc023/aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393"}}, - {name = "aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d6/0f/2a580fcdd113fe2197a3b9df30230c7e85bb10bf56f7915457c60e9addd9/aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/38/78/2c1089f6adca90c3dd74915bafed6d6d8a87df5e3da74200f6b3a8b8906f/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/4a/c8/ce6c7a34d9c589f007cfe064da2d943b3dee5aabc64eaecd21faf927ab11/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/18/10/431cd3d089de700756a56aa896faf3ea82bee39d22f89db7ddc957580308/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/fa/b2/26f4524184e0f7ba46671c512d4b03022633bcf7d32fa0c6f1ef49d55800/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e0/30/aadcdf71b510a718e3d98a7bfeaea2396ac847f218b7e8edb241b09bd99a/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/67/7f/7ccf11756ae498fdedc3d689a0c36ace8fc82f9d52d3517da24adf6e9a74/aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/6b/4d/35ebc170b1856dd020c92376dbfe4297217625ef4004d56587024dc2289c/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/7b/24/46dc0380146f33e2e4aa088b92374b598f5bdcde1718c77e8d1a0094f1a4/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/2f/0a/46599d7d19b64f4d0fe1b57bdf96a9a40b5c125f0ae0d8899bc22e91fdce/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/08/86/b21b682e33d5ca317ef96bd21294984f72379454e689d7da584df1512a19/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab"}}, - {name = 
"aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/4f/45/f639482530b1396c365f23c5e3b1ae51c9bc02ba2b2248ca0c855a730059/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/e5/39635a9e06eed1d73671bd4079a3caf9cf09a49df08490686f45a710b80e/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28"}}, - {name = "aiohttp-3.12.14-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/51/e1/7f1c77515d369b7419c5b501196526dad3e72800946c0099594c1f0c20b4/aiohttp-3.12.14-cp311-cp311-win32.whl",hashes = {sha256 = "a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b"}}, - {name = "aiohttp-3.12.14-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/06/24/a6bf915c85b7a5b07beba3d42b3282936b51e4578b64a51e8e875643c276/aiohttp-3.12.14-cp311-cp311-win_amd64.whl",hashes = {sha256 = "0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced"}}, - {name = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/0c/88/f161f429f9de391eee6a5c2cffa54e2ecd5b7122ae99df247f7734dfefcb/aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248"}}, - {name = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/b5/24fa382a69a25d242e2baa3e56d5ea5227d1b68784521aaf3a1a8b34c9a4/aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb"}}, - {name = "aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/09/67/fda1bc34adbfaa950d98d934a23900918f9d63594928c70e55045838c943/aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/36/96/3ce1ea96d3cf6928b87cfb8cdd94650367f5c2f36e686a1f5568f0f13754/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/be/04/ddea06cb4bc7d8db3745cf95e2c42f310aad485ca075bd685f0e4f0f6b65/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/73/66/63942f104d33ce6ca7871ac6c1e2ebab48b88f78b2b7680c37de60f5e8cd/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/20/00/aab615742b953f04b48cb378ee72ada88555b47b860b98c21c458c030a23/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d6/4f/ef6d9f77225cf27747368c37b3d69fac1f8d6f9d3d5de2d410d155639524/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/37/e1/e98a43c15aa52e9219a842f18c59cbae8bbe2d50c08d298f17e9e8bafa38/aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/5c/29c6dfb49323bcdb0239bf3fc97ffcf0eaf86d3a60426a3287ec75d67721/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/79/60/ec90782084090c4a6b459790cfd8d17be2c5662c9c4b2d21408b2f2dc36c/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/22/89/205d3ad30865c32bc472ac13f94374210745b05bd0f2856996cb34d53396/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/48/ae/2f66edaa8bd6db2a4cba0386881eb92002cdc70834e2a93d1d5607132c7e/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/08/3a/fa73bfc6e21407ea57f7906a816f0dc73663d9549da703be05dbd76d2dc3/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e3/b3/751124b8ceb0831c17960d06ee31a4732cb4a6a006fdbfa1153d07c52226/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3"}}, - {name = "aiohttp-3.12.14-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/81/3c/72477a1d34edb8ab8ce8013086a41526d48b64f77e381c8908d24e1c18f5/aiohttp-3.12.14-cp310-cp310-win32.whl",hashes = {sha256 = "ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c"}}, - {name = "aiohttp-3.12.14-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a2/c4/8aec4ccf1b822ec78e7982bd5cf971113ecce5f773f04039c76a083116fc/aiohttp-3.12.14-cp310-cp310-win_amd64.whl",hashes = {sha256 = 
"cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db"}}, - {name = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/cf/54/8a65095784f5c8b2a60a8baa2baabb15b8d507efb0911d59f94af04ba908/aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae"}}, - {name = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/23/65a82d33841c790178aed8aa6b5e720e37f08bdf7256936fa3bc86f03257/aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b"}}, - {name = "aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/10/66/9d51ec40613aca2f38d6ac527b592686a302197109aa1c0fe045040835ec/aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/48/9e/2f14e4780a461351325d7821fb64e9107189315dd8f6e8a67e7afdbf875c/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/b8/26/26ef03e6cc4b7fb275eaa76b33c128f72729e8833e512b6770f877560b6e/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/68/cf/fffc2a9edacbd475cfb508075bad052426ce0b9100f1045536ee1b683872/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/0b/c5/bb8b29ef079d3ecb5960ec1b547b56bc52ee5ffc43c8a30ef21f9afeb67b/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/09/0d/d18e2d2754497bf91b9559425e8c4286af61bdbe42d49c43d955c7269680/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/33/c8/2c32cd25deb9f590cb8d50ff33fb3bb2cc8d1761958989f6f64cf00ef1cb/aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0f/36/1b36ae47b9d6afdd39072373bb7157b464996376d562d3c50950ddf6d10e/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/2b/e8/6864b7812351821168e80ca102d7fa244a78fefe9690995a40e8b5c19f4b/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/9b/55/f90e3eb25330f8a564a6e6b4d3cc15d3630bd28b0795a025e397e3279411/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1b/f7/39c3570434bb7e81601155ba71327735b26548473cca2d5c7f5badabb140/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/46/0d/caee8733fbe511c34a54e93ee26c4b8d505e12785444d31f772a610df7ab/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/24/f3/5d21196abf74dee66c5809e764cc27a2275e54c9355019c21be3bf77dd77/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf"}}, - {name = "aiohttp-3.12.14-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/54/bb/b4226f4fd0597d5245f284d10be48bf1ef610ab4f57d4239686fb03d1814/aiohttp-3.12.14-cp39-cp39-win32.whl",hashes = {sha256 = "a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0"}}, - {name = "aiohttp-3.12.14-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a0/c0/2f1cefb7b077bf5c19f01bdf0d82b89de0bf2801b441eda23ada0b8966ac/aiohttp-3.12.14-cp39-cp39-win_amd64.whl",hashes = {sha256 = "196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1563,17 +1399,73 @@ dependencies = [ ] [[packages]] -name = "async-timeout" -version = "5.0.1" -requires-python = ">=3.8" -sdist = {name = "async_timeout-5.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hashes = {sha256 = "d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}} +name = "multidict" +version = "6.6.3" +requires-python = ">=3.9" +sdist = {name = "multidict-6.6.3.tar.gz", url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hashes = {sha256 = "798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc"}} wheels = [ - {name = "async_timeout-5.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl",hashes = {sha256 = "39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}}, + {name = "multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl",url = 
"https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55"}}, + {name = "multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b"}}, + {name = "multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65"}}, + {name = "multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3"}}, + {name = "multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c"}}, + {name = "multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6"}}, + {name = "multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8"}}, + {name = "multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca"}}, + {name = "multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884"}}, + {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7"}}, + {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b"}}, + {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c"}}, + {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b"}}, + {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1"}}, + {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6"}}, + {name = "multidict-6.6.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl",hashes = {sha256 = "5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e"}}, + {name = "multidict-6.6.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9"}}, + {name = "multidict-6.6.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600"}}, + {name = "multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134"}}, + {name = "multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37"}}, + {name = "multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8"}}, + {name = "multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1"}}, + {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373"}}, + {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e"}}, + {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f"}}, + {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0"}}, + {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc"}}, + {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f"}}, + {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471"}}, + {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2"}}, + {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648"}}, + {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d"}}, + {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c"}}, + {name = "multidict-6.6.3-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl",hashes = {sha256 = "639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e"}}, + {name = "multidict-6.6.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d"}}, + {name = "multidict-6.6.3-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb"}}, + {name = "multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6"}}, + {name = "multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f"}}, + {name = "multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55"}}, + {name = "multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b"}}, + {name = 
"multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888"}}, + {name = "multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d"}}, + {name = "multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680"}}, + {name = "multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a"}}, + {name = "multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961"}}, + {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65"}}, + {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643"}}, + {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063"}}, + {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3"}}, + {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl",url = 
"https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75"}}, + {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10"}}, + {name = "multidict-6.6.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl",hashes = {sha256 = "73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5"}}, + {name = "multidict-6.6.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17"}}, + {name = "multidict-6.6.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b"}}, + {name = "multidict-6.6.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl",hashes = {sha256 = "8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a"}}, ] -marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] -dependencies = [] +dependencies = [ + "typing-extensions>=4.1.0; python_version < \"3.11\"", +] [[packages]] name = "h2" @@ -1664,129 +1556,6 @@ dependencies = [ "markdown-it-py<4.0.0,>=1.0.0", ] -[[packages]] -name = "multidict" -version = "6.6.3" -requires-python = ">=3.9" -sdist = {name = "multidict-6.6.3.tar.gz", url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hashes = {sha256 = "798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc"}} -wheels = [ - {name = "multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55"}}, - {name = "multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b"}}, - {name = "multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65"}}, - {name = 
"multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6"}}, - {name = "multidict-6.6.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl",hashes = {sha256 = "5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e"}}, - {name = "multidict-6.6.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9"}}, - {name = "multidict-6.6.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600"}}, - {name = "multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134"}}, - {name = "multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37"}}, - {name = "multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = 
"b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c"}}, - {name = "multidict-6.6.3-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl",hashes = {sha256 = "639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e"}}, - {name = "multidict-6.6.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d"}}, - {name = "multidict-6.6.3-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb"}}, - {name = "multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6"}}, - {name = "multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f"}}, - {name = "multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10"}}, - {name = "multidict-6.6.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl",hashes = {sha256 = "73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5"}}, - {name = "multidict-6.6.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl",hashes = 
{sha256 = "04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17"}}, - {name = "multidict-6.6.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b"}}, - {name = "multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/08/f0/1a39863ced51f639c81a5463fbfa9eb4df59c20d1a8769ab9ef4ca57ae04/multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c"}}, - {name = "multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/0e/a7cfa451c7b0365cd844e90b41e21fab32edaa1e42fc0c9f68461ce44ed7/multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df"}}, - {name = "multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c6/bb/a14a4efc5ee748cc1904b0748be278c31b9295ce5f4d2ef66526f410b94d/multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/c2/f8/410677d563c2d55e063ef74fe578f9d53fe6b0a51649597a5861f83ffa15/multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/fd/df/2b787f80059314a98e1ec6a4cc7576244986df3e56b3c755e6fc7c99e038/multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/05/f2/f9117089151b9a8ab39f9019620d10d9718eec2ac89e7ca9d30f3ec78e96/multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/93/2d/7115300ec5b699faa152c56799b089a53ed69e399c3c2d528251f0aeda1a/multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/15/ea/ff4bab367623e39c20d3b07637225c7688d79e4f3cc1f3b9f89867677f9a/multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/74/07/2c9246cda322dfe08be85f1b8739646f2c4c5113a1422d7a407763422ec4/multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a8/62/279c13d584207d5697a752a66ffc9bb19355a95f7659140cb1b3cf82180e/multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/69/cc/e06636f48c6d51e724a8bc8d9e1db5f136fe1df066d7cafe37ef4000f86a/multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/89/a4/66c9d8fb9acf3b226cdd468ed009537ac65b520aebdc1703dd6908b19d33/multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/cf/01/c69e0317be556e46257826d5449feb4e6aa0d18573e567a48a2c14156f1f/multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/c0/da/9cc1da0299762d20e626fe0042e71b5694f9f72d7d3f9678397cbaa71b2b/multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e6/91/b22756afec99cc31105ddd4a52f95ab32b1a4a58f4d417979c570c4a922e/multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183"}}, - {name = "multidict-6.6.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/be/f1/adcc185b878036a20399d5be5228f3cbe7f823d78985d101d425af35c800/multidict-6.6.3-cp311-cp311-win32.whl",hashes = {sha256 = "9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5"}}, - {name = "multidict-6.6.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e0/d4/27652c1c6526ea6b4f5ddd397e93f4232ff5de42bea71d339bc6a6cc497f/multidict-6.6.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2"}}, - {name = "multidict-6.6.3-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/16/18/23f4932019804e56d3c2413e237f866444b774b0263bcb81df2fdecaf593/multidict-6.6.3-cp311-cp311-win_arm64.whl",hashes = {sha256 = "d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb"}}, - {name = "multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/0b/67/414933982bce2efce7cbcb3169eaaf901e0f25baec69432b4874dfb1f297/multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "a2be5b7b35271f7fff1397204ba6708365e3d773579fe2a30625e16c4b4ce817"}}, - {name = "multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/8a/fe/d8a3ee1fad37dc2ef4f75488b0d9d4f25bf204aad8306cbab63d97bff64a/multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "12f4581d2930840295c461764b9a65732ec01250b46c6b2c510d7ee68872b140"}}, - {name = "multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1f/e0/265d89af8c98240265d82b8cbcf35897f83b76cd59ee3ab3879050fd8c45/multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "dd7793bab517e706c9ed9d7310b06c8672fd0aeee5781bfad612f56b8e0f7d14"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/e6/05/6b759379f7e8e04ccc97cfb2a5dcc5cdbd44a97f072b2272dc51281e6a40/multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "72d8815f2cd3cf3df0f83cac3f3ef801d908b2d90409ae28102e0553af85545a"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/4e/f5/8d5a15488edd9a91fa4aad97228d785df208ed6298580883aa3d9def1959/multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "531e331a2ee53543ab32b16334e2deb26f4e6b9b28e41f8e0c87e99a6c8e2d69"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/6e/b5/a8f317d47d0ac5bb746d6d8325885c8967c2a8ce0bb57be5399e3642cccb/multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "42ca5aa9329a63be8dc49040f63817d1ac980e02eeddba763a9ae5b4027b9c9c"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/76/88/18b2a0d5e80515fa22716556061189c2853ecf2aa2133081ebbe85ebea38/multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "208b9b9757060b9faa6f11ab4bc52846e4f3c2fb8b14d5680c8aac80af3dc751"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/62/bf/ebfcfd6b55a1b05ef16d0775ae34c0fe15e8dab570d69ca9941073b969e7/multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "acf6b97bd0884891af6a8b43d0f586ab2fcf8e717cbd47ab4bdddc09e20652d8"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/44/11/780615a98fd3775fc309d0234d563941af69ade2df0bb82c91dda6ddaea1/multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "68e9e12ed00e2089725669bdc88602b0b6f8d23c0c95e52b95f0bc69f7fe9b55"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/28/3d/35f33045e21034b388686213752cabc3a1b9d03e20969e6fa8f1b1d82db1/multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "05db2f66c9addb10cfa226e1acb363450fab2ff8a6df73c622fefe2f5af6d4e7"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/6e/cc/ff84c03b95b430015d2166d9aae775a3985d757b94f6635010d0038d9241/multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "0db58da8eafb514db832a1b44f8fa7906fdd102f7d982025f816a93ba45e3dcb"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/2e/f0/8cd49a0b37bdea673a4b793c2093f2f4ba8e7c9d6d7c9bd672fd6d38cd11/multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "14117a41c8fdb3ee19c743b1c027da0736fdb79584d61a766da53d399b71176c"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/96/19/5d9a0cfdafe65d82b616a45ae950975820289069f885328e8185e64283c2/multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "877443eaaabcd0b74ff32ebeed6f6176c71850feb7d6a1d2db65945256ea535c"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/e6/dc/c90066151da87d1e489f147b9b4327927241e65f1876702fafec6729c014/multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "70b72e749a4f6e7ed8fb334fa8d8496384840319512746a5f42fa0aec79f4d61"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ec/39/458afb0cccbb0ee9164365273be3e039efddcfcb94ef35924b7dbdb05db0/multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "43571f785b86afd02b3855c5ac8e86ec921b760298d6f82ff2a61daf5a35330b"}}, - {name = "multidict-6.6.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/35/38/0016adac3990426610a081787011177e661875546b434f50a26319dc8372/multidict-6.6.3-cp310-cp310-win32.whl",hashes = {sha256 = "20c5a0c3c13a15fd5ea86c42311859f970070e4e24de5a550e99d7c271d76318"}}, - {name = "multidict-6.6.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f3/d2/17897a8f3f2c5363d969b4c635aa40375fe1f09168dc09a7826780bfb2a4/multidict-6.6.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485"}}, - {name = "multidict-6.6.3-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/2d/5f/d4a717c1e457fe44072e33fa400d2b93eb0f2819c4d669381f925b7cba1f/multidict-6.6.3-cp310-cp310-win_arm64.whl",hashes = {sha256 = "769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5"}}, - {name = "multidict-6.6.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl",hashes = {sha256 = "8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a"}}, - {name = "multidict-6.6.3-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/d2/64/ba29bd6dfc895e592b2f20f92378e692ac306cf25dd0be2f8e0a0f898edb/multidict-6.6.3-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "c8161b5a7778d3137ea2ee7ae8a08cce0010de3b00ac671c5ebddeaa17cefd22"}}, - {name = "multidict-6.6.3-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/ca/cd/872ae4c134257dacebff59834983c1615d6ec863b6e3d360f3203aad8400/multidict-6.6.3-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "1328201ee930f069961ae707d59c6627ac92e351ed5b92397cf534d1336ce557"}}, - {name = "multidict-6.6.3-cp39-cp39-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/15/35/d417d8f62f2886784b76df60522d608aba39dfc83dd53b230ca71f2d4c53/multidict-6.6.3-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "b1db4d2093d6b235de76932febf9d50766cf49a5692277b2c28a501c9637f616"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/85/59/25cddf781f12cddb2386baa29744a3fdd160eb705539b48065f0cffd86d5/multidict-6.6.3-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "53becb01dd8ebd19d1724bebe369cfa87e4e7f29abbbe5c14c98ce4c383e16cd"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/21/4055b6a527954c572498a8068c26bd3b75f2b959080e17e12104b592273c/multidict-6.6.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "41bb9d1d4c303886e2d85bade86e59885112a7f4277af5ad47ab919a2251f306"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/58/98/17f1f80bdba0b2fef49cf4ba59cebf8a81797f745f547abb5c9a4039df62/multidict-6.6.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "775b464d31dac90f23192af9c291dc9f423101857e33e9ebf0020a10bfcf4144"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f8/0e/a5e595fdd0820069f0c29911d5dc9dc3a75ec755ae733ce59a4e6962ae42/multidict-6.6.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "d04d01f0a913202205a598246cf77826fe3baa5a63e9f6ccf1ab0601cf56eca0"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/66/9e/0f51e4cffea2daf24c137feabc9ec848ce50f8379c9badcbac00b41ab55e/multidict-6.6.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "d25594d3b38a2e6cabfdcafef339f754ca6e81fbbdb6650ad773ea9775af35ab"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/49/a0/a7cfc13c9a71ceb8c1c55457820733af9ce01e121139271f7b13e30c29d2/multidict-6.6.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "35712f1748d409e0707b165bf49f9f17f9e28ae85470c41615778f8d4f7d9609"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c7/50/7ae0d1149ac71cab6e20bb7faf2a1868435974994595dadfdb7377f7140f/multidict-6.6.3-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1c8082e5814b662de8589d6a06c17e77940d5539080cbab9fe6794b5241b76d9"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/b4/ac/2d0bf836c9c63a57360d57b773359043b371115e1c78ff648993bf19abd0/multidict-6.6.3-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "61af8a4b771f1d4d000b3168c12c3120ccf7284502a94aa58c68a81f5afac090"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/85/e1/68a65f069df298615591e70e48bfd379c27d4ecb252117c18bf52eebc237/multidict-6.6.3-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "448e4a9afccbf297577f2eaa586f07067441e7b63c8362a3540ba5a38dc0f14a"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ae/ab/702f1baca649f88ea1dc6259fc2aa4509f4ad160ba48c8e61fbdb4a5a365/multidict-6.6.3-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "233ad16999afc2bbd3e534ad8dbe685ef8ee49a37dbc2cdc9514e57b6d589ced"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/5e/0b/726e690bfbf887985a8710ef2f25f1d6dd184a35bd3b36429814f810a2fc/multidict-6.6.3-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "bb933c891cd4da6bdcc9733d048e994e22e1883287ff7540c2a0f3b117605092"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/bb/839486b27bcbcc2e0d875fb9d4012b4b6aa99639137343106aa7210e047a/multidict-6.6.3-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "37b09ca60998e87734699e88c2363abfd457ed18cfbf88e4009a4e83788e63ed"}}, - {name = "multidict-6.6.3-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/e3/46/574d75ab7b9ae8690fe27e89f5fcd0121633112b438edfb9ed2be8be096b/multidict-6.6.3-cp39-cp39-win32.whl",hashes = {sha256 = "f54cb79d26d0cd420637d184af38f0668558f3c4bbe22ab7ad830e67249f2e0b"}}, - {name = "multidict-6.6.3-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/78/c3/8b3bc755508b777868349f4bfa844d3d31832f075ee800a3d6f1807338c5/multidict-6.6.3-cp39-cp39-win_amd64.whl",hashes = {sha256 = "295adc9c0551e5d5214b45cf29ca23dbc28c2d197a9c30d51aed9e037cb7c578"}}, - {name = "multidict-6.6.3-cp39-cp39-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b2/30/5a66e7e4550e80975faee5b5dd9e9bd09194d2fd8f62363119b9e46e204b/multidict-6.6.3-cp39-cp39-win_arm64.whl",hashes = {sha256 = "15332783596f227db50fb261c2c251a58ac3873c457f3a550a95d5c0aa3c770d"}}, -] -marker = "\"default\" in dependency_groups or \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [ - "typing-extensions>=4.1.0; python_version < \"3.11\"", -] - [[packages]] name = "regex" version = "2024.11.6" @@ -1823,53 +1592,6 @@ wheels = [ {name = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}}, {name = "regex-2024.11.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl",hashes = {sha256 = "32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}}, {name = "regex-2024.11.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}}, - {name = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/58/58/7e4d9493a66c88a7da6d205768119f51af0f684fe7be7bac8328e217a52c/regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = 
"5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}}, - {name = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/34/4c/8f8e631fcdc2ff978609eaeef1d6994bf2f028b59d9ac67640ed051f1218/regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}}, - {name = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/1b/f0e4d13e6adf866ce9b069e191f303a30ab1277e037037a365c3aad5cc9c/regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = 
"4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}}, - {name = "regex-2024.11.6-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/26/b7/b14e2440156ab39e0177506c08c18accaf2b8932e39fb092074de733d868/regex-2024.11.6-cp311-cp311-win32.whl",hashes = {sha256 = "c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}}, - {name = "regex-2024.11.6-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl",hashes = {sha256 = "02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}}, - {name = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/95/3c/4651f6b130c6842a8f3df82461a8950f923925db8b6961063e82744bddcc/regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}}, - {name = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/51/9f35d12da8434b489c7b7bffc205c474a0a9432a889457026e9bc06a297a/regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}}, - {name = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bd/18/b731f5510d1b8fb63c6b6d3484bfa9a59b84cc578ac8b5172970e05ae07c/regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/78/a2/6dd36e16341ab95e4c6073426561b9bfdeb1a9c9b63ab1b579c2e96cb105/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1b/2b/323e72d5d2fd8de0d9baa443e1ed70363ed7e7b2fb526f5950c5cb99c364/regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/90/30/63373b9ea468fbef8a907fd273e5c329b8c9535fee36fc8dba5fecac475d/regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/f2/98/26d3830875b53071f1f0ae6d547f1d98e964dd29ad35cbf94439120bb67a/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/87/55/eb2a068334274db86208ab9d5599ffa63631b9f0f67ed70ea7c82a69bbc8/regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",url = "https://files.pythonhosted.org/packages/74/c0/be707bcfe98254d8f9d2cff55d216e946f4ea48ad2fd8cf1428f8c5332ba/regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",hashes = {sha256 = "f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/dc/bb45572ceb49e0f6509f7596e4ba7031f6819ecb26bc7610979af5a77f45/regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/5a/db/f43fd75dc4c0c2d96d0881967897926942e935d700863666f3c844a72ce6/regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/99/d7/f94154db29ab5a89d69ff893159b19ada89e76b915c1293e98603d39838c/regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/f7/17/3cbfab1f23356fbbf07708220ab438a7efa1e0f34195bf857433f79f1788/regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/f2/48b393b51900456155de3ad001900f94298965e1cad1c772b87f9cfea011/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}}, - {name = "regex-2024.11.6-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/45/3f/ef9589aba93e084cd3f8471fded352826dcae8489b650d0b9b27bc5bba8a/regex-2024.11.6-cp310-cp310-win32.whl",hashes = {sha256 = "b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}}, - {name = "regex-2024.11.6-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/42/7e/5f1b92c8468290c465fd50c5318da64319133231415a8aa6ea5ab995a815/regex-2024.11.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}}, - {name = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl",url = 
"https://files.pythonhosted.org/packages/89/23/c4a86df398e57e26f93b13ae63acce58771e04bdde86092502496fa57f9c/regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}}, - {name = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/8b/45c24ab7a51a1658441b961b86209c43e6bb9d39caf1e63f46ce6ea03bc7/regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}}, - {name = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7a/d1/598de10b17fdafc452d11f7dada11c3be4e379a8671393e4e3da3c4070df/regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/70/c7eaa219efa67a215846766fde18d92d54cb590b6a04ffe43cef30057622/regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/89/e5/ef52c7eb117dd20ff1697968219971d052138965a4d3d9b95e92e549f505/regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/5f/3f/9f5da81aff1d4167ac52711acf789df13e789fe6ac9545552e49138e3282/regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/44/2101cc0890c3621b90365c9ee8d7291a597c0722ad66eccd6ffa7f1bcc09/regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ce/2e/3e0668d8d1c7c3c0d397bf54d92fc182575b3a26939aed5000d3cc78760f/regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",url = "https://files.pythonhosted.org/packages/a6/49/1bc4584254355e3dba930a3a2fd7ad26ccba3ebbab7d9100db0aff2eedb0/regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",hashes = {sha256 = "764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c8/dd/42879c1fc8a37a887cd08e358af3d3ba9e23038cd77c7fe044a86d9450ba/regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}}, - {name = 
"regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/89/96/c05a0fe173cd2acd29d5e13c1adad8b706bcaa71b169e1ee57dcf2e74584/regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b5/f3/a757748066255f97f14506483436c5f6aded7af9e37bca04ec30c90ca683/regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/5c/93/c6d2092fd479dcaeea40fc8fa673822829181ded77d294a7f950f1dda6e2/regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/9c/daa99532c72f25051a90ef90e1413a8d54413a9e64614d9095b0c1c154d0/regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}}, - {name = "regex-2024.11.6-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/13/5d/61a533ccb8c231b474ac8e3a7d70155b00dfc61af6cafdccd1947df6d735/regex-2024.11.6-cp39-cp39-win32.whl",hashes = {sha256 = "41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}}, - {name = "regex-2024.11.6-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/dc/7b/e59b7f7c91ae110d154370c24133f947262525b5d6406df65f23422acc17/regex-2024.11.6-cp39-cp39-win_amd64.whl",hashes = {sha256 = "b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}}, ] marker = "\"default\" in dependency_groups or \"recommended\" in extras" @@ -1945,7 +1667,7 @@ sdist = {name = "platformdirs-4.3.8.tar.gz", url = "https://files.pythonhosted.o wheels = [ {name = "platformdirs-4.3.8-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl",hashes = {sha256 = "ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}}, ] -marker = "\"dev\" in extras" +marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -2019,58 +1741,7 @@ wheels = [ {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"}}, {name = "yarl-1.20.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl",hashes = {sha256 = "daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"}}, {name = "yarl-1.20.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"}}, - {name = "yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl",url = 
"https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e"}}, - {name = "yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b"}}, - {name = "yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e"}}, - {name = "yarl-1.20.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl",hashes = {sha256 = "597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773"}}, - {name = "yarl-1.20.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e"}}, - {name = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"}}, - {name = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"}}, - {name = "yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13"}}, - {name = "yarl-1.20.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl",hashes = {sha256 = "6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"}}, - {name = "yarl-1.20.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"}}, {name = "yarl-1.20.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl",hashes = {sha256 = "83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}}, - {name = "yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/01/75/0d37402d208d025afa6b5b8eb80e466d267d3fd1927db8e317d29a94a4cb/yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3"}}, - {name = "yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/84/1fb6c85ae0cf9901046f07d0ac9eb162f7ce6d95db541130aa542ed377e6/yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b"}}, - {name = "yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f3/9c/eae746b24c4ea29a5accba9a06c197a70fa38a49c7df244e0d3951108861/yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/fb/30/693e71003ec4bc1daf2e4cf7c478c417d0985e0a8e8f00b2230d517876fc/yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/0f/a2/5264dbebf90763139aeb0b0b3154763239398400f754ae19a0518b654117/yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e7/17/77c7a89b3c05856489777e922f41db79ab4faf58621886df40d812c7facd/yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/6d/55/28409330b8ef5f2f681f5b478150496ec9cf3309b149dab7ec8ab5cfa3f0/yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723"}}, - {name = 
"yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/85/58/cb0257cbd4002828ff735f44d3c5b6966c4fd1fc8cc1cd3cd8a143fbc513/yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/53/f6/c77960370cfa46f6fb3d6a5a79a49d3abfdb9ef92556badc2dcd2748bc2a/yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/64/ab/be0b10b8e029553c10905b6b00c64ecad3ebc8ace44b02293a62579343f6/yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/c5/c3/3f327bd3905a4916029bf5feb7f86dcf864c7704f099715f62155fb386b2/yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/d1/42/040bdd5d3b3bb02b4a6ace4ed4075e02f85df964d6e6cb321795d2a6496a/yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/0d/1c/911867b8e8c7463b84dfdc275e0d99b04b66ad5132b503f184fe76be8ea4/yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/e2/31/8c389f6c6ca0379b57b2da87f1f126c834777b4931c5ee8427dd65d0ff6b/yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7f/09/ae4a649fb3964324c70a3e2b61f45e566d9ffc0affd2b974cbf628957673/yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d"}}, - {name = "yarl-1.20.1-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/8d/43/bbb4ed4c34d5bb62b48bf957f68cd43f736f79059d4f85225ab1ef80f4b9/yarl-1.20.1-cp39-cp39-win32.whl",hashes = {sha256 = "b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06"}}, - {name = "yarl-1.20.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d7/cd/ce185848a7dba68ea69e932674b5c1a42a1852123584bccc5443120f857c/yarl-1.20.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2135,61 +1806,26 @@ wheels = [ {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"}}, {name = "propcache-0.3.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl",hashes = {sha256 = "df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"}}, {name = "propcache-0.3.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"}}, - {name = "propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be"}}, - {name = "propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f"}}, - {name = "propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2"}}, - {name = 
"propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e"}}, - {name = "propcache-0.3.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl",hashes = {sha256 = "36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897"}}, - {name = "propcache-0.3.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39"}}, - {name = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}}, - {name = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}}, - {name = "propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3"}}, - {name = 
"propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b"}}, - {name = 
"propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c"}}, - {name = "propcache-0.3.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl",hashes = {sha256 = "404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"}}, - {name = "propcache-0.3.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"}}, {name = "propcache-0.3.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl",hashes = {sha256 = "98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"}}, - {name = "propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/6c/39/8ea9bcfaaff16fd0b0fc901ee522e24c9ec44b4ca0229cfffb8066a06959/propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5"}}, - {name = "propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d3/85/cab84c86966e1d354cf90cdc4ba52f32f99a5bca92a1529d666d957d7686/propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4"}}, - {name = "propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/23/f7/9cb719749152d8b26d63801b3220ce2d3931312b2744d2b3a088b0ee9947/propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a2/a2/0b2b5a210ff311260002a315f6f9531b65a36064dfb804655432b2f7d3e3/propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3f/e0/7aff5de0c535f783b0c8be5bdb750c305c1961d69fbb136939926e155d98/propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/92/1d/65fa889eb3b2a7d6e4ed3c2b568a9cb8817547a1450b572de7bf24872800/propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/e2/eecf6989870988dfd731de408a6fa366e853d361a06c2133b5878ce821ad/propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = 
"31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/12/06/c32be4950967f18f77489268488c7cdc78cbfc65a8ba8101b15e526b83dc/propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/46/6c/17b521a6b3b7cbe277a4064ff0aa9129dd8c89f425a5a9b6b4dd51cc3ff4/propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/62/cb/3bdba2b736b3e45bc0e40f4370f745b3e711d439ffbffe3ae416393eece9/propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/29/bd/760c5c6a60a4a2c55a421bc34a25ba3919d49dee411ddb9d1493bb51d46e/propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/76/58/ced2757a46f55b8c84358d6ab8de4faf57cba831c51e823654da7144b13a/propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bb/ec/d98ea8d5a4d8fe0e372033f5254eddf3254344c0c5dc6c49ab84349e4733/propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/56/84/b6d8a7ecf3f62d7dd09d9d10bbf89fad6837970ef868b35b5ffa0d24d9de/propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe"}}, - {name = "propcache-0.3.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/bf/32/889f4903ddfe4a9dc61da71ee58b763758cf2d608fe1decede06e6467f8d/propcache-0.3.2-cp39-cp39-win32.whl",hashes = {sha256 = "4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1"}}, - {name = "propcache-0.3.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/67/74/d666795fb9ba1dc139d30de64f3b6fd1ff9c9d3d96ccfdb992cd715ce5d2/propcache-0.3.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "aiofiles" +version = "24.1.0" +requires-python = ">=3.8" +sdist = {name = "aiofiles-24.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hashes = {sha256 = "22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}} +wheels = [ + {name = "aiofiles-24.1.0-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl",hashes = {sha256 = "b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "aiohappyeyeballs" version = "2.6.1" @@ -2203,6 +1839,21 @@ marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "aiologic" +version = "0.14.0" +requires-python = ">=3.8" +sdist = {name = "aiologic-0.14.0.tar.gz", url = "https://files.pythonhosted.org/packages/7e/2d/e893dcfa041dab1d045abfc8898239747cde19881796640861609138d360/aiologic-0.14.0.tar.gz", hashes = {sha256 = "c87925fa2bfe9ae292859e1094eb8fb6d456c8202a16405b0a44134803c8a791"}} +wheels = [ + {name = "aiologic-0.14.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/4d/1f/f797b684fb4e11a5066ab464b460b5cfdbaedea9c4a3d0f0afc8e894ada0/aiologic-0.14.0-py3-none-any.whl",hashes = {sha256 = "cc59d39dc1d5e2575b4a6b5faf678b551fb0f910c7cb42e4c5f5689ffedcce78"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "wrapt>=1.16.0", +] + [[packages]] name = "aiosignal" version = "1.4.0" @@ -2276,58 +1927,7 @@ wheels = [ {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"}}, {name = "frozenlist-1.7.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl",hashes = {sha256 = "426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"}}, {name = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"}}, - {name = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a"}}, - {name = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750"}}, - {name = "frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = 
"4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl",url = 
"https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56"}}, - {name = "frozenlist-1.7.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl",hashes = {sha256 = "284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7"}}, - {name = "frozenlist-1.7.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d"}}, - {name = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/af/36/0da0a49409f6b47cc2d060dc8c9040b897b5902a8a4e37d9bc1deb11f680/frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"}}, - {name = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/77/f0/77c11d13d39513b298e267b22eb6cb559c103d56f155aa9a49097221f0b6/frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"}}, - {name = "frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/37/12/9d07fa18971a44150593de56b2f2947c46604819976784bcf6ea0d5db43b/frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/70/34/f73539227e06288fcd1f8a76853e755b2b48bca6747e99e283111c18bcd4/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/fb/68/c1d9c2f4a6e438e14613bad0f2973567586610cc22dcb1e1241da71de9d3/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b9/d0/98e8f9a515228d708344d7c6986752be3e3192d1795f748c24bcf154ad99/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/79/df/8a11bcec5600557f40338407d3e5bea80376ed1c01a6c0910fcfdc4b8993/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/50/82/41cb97d9c9a5ff94438c63cc343eb7980dac4187eb625a51bdfdb7707314/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/47/f9179ee5ee4f55629e4f28c660b3fdf2775c8bfde8f9c53f2de2d93f52a9/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1a/52/df81e41ec6b953902c8b7e3a83bee48b195cb0e5ec2eabae5d8330c78038/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/84/17/30d6ea87fa95a9408245a948604b82c1a4b8b3e153cea596421a2aef2754/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8f/00/ecbeb51669e3c3df76cf2ddd66ae3e48345ec213a55e3887d216eb4fbab3/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1a/c0/c224ce0e0eb31cc57f67742071bb470ba8246623c1823a7530be0e76164c/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/55/3c/34cb694abf532f31f365106deebdeac9e45c19304d83cf7d51ebbb4ca4d1/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/c0/2052d8b6cecda2e70bd81299e3512fa332abb6dcd2969b9c80dfcdddbf75/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718"}}, - {name = "frozenlist-1.7.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/c5/bf/7dcebae315436903b1d98ffb791a09d674c88480c158aa171958a3ac07f0/frozenlist-1.7.0-cp310-cp310-win32.whl",hashes = {sha256 = "400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"}}, - {name = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/8f/5f/f69818f017fa9a3d24d1ae39763e29b7f60a59e46d5f91b9c6b21622f4cd/frozenlist-1.7.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"}}, {name = "frozenlist-1.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl",hashes = {sha256 = "9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}}, - {name = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/dd/b1/ee59496f51cd244039330015d60f13ce5a54a0f2bd8d79e4a4a375ab7469/frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630"}}, - {name = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/75/e1/d518391ce36a6279b3fa5bc14327dde80bcb646bb50d059c6ca0756b8d05/frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71"}}, - {name = "frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b7/8d/a0d04f28b6e821a9685c22e67b5fb798a5a7b68752f104bfbc2dccf080c4/frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/93/3a/a5334c0535c8b7c78eeabda1579179e44fe3d644e07118e59a2276dedaf1/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/0a/67/8258d971f519dc3f278c55069a775096cda6610a267b53f6248152b72b2f/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/fc/89/8225905bf889b97c6d935dd3aeb45668461e59d415cb019619383a8a7c3b/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/54/6e/ef52375aa93d4bc510d061df06205fa6dcfd94cd631dd22956b09128f0d4/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ee/55/62c87d1a6547bfbcd645df10432c129100c5bd0fd92a384de6e3378b07c1/frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/45/d2/263fea1f658b8ad648c7d94d18a87bca7e8c67bd6a1bbf5445b1bd5b158c/frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/22/7145e35d12fb368d92124f679bea87309495e2e9ddf14c6533990cb69218/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/44/1e/7dae8c54301beb87bcafc6144b9a103bfd2c8f38078c7902984c9a0c4e5b/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/4b/1e/99c93e54aa382e949a98976a73b9b20c3aae6d9d893f31bbe4991f64e3a8/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/5e/9c/ca5105fa7fb5abdfa8837581be790447ae051da75d32f25c8f81082ffc45/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8d/4d/e99014756093b4ddbb67fb8f0df11fe7a415760d69ace98e2ac6d5d43402/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/72/a19a40bcdaa28a51add2aaa3a1a294ec357f36f27bd836a012e070c5e8a5/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb"}}, - {name = "frozenlist-1.7.0-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/08/49/0042469993e023a758af81db68c76907cd29e847d772334d4d201cbe9a42/frozenlist-1.7.0-cp39-cp39-win32.whl",hashes = {sha256 = "b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e"}}, - {name = "frozenlist-1.7.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5a/45/827d86ee475c877f5f766fbc23fb6acb6fada9e52f1c9720e2ba3eae32da/frozenlist-1.7.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2362,6 +1962,19 @@ marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "audioread" +version = "3.0.1" +requires-python = ">=3.6" +sdist = {name = "audioread-3.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/db/d2/87016ca9f083acadffb2d8da59bfa3253e4da7eeb9f71fb8e7708dc97ecd/audioread-3.0.1.tar.gz", hashes = {sha256 = "ac5460a5498c48bdf2e8e767402583a4dcd13f4414d286f42ce4379e8b35066d"}} +wheels = [ + {name = "audioread-3.0.1-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/57/8d/30aa32745af16af0a9a650115fbe81bde7c610ed5c21b381fca0196f3a7f/audioread-3.0.1-py3-none-any.whl",hashes = {sha256 = "4cdce70b8adc0da0a3c9e0d85fb10b3ace30fbdf8d1670fd443929b61d117c33"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "babel" version = "2.17.0" @@ -2431,87 +2044,76 @@ dependencies = [] [[packages]] name = "coverage" -version = "7.10.6" +version = "7.10.7" requires-python = ">=3.9" -sdist = {name = "coverage-7.10.6.tar.gz", url = "https://files.pythonhosted.org/packages/14/70/025b179c993f019105b79575ac6edb5e084fb0f0e63f15cdebef4e454fb5/coverage-7.10.6.tar.gz", hashes = {sha256 = "f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90"}} -wheels = [ - {name = "coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d3/aa/76cf0b5ec00619ef208da4689281d48b57f2c7fde883d14bf9441b74d59f/coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e"}}, - {name = "coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/65/91/8e41b8c7c505d398d7730206f3cbb4a875a35ca1041efc518051bfce0f6b/coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb"}}, - {name = "coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/87/7f/f718e732a423d442e6616580a951b8d1ec3575ea48bcd0e2228386805e79/coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034"}}, - {name = "coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/e6/52/c1106120e6d801ac03e12b5285e971e758e925b6f82ee9b86db3aa10045d/coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1"}}, - {name = "coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3d/ec/3a8645b1bb40e36acde9c0609f08942852a4af91a937fe2c129a38f2d3f5/coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a"}}, - {name = "coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a1/70/09ecb68eeb1155b28a1d16525fd3a9b65fbe75337311a99830df935d62b6/coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb"}}, - {name = "coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c6/80/47df374b893fa812e953b5bc93dcb1427a7b3d7a1a7d2db33043d17f74b9/coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d"}}, - {name = "coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/4a/65/9f98640979ecee1b0d1a7164b589de720ddf8100d1747d9bbdb84be0c0fb/coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747"}}, - {name = "coverage-7.10.6-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/1f/55/eeb6603371e6629037f47bd25bef300387257ed53a3c5fdb159b7ac8c651/coverage-7.10.6-cp314-cp314-win32.whl",hashes = {sha256 = "6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5"}}, - {name = "coverage-7.10.6-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/15/d1/a0912b7611bc35412e919a2cd59ae98e7ea3b475e562668040a43fb27897/coverage-7.10.6-cp314-cp314-win_amd64.whl",hashes = {sha256 = "adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713"}}, - {name = "coverage-7.10.6-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/ef/2d/11880bb8ef80a45338e0b3e0725e4c2d73ffbb4822c29d987078224fd6a5/coverage-7.10.6-cp314-cp314-win_arm64.whl",hashes = {sha256 = "a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32"}}, - {name = "coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/83/c0/1f00caad775c03a700146f55536ecd097a881ff08d310a58b353a1421be0/coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65"}}, - {name = "coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a9/c4/b1c5d2bd7cc412cbeb035e257fd06ed4e3e139ac871d16a07434e145d18d/coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6"}}, - {name = "coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/3f/07/4468d37c94724bf6ec354e4ec2f205fda194343e3e85fd2e59cec57e6a54/coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0"}}, - {name = "coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/d8/f8fb351be5fee31690cd8da768fd62f1cfab33c31d9f7baba6cd8960f6b8/coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e"}}, - {name = "coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e8/70/65d4d7cfc75c5c6eb2fed3ee5cdf420fd8ae09c4808723a89a81d5b1b9c3/coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5"}}, - {name = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/3c/069df106d19024324cde10e4ec379fe2fb978017d25e97ebee23002fbadf/coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7"}}, - {name = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/fc/8a/2974d53904080c5dc91af798b3a54a4ccb99a45595cc0dcec6eb9616a57d/coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5"}}, - {name = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/30/38/9616a6b49c686394b318974d7f6e08f38b8af2270ce7488e879888d1e5db/coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0"}}, - {name = "coverage-7.10.6-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/76/16/3ed2d6312b371a8cf804abf4e14895b70e4c3491c6e53536d63fd0958a8d/coverage-7.10.6-cp314-cp314t-win32.whl",hashes = {sha256 = "441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7"}}, - {name = "coverage-7.10.6-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d5/e5/d38d0cb830abede2adb8b147770d2a3d0e7fecc7228245b9b1ae6c24930a/coverage-7.10.6-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930"}}, - {name = "coverage-7.10.6-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f4/51/e48e550f6279349895b0ffcd6d2a690e3131ba3a7f4eafccc141966d4dea/coverage-7.10.6-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b"}}, - {name = "coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/e7/917e5953ea29a28c1057729c1d5af9084ab6d9c66217523fd0e10f14d8f6/coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6"}}, - {name = "coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/eb/86/2e161b93a4f11d0ea93f9bebb6a53f113d5d6e416d7561ca41bb0a29996b/coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80"}}, - {name = "coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/0e/66/d03348fdd8df262b3a7fb4ee5727e6e4936e39e2f3a842e803196946f200/coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003"}}, - {name = "coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/dd/508420fb47d09d904d962f123221bc249f64b5e56aa93d5f5f7603be475f/coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27"}}, - {name = "coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e9/1f/9020135734184f439da85c70ea78194c2730e56c2d18aee6e8ff1719d50d/coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4"}}, - {name = "coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a4/a4/3d228f3942bb5a2051fde28c136eea23a761177dc4ff4ef54533164ce255/coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d"}}, - {name = "coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/36/e3/293dce8cdb9a83de971637afc59b7190faad60603b40e32635cbd15fbf61/coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl",hashes 
= {sha256 = "28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc"}}, - {name = "coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/90/26/64eecfa214e80dd1d101e420cab2901827de0e49631d666543d0e53cf597/coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc"}}, - {name = "coverage-7.10.6-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/3e/70/bd80588338f65ea5b0d97e424b820fb4068b9cfb9597fbd91963086e004b/coverage-7.10.6-cp313-cp313-win32.whl",hashes = {sha256 = "160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e"}}, - {name = "coverage-7.10.6-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a7/14/0b831122305abcc1060c008f6c97bbdc0a913ab47d65070a01dc50293c2b/coverage-7.10.6-cp313-cp313-win_amd64.whl",hashes = {sha256 = "628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32"}}, - {name = "coverage-7.10.6-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/83/c6/81a83778c1f83f1a4a168ed6673eeedc205afb562d8500175292ca64b94e/coverage-7.10.6-cp313-cp313-win_arm64.whl",hashes = {sha256 = "df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2"}}, - {name = "coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d7/1c/ccccf4bf116f9517275fa85047495515add43e41dfe8e0bef6e333c6b344/coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b"}}, - {name = "coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/92/97/8a3ceff833d27c7492af4f39d5da6761e9ff624831db9e9f25b3886ddbca/coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393"}}, - {name = "coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/92/d8/50b4a32580cf41ff0423777a2791aaf3269ab60c840b62009aec12d3970d/coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27"}}, - {name = "coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/7e/6a7df5a6fb440a0179d94a348eb6616ed4745e7df26bf2a02bc4db72c421/coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df"}}, - {name = "coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3a/4c/a270a414f4ed5d196b9d3d67922968e768cd971d1b251e1b4f75e9362f75/coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb"}}, - {name = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9c/8b/3210d663d594926c12f373c5370bf1e7c5c3a427519a8afa65b561b9a55c/coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282"}}, - {name = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/72/d0/e1961eff67e9e1dba3fc5eb7a4caf726b35a5b03776892da8d79ec895775/coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4"}}, - {name = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3a/06/d6478d152cd189b33eac691cba27a40704990ba95de49771285f34a5861e/coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21"}}, - {name = "coverage-7.10.6-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/ed/73/737440247c914a332f0b47f7598535b29965bf305e19bbc22d4c39615d2b/coverage-7.10.6-cp313-cp313t-win32.whl",hashes = {sha256 = "92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0"}}, - {name = "coverage-7.10.6-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bd/76/b92d3214740f2357ef4a27c75a526eb6c28f79c402e9f20a922c295c05e2/coverage-7.10.6-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5"}}, - {name = "coverage-7.10.6-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/fc/8e/6dcb29c599c8a1f654ec6cb68d76644fe635513af16e932d2d4ad1e5ac6e/coverage-7.10.6-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b"}}, - {name = "coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/06/263f3305c97ad78aab066d116b52250dd316e74fcc20c197b61e07eb391a/coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea"}}, - {name = "coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e9/60/1e1ded9a4fe80d843d7d53b3e395c1db3ff32d6c301e501f393b2e6c1c1f/coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634"}}, - {name = "coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/b8/25/52136173c14e26dfed8b106ed725811bb53c30b896d04d28d74cb64318b3/coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6"}}, - {name = "coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/cb/1d/ae25a7dc58fcce8b172d42ffe5313fc267afe61c97fa872b80ee72d9515a/coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9"}}, - {name = "coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f5/7a/1f561d47743710fe996957ed7c124b421320f150f1d38523d8d9102d3e2a/coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c"}}, - {name = "coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/6c/ad/8b97cd5d28aecdfde792dcbf646bac141167a5cacae2cd775998b45fabb5/coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a"}}, - {name = "coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/33/6a/95c32b558d9a61858ff9d79580d3877df3eb5bc9eed0941b1f187c89e143/coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5"}}, - {name = "coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/0d/9c/8ce95dee640a38e760d5b747c10913e7a06554704d60b41e73fdea6a1ffd/coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972"}}, - {name = "coverage-7.10.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/04/12/7a55b0bdde78a98e2eb2356771fd2dcddb96579e8342bb52aa5bc52e96f0/coverage-7.10.6-cp312-cp312-win32.whl",hashes = {sha256 = "a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d"}}, - {name = "coverage-7.10.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/36/4a/32b185b8b8e327802c9efce3d3108d2fe2d9d31f153a0f7ecfd59c773705/coverage-7.10.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629"}}, - {name = "coverage-7.10.6-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/08/3a/d5d8dc703e4998038c3099eaf77adddb00536a3cec08c8dcd556a36a3eb4/coverage-7.10.6-cp312-cp312-win_arm64.whl",hashes = {sha256 = "acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80"}}, - {name = "coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/16/2bea27e212c4980753d6d563a0803c150edeaaddb0771a50d2afc410a261/coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f"}}, - {name = "coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2a/51/e7159e068831ab37e31aac0969d47b8c5ee25b7d307b51e310ec34869315/coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc"}}, - {name = "coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/e7/c0/246ccbea53d6099325d25cd208df94ea435cd55f0db38099dd721efc7a1f/coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a"}}, - {name = "coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/7d/fb/7435ef8ab9b2594a6e3f58505cc30e98ae8b33265d844007737946c59389/coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a"}}, - {name = "coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/51/f8/d9d64e8da7bcddb094d511154824038833c81e3a039020a9d6539bf303e9/coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 
= "ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62"}}, - {name = "coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/43/28/c43ba0ef19f446d6463c751315140d8f2a521e04c3e79e5c5fe211bfa430/coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153"}}, - {name = "coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/79/3e/53635bd0b72beaacf265784508a0b386defc9ab7fad99ff95f79ce9db555/coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5"}}, - {name = "coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/55/0964aa87126624e8c159e32b0bc4e84edef78c89a1a4b924d28dd8265625/coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619"}}, - {name = "coverage-7.10.6-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/eb/ab/6cfa9dc518c6c8e14a691c54e53a9433ba67336c760607e299bfcf520cb1/coverage-7.10.6-cp311-cp311-win32.whl",hashes = {sha256 = "e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba"}}, - {name = "coverage-7.10.6-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5b/18/99b25346690cbc55922e7cfef06d755d4abee803ef335baff0014268eff4/coverage-7.10.6-cp311-cp311-win_amd64.whl",hashes = {sha256 = "f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e"}}, - {name = "coverage-7.10.6-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/d8/ed/81d86648a07ccb124a5cf1f1a7788712b8d7216b593562683cd5c9b0d2c1/coverage-7.10.6-cp311-cp311-win_arm64.whl",hashes = {sha256 = "99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c"}}, - {name = "coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/1d/2e64b43d978b5bd184e0756a41415597dfef30fcbd90b747474bd749d45f/coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356"}}, - {name = "coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/23/62/b1e0f513417c02cc10ef735c3ee5186df55f190f70498b3702d516aad06f/coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301"}}, - {name = "coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/e7/16/b800640b7a43e7c538429e4d7223e0a94fd72453a1a048f70bf766f12e96/coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460"}}, - {name = "coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/fb/6f/5e03631c3305cad187eaf76af0b559fff88af9a0b0c180d006fb02413d7a/coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd"}}, - {name = "coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/eb/a1/f30ea0fb400b080730125b490771ec62b3375789f90af0bb68bfb8a921d7/coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb"}}, - {name = "coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/02/8e/cfa8fee8e8ef9a6bb76c7bef039f3302f44e615d2194161a21d3d83ac2e9/coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6"}}, - {name = "coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/93/a9/51be09b75c55c4f6c16d8d73a6a1d46ad764acca0eab48fa2ffaef5958fe/coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945"}}, - {name = "coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/a6/ba188b376529ce36483b2d585ca7bdac64aacbe5aa10da5978029a9c94db/coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e"}}, - {name = "coverage-7.10.6-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/d0/4c/37ed872374a21813e0d3215256180c9a382c3f5ced6f2e5da0102fc2fd3e/coverage-7.10.6-cp310-cp310-win32.whl",hashes = {sha256 = "86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1"}}, - {name = "coverage-7.10.6-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8e/36/9311352fdc551dec5b973b61f4e453227ce482985a9368305880af4f85dd/coverage-7.10.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528"}}, - {name = "coverage-7.10.6-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/44/0c/50db5379b615854b5cf89146f8f5bd1d5a9693d7f3a987e269693521c404/coverage-7.10.6-py3-none-any.whl",hashes = {sha256 = "92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3"}}, +sdist = {name = "coverage-7.10.7.tar.gz", url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hashes = {sha256 = "f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"}} +wheels = [ + {name = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"}}, + {name = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl",hashes = {sha256 = "925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"}}, + {name = "coverage-7.10.7-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl",hashes = {sha256 = "b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"}}, + {name = "coverage-7.10.7-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl",hashes = {sha256 = "1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"}}, + {name = "coverage-7.10.7-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl",hashes = {sha256 = "097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"}}, + {name = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"}}, + {name = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"}}, + {name = "coverage-7.10.7-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl",hashes = {sha256 = 
"67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"}}, + {name = "coverage-7.10.7-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"}}, + {name = "coverage-7.10.7-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"}}, + {name = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"}}, + {name = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"}}, + {name = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"}}, + {name = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"}}, + {name = 
"coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl",hashes = {sha256 = "b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"}}, + {name = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"}}, + {name = "coverage-7.10.7-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl",hashes = {sha256 = "dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"}}, + {name = "coverage-7.10.7-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl",hashes = {sha256 = "cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"}}, + {name = "coverage-7.10.7-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl",hashes = {sha256 = "4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"}}, + {name = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"}}, + {name = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"}}, + {name = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"}}, + {name = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"}}, + {name = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"}}, + {name = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"}}, + {name = "coverage-7.10.7-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl",hashes = {sha256 = "2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"}}, + {name = "coverage-7.10.7-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"}}, + {name = "coverage-7.10.7-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"}}, + {name = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"}}, + {name = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"}}, + {name = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"}}, + {name = 
"coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"}}, + {name = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"}}, + {name = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl",hashes = {sha256 = "5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"}}, + {name = "coverage-7.10.7-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl",hashes = {sha256 = "77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}}, + {name = "coverage-7.10.7-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl",hashes = {sha256 = "f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}}, + {name = "coverage-7.10.7-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl",hashes = {sha256 = "bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}}, + {name = "coverage-7.10.7-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl",hashes = {sha256 = "f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}}, ] marker = "\"dev\" in extras" @@ -2519,19 +2121,17 @@ marker = "\"dev\" in extras" dependencies = [] [[packages]] -name = "exceptiongroup" -version = "1.3.0" -requires-python = ">=3.7" -sdist = {name = "exceptiongroup-1.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hashes = {sha256 = "b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}} +name = "decorator" +version = "5.2.1" +requires-python = ">=3.8" +sdist = {name = "decorator-5.2.1.tar.gz", url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hashes = {sha256 = "65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}} wheels = [ - {name = "exceptiongroup-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl",hashes = {sha256 = "4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}}, + {name = "decorator-5.2.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl",hashes = {sha256 = "d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}}, ] -marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "\"default\" in dependency_groups" [packages.tool.pdm] -dependencies = [ - "typing-extensions>=4.6.0; python_version < \"3.13\"", -] +dependencies = [] [[packages]] name = "h11" @@ -2547,12 +2147,51 @@ marker = "\"default\" in dependency_groups or \"dev\" in extras" dependencies = [] [[packages]] -name = "identify" -version = "2.6.12" -requires-python = ">=3.9" -sdist = {name = "identify-2.6.12.tar.gz", url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hashes = {sha256 = "d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"}} +name = "html5tagger" +version = "1.3.0" +requires-python = ">=3.7" +sdist = {name = "html5tagger-1.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/9e/02/2ae5f46d517a2c1d4a17f2b1e4834c2c7cc0fb3a69c92389172fa16ab389/html5tagger-1.3.0.tar.gz", hashes = {sha256 = "84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}} wheels = [ - {name = "identify-2.6.12-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl",hashes = {sha256 = "ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"}}, + {name = "html5tagger-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/9b/12/2f5d43ee912ea14a6baba4b3db6d309b02d932e3b7074c3339b4aded98ff/html5tagger-1.3.0-py3-none-any.whl",hashes = {sha256 = "ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "httptools" +version = "0.6.4" +requires-python = ">=3.8.0" 
+sdist = {name = "httptools-0.6.4.tar.gz", url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hashes = {sha256 = "4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}} +wheels = [ + {name = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}}, + {name = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}}, + {name = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}}, + {name = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}}, + {name = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}}, + {name = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}}, + {name = "httptools-0.6.4-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl",hashes = {sha256 = "28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}}, + {name = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}}, + {name = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}}, + {name = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}}, + {name = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}}, + {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}}, + {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}}, + {name = "httptools-0.6.4-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl",hashes = {sha256 = "db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "identify" +version = "2.6.12" +requires-python = ">=3.9" +sdist = {name = "identify-2.6.12.tar.gz", url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hashes = {sha256 = "d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"}} +wheels = [ + {name = "identify-2.6.12-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl",hashes = {sha256 = "ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"}}, ] marker = "\"dev\" in extras" @@ -2573,34 +2212,47 @@ marker = "\"dev\" in extras" dependencies = [] [[packages]] -name = "importlib-metadata" -version = "8.7.0" -requires-python = ">=3.9" -sdist = {name = "importlib_metadata-8.7.0.tar.gz", url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hashes = {sha256 = "d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}} +name = "jinja2" +version = "3.1.6" +requires-python = ">=3.7" +sdist = {name = "jinja2-3.1.6.tar.gz", url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hashes = {sha256 = "0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}} wheels = [ - {name = "importlib_metadata-8.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl",hashes = {sha256 = "e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}}, + {name = 
"jinja2-3.1.6-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl",hashes = {sha256 = "85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}}, ] -marker = "python_full_version < \"3.10.2\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [ - "zipp>=3.20", - "typing-extensions>=3.6.4; python_version < \"3.8\"", + "MarkupSafe>=2.0", ] [[packages]] -name = "jinja2" -version = "3.1.6" +name = "joblib" +version = "1.5.2" +requires-python = ">=3.9" +sdist = {name = "joblib-1.5.2.tar.gz", url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hashes = {sha256 = "3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55"}} +wheels = [ + {name = "joblib-1.5.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl",hashes = {sha256 = "4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "lazy-loader" +version = "0.4" requires-python = ">=3.7" -sdist = {name = "jinja2-3.1.6.tar.gz", url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hashes = {sha256 = "0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}} +sdist = {name = "lazy_loader-0.4.tar.gz", url = "https://files.pythonhosted.org/packages/6f/6b/c875b30a1ba490860c93da4cabf479e03f584eba06fe5963f6f6644653d8/lazy_loader-0.4.tar.gz", hashes = {sha256 = "47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1"}} wheels = [ - {name = "jinja2-3.1.6-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl",hashes = {sha256 = "85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}}, + {name = "lazy_loader-0.4-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/83/60/d497a310bde3f01cb805196ac61b7ad6dc5dcf8dce66634dc34364b20b4f/lazy_loader-0.4-py3-none-any.whl",hashes = {sha256 = "342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc"}}, ] -marker = "\"dev\" in extras" +marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [ - "MarkupSafe>=2.0", + "packaging", + "importlib-metadata; python_version < \"3.8\"", ] [[packages]] @@ -2663,46 +2315,6 @@ wheels = [ {name = "lxml-6.0.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/bc/1f/962ea2696759abe331c3b0e838bb17e92224f39c638c2068bf0d8345e913/lxml-6.0.1-cp312-cp312-win32.whl",hashes = {sha256 = "987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb"}}, {name = "lxml-6.0.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/41/e2/22c86a990b51b44442b75c43ecb2f77b8daba8c4ba63696921966eac7022/lxml-6.0.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc"}}, {name = "lxml-6.0.1-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b2/21/dc0c73325e5eb94ef9c9d60dbb5dcdcb2e7114901ea9509735614a74e75a/lxml-6.0.1-cp312-cp312-win_arm64.whl",hashes = {sha256 = 
"c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299"}}, - {name = "lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/29/c8/262c1d19339ef644cdc9eb5aad2e85bd2d1fa2d7c71cdef3ede1a3eed84d/lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "c6acde83f7a3d6399e6d83c1892a06ac9b14ea48332a5fbd55d60b9897b9570a"}}, - {name = "lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/d4/1b0afbeb801468a310642c3a6f6704e53c38a4a6eb1ca6faea013333e02f/lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "0d21c9cacb6a889cbb8eeb46c77ef2c1dd529cde10443fdeb1de847b3193c541"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/5b/c1/8db9b5402bf52ceb758618313f7423cd54aea85679fcf607013707d854a8/lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "847458b7cd0d04004895f1fb2cca8e7c0f8ec923c49c06b7a72ec2d48ea6aca2"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/78/838e115358dd2369c1c5186080dd874a50a691fb5cd80db6afe5e816e2c6/lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "1dc13405bf315d008fe02b1472d2a9d65ee1c73c0a06de5f5a45e6e404d9a1c0"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c7/b6/bdcb3a3ddd2438c5b1a1915161f34e8c85c96dc574b0ef3be3924f36315c/lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "70f540c229a8c0a770dcaf6d5af56a5295e0fc314fc7ef4399d543328054bcea"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/73/e5/1bfb96185dc1a64c7c6fbb7369192bda4461952daa2025207715f9968205/lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "d2f73aef768c70e8deb8c4742fca4fd729b132fda68458518851c7735b55297e"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/ae/df3ea9ebc3c493b9c6bdc6bd8c554ac4e147f8d7839993388aab57ec606d/lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "e7f4066b85a4fa25ad31b75444bd578c3ebe6b8ed47237896341308e2ce923c3"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/37/b3/65e1e33600542c08bc03a4c5c9c306c34696b0966a424a3be6ffec8038ed/lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl",hashes = {sha256 = "0cce65db0cd8c750a378639900d56f89f7d6af11cd5eda72fde054d27c54b8ce"}}, - {name = "lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7a/46/ee3ed8f3a60e9457d7aea46542d419917d81dbfd5700fe64b2a36fb5ef61/lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c372d42f3eee5844b69dcab7b8d18b2f449efd54b46ac76970d6e06b8e8d9a66"}}, - {name = "lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/9c/b9/8394538e7cdbeb3bfa36bc74924be1a4383e0bb5af75f32713c2c4aa0479/lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "2e2b0e042e1408bbb1c5f3cfcb0f571ff4ac98d8e73f4bf37c5dd179276beedd"}}, - {name = "lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b3/21/3ef7da1ea2a73976c1a5a311d7cde5d379234eec0968ee609517714940b4/lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "cc73bb8640eadd66d25c5a03175de6801f63c535f0f3cf50cac2f06a8211f420"}}, - {name = "lxml-6.0.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/26/7d/0980016f124f00c572cba6f4243e13a8e80650843c66271ee692cddf25f3/lxml-6.0.1-cp311-cp311-win32.whl",hashes = {sha256 = "7c23fd8c839708d368e406282d7953cee5134f4592ef4900026d84566d2b4c88"}}, - {name = "lxml-6.0.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/08/28440437521f265eff4413eb2a65efac269c4c7db5fd8449b586e75d8de2/lxml-6.0.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "2516acc6947ecd3c41a4a4564242a87c6786376989307284ddb115f6a99d927f"}}, - {name = "lxml-6.0.1-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/dc/617e67296d98099213a505d781f04804e7b12923ecd15a781a4ab9181992/lxml-6.0.1-cp311-cp311-win_arm64.whl",hashes = {sha256 = "cb46f8cfa1b0334b074f40c0ff94ce4d9a6755d492e6c116adb5f4a57fb6ad96"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/41/37/41961f53f83ded57b37e65e4f47d1c6c6ef5fd02cb1d6ffe028ba0efa7d4/lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "b556aaa6ef393e989dac694b9c95761e32e058d5c4c11ddeef33f790518f7a5e"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/3d/47/8631ea73f3dc776fb6517ccde4d5bd5072f35f9eacbba8c657caa4037a69/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "64fac7a05ebb3737b79fd89fe5a5b6c5546aac35cfcfd9208eb6e5d13215771c"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/b8/39ae30ca3b1516729faeef941ed84bf8f12321625f2644492ed8320cb254/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "038d3c08babcfce9dc89aaf498e6da205efad5b7106c3b11830a488d4eadf56b"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/9c/ea/048dea6cdfc7a72d40ae8ed7e7d23cf4a6b6a6547b51b492a3be50af0e80/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "445f2cee71c404ab4259bc21e20339a859f75383ba2d7fb97dfe7c163994287b"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/d4/c2b46e432377c45d611ae2f669aa47971df1586c1a5240675801d0f02bac/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "e352d8578e83822d70bea88f3d08b9912528e4c338f04ab707207ab12f4b7aac"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b6/db/8f620f1ac62cf32554821b00b768dd5957ac8e3fd051593532be5b40b438/lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "51bd5d1a9796ca253db6045ab45ca882c09c071deafffc22e06975b7ace36300"}}, - {name = "lxml-6.0.1-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/b2/06/29693634ad5fc8ae0bab6723ba913c821c780614eea9ab9ebb5b2105d0e4/lxml-6.0.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "3b38e20c578149fdbba1fd3f36cb1928a3aaca4b011dfd41ba09d11fb396e1b9"}}, - {name = 
"lxml-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/97/e0/69d4113afbda9441f0e4d5574d9336535ead6a0608ee6751b3db0832ade0/lxml-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "11a052cbd013b7140bbbb38a14e2329b6192478344c99097e378c691b7119551"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/eb/3d/8fa1dbf48a3ea0d6c646f0129bef89a5ecf9a1cfe935e26e07554261d728/lxml-6.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "21344d29c82ca8547ea23023bb8e7538fa5d4615a1773b991edf8176a870c1ea"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/52/a48331a269900488b886d527611ab66238cddc6373054a60b3c15d4cefb2/lxml-6.0.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "aa8f130f4b2dc94baa909c17bb7994f0268a2a72b9941c872e8e558fd6709050"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/33/3b/8f6778a6fb9d30a692db2b1f5a9547dfcb674b27b397e1d864ca797486b1/lxml-6.0.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4588806a721552692310ebe9f90c17ac6c7c5dac438cd93e3d74dd60531c3211"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/42/15/c9364f23fa89ef2d3dbb896912aa313108820286223cfa833a0a9e183c9e/lxml-6.0.1-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "8466faa66b0353802fb7c054a400ac17ce2cf416e3ad8516eadeff9cba85b741"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/04/af/11985b0d47786161ddcdc53dc06142dc863b81a38da7f221c7b997dd5d4b/lxml-6.0.1-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "50b5e54f6a9461b1e9c08b4a3420415b538d4773bd9df996b9abcbfe95f4f1fd"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/6a/42/74b35ccc9ef1bb53f0487a4dace5ff612f1652d27faafe91ada7f7b9ee60/lxml-6.0.1-cp310-cp310-manylinux_2_31_armv7l.whl",hashes = {sha256 = "6f393e10685b37f15b1daef8aa0d734ec61860bb679ec447afa0001a31e7253f"}}, - {name = "lxml-6.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b0/5a/b934534f83561ad71fb64ba1753992e836ea73776cfb56fc0758dbb46bdf/lxml-6.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "07038c62fd0fe2743e2f5326f54d464715373c791035d7dda377b3c9a5d0ad77"}}, - {name = "lxml-6.0.1-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/6c/26/d833a56ec8ca943b696f3a7a1e54f97cfb63754c951037de5e222c011f3b/lxml-6.0.1-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "7a44a5fb1edd11b3a65c12c23e1049c8ae49d90a24253ff18efbcb6aa042d012"}}, - {name = "lxml-6.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3f/cb/601aa274c7cda51d0cc84a13d9639096c1191de9d9adf58f6c195d4822a2/lxml-6.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a57d9eb9aadf311c9e8785230eec83c6abb9aef2adac4c0587912caf8f3010b8"}}, - {name = "lxml-6.0.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/76/4e/e079f7b324e6d5f83007f30855448646e1cba74b5c30da1a081df75eba89/lxml-6.0.1-cp310-cp310-win32.whl",hashes = {sha256 = 
"d877874a31590b72d1fa40054b50dc33084021bfc15d01b3a661d85a302af821"}}, - {name = "lxml-6.0.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/65/0a/da298d7a96316c75ae096686de8d036d814ec3b72c7d643a2c226c364168/lxml-6.0.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c43460f4aac016ee0e156bfa14a9de9b3e06249b12c228e27654ac3996a46d5b"}}, - {name = "lxml-6.0.1-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0f/65/d7f61082fecf4543ab084e8bd3d4b9be0c1a0c83979f1fa2258e2a7987fb/lxml-6.0.1-cp310-cp310-win_arm64.whl",hashes = {sha256 = "615bb6c73fed7929e3a477a3297a797892846b253d59c84a62c98bdce3849a0a"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/61/ad51fbecaf741f825d496947b19d8aea0dcd323fdc2be304e93ce59f66f0/lxml-6.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0abfbaf4ebbd7fd33356217d317b6e4e2ef1648be6a9476a52b57ffc6d8d1780"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/1b/7f/310bef082cc69d0db46a8b9d8ca5f4a8fb41e1c5d299ef4ca5f391c4f12d/lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "1ebbf2d9775be149235abebdecae88fe3b3dd06b1797cd0f6dffe6948e85309d"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/cc/dc5833def5998c783500666468df127d6d919e8b9678866904e5680b0b13/lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "a389e9f11c010bd30531325805bbe97bdf7f728a73d0ec475adef57ffec60547"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/1b/dc/bdd4d413844b5348134444d64911f6f34b211f8b778361946d07623fc904/lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8f5cf2addfbbe745251132c955ad62d8519bb4b2c28b0aa060eca4541798d86e"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/14/e60e9d46972603753824eb7bea06fbe4153c627cc0f7110111253b7c9fc5/lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "f1b60a3287bf33a2a54805d76b82055bcc076e445fd539ee9ae1fe85ed373691"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/42/fa/268c9be8c69a418b8106e096687aba2b1a781fb6fc1b3f04955fac2be2b9/lxml-6.0.1-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "f7bbfb0751551a8786915fc6b615ee56344dacc1b1033697625b553aefdd9837"}}, ] marker = "\"recommended\" in extras" @@ -2745,38 +2357,8 @@ wheels = [ {name = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}}, {name = "MarkupSafe-3.0.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl",hashes = {sha256 = "0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}}, {name = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl",hashes = {sha256 = "6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = 
"70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl",url = 
"https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl",hashes = {sha256 = "8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}}, ] -marker = "\"dev\" in extras" +marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -2817,13 +2399,6 @@ requires-python = ">=3.8" sdist = {name = "multiprocess-0.70.16.tar.gz", url = 
"https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hashes = {sha256 = "161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}} wheels = [ {name = "multiprocess-0.70.16-py312-none-any.whl",url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl",hashes = {sha256 = "fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}}, - {name = "multiprocess-0.70.16-py311-none-any.whl",url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl",hashes = {sha256 = "af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}}, - {name = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ef/76/6e712a2623d146d314f17598df5de7224c85c0060ef63fd95cc15a25b3fa/multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",hashes = {sha256 = "476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}}, - {name = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/ab/1e6e8009e380e22254ff539ebe117861e5bdb3bff1fc977920972237c6c7/multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",hashes = {sha256 = "d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}}, - {name = "multiprocess-0.70.16-py310-none-any.whl",url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl",hashes = {sha256 = "c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}}, - {name = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d8/94/8638a89f93c80df329116e6781a060506c7e91e1f4370dc831e9d17a041d/multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl",hashes = {sha256 = "0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}}, - {name = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/21/222066f6bb8d8af287923ae3bd26cf4699a9ce020228ac273caca1de8250/multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl",hashes = {sha256 = "e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}}, - {name = "multiprocess-0.70.16-py39-none-any.whl",url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl",hashes = {sha256 = "a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}}, ] marker = "\"default\" in dependency_groups" @@ -2858,6 +2433,70 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "numba" +version = "0.62.1" +requires-python = ">=3.10" +sdist = {name = "numba-0.62.1.tar.gz", url = "https://files.pythonhosted.org/packages/a3/20/33dbdbfe60e5fd8e3dbfde299d106279a33d9f8308346022316781368591/numba-0.62.1.tar.gz", hashes = {sha256 = "7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161"}} +wheels = [ + {name = "numba-0.62.1-cp313-cp313-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/22/76/501ea2c07c089ef1386868f33dff2978f43f51b854e34397b20fc55e0a58/numba-0.62.1-cp313-cp313-macosx_10_15_x86_64.whl",hashes = {sha256 = 
"b72489ba8411cc9fdcaa2458d8f7677751e94f0109eeb53e5becfdc818c64afb"}}, + {name = "numba-0.62.1-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/80/68/444986ed95350c0611d5c7b46828411c222ce41a0c76707c36425d27ce29/numba-0.62.1-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "44a1412095534a26fb5da2717bc755b57da5f3053965128fe3dc286652cc6a92"}}, + {name = "numba-0.62.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/7e/bf2e3634993d57f95305c7cee4c9c6cb3c9c78404ee7b49569a0dfecfe33/numba-0.62.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "8c9460b9e936c5bd2f0570e20a0a5909ee6e8b694fd958b210e3bde3a6dba2d7"}}, + {name = "numba-0.62.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e8/b6/8a1723fff71f63bbb1354bdc60a1513a068acc0f5322f58da6f022d20247/numba-0.62.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "728f91a874192df22d74e3fd42c12900b7ce7190b1aad3574c6c61b08313e4c5"}}, + {name = "numba-0.62.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/9c/ec/9d414e7a80d6d1dc4af0e07c6bfe293ce0b04ea4d0ed6c45dad9bd6e72eb/numba-0.62.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "bbf3f88b461514287df66bc8d0307e949b09f2b6f67da92265094e8fa1282dd8"}}, + {name = "numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/fa/30fa6873e9f821c0ae755915a3ca444e6ff8d6a7b6860b669a3d33377ac7/numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl",hashes = {sha256 = "1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494"}}, + {name = "numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a9/d5/504ce8dc46e0dba2790c77e6b878ee65b60fe3e7d6d0006483ef6fde5a97/numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6"}}, + {name = "numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/5f/6a802741176c93f2ebe97ad90751894c7b0c922b52ba99a4395e79492205/numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59"}}, + {name = "numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/7e/df/efd21527d25150c4544eccc9d0b7260a5dec4b7e98b5a581990e05a133c0/numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "9315cc5e441300e0ca07c828a627d92a6802bcbf27c5487f31ae73783c58da53"}}, + {name = "numba-0.62.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/80/44/79bfdab12a02796bf4f1841630355c82b5a69933b1d50eb15c7fa37dabe8/numba-0.62.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "44e3aa6228039992f058f5ebfcfd372c83798e9464297bdad8cc79febcf7891e"}}, +] +marker = "python_version ~= \"3.12\"" + +[packages.tool.pdm] +dependencies = [ + "llvmlite<0.46,>=0.45.0dev0", + "numpy<2.4,>=1.22", +] + +[[packages]] +name = "llvmlite" +version = "0.45.1" +requires-python = ">=3.10" +sdist = {name = "llvmlite-0.45.1.tar.gz", url = "https://files.pythonhosted.org/packages/99/8d/5baf1cef7f9c084fb35a8afbde88074f0d6a727bc63ef764fe0e7543ba40/llvmlite-0.45.1.tar.gz", hashes = {sha256 = "09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32"}} +wheels = [ + 
{name = "llvmlite-0.45.1-cp313-cp313-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/1d/e2/c185bb7e88514d5025f93c6c4092f6120c6cea8fe938974ec9860fb03bbb/llvmlite-0.45.1-cp313-cp313-macosx_10_15_x86_64.whl",hashes = {sha256 = "d9ea9e6f17569a4253515cc01dade70aba536476e3d750b2e18d81d7e670eb15"}}, + {name = "llvmlite-0.45.1-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/09/b8/b5437b9ecb2064e89ccf67dccae0d02cd38911705112dd0dcbfa9cd9a9de/llvmlite-0.45.1-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "c9f3cadee1630ce4ac18ea38adebf2a4f57a89bd2740ce83746876797f6e0bfb"}}, + {name = "llvmlite-0.45.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/f7/97/ad1a907c0173a90dd4df7228f24a3ec61058bc1a9ff8a0caec20a0cc622e/llvmlite-0.45.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "57c48bf2e1083eedbc9406fb83c4e6483017879714916fe8be8a72a9672c995a"}}, + {name = "llvmlite-0.45.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/32/d8/c99c8ac7a326e9735401ead3116f7685a7ec652691aeb2615aa732b1fc4a/llvmlite-0.45.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3aa3dfceda4219ae39cf18806c60eeb518c1680ff834b8b311bd784160b9ce40"}}, + {name = "llvmlite-0.45.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/09/56/ed35668130e32dbfad2eb37356793b0a95f23494ab5be7d9bf5cb75850ee/llvmlite-0.45.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "080e6f8d0778a8239cd47686d402cb66eb165e421efa9391366a9b7e5810a38b"}}, + {name = "llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/e2/7c/82cbd5c656e8991bcc110c69d05913be2229302a92acb96109e166ae31fb/llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl",hashes = {sha256 = "28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e"}}, + {name = "llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/9d/bc/5314005bb2c7ee9f33102c6456c18cc81745d7055155d1218f1624463774/llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f"}}, + {name = "llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/76/0f7154952f037cb320b83e1c952ec4a19d5d689cf7d27cb8a26887d7bbc1/llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f"}}, + {name = "llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/00/b1/0b581942be2683ceb6862d558979e87387e14ad65a1e4db0e7dd671fa315/llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "779e2f2ceefef0f4368548685f0b4adde34e5f4b457e90391f570a10b348d433"}}, + {name = "llvmlite-0.45.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/33/94/9ba4ebcf4d541a325fd8098ddc073b663af75cc8b065b6059848f7d4dce7/llvmlite-0.45.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "9e6c9949baf25d9aa9cd7cf0f6d011b9ca660dd17f5ba2b23bdbdb77cc86b116"}}, +] +marker = "python_version ~= \"3.12\"" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "pooch" +version = "1.8.2" +requires-python = ">=3.7" +sdist = {name = "pooch-1.8.2.tar.gz", url = 
"https://files.pythonhosted.org/packages/c6/77/b3d3e00c696c16cf99af81ef7b1f5fe73bd2a307abca41bd7605429fe6e5/pooch-1.8.2.tar.gz", hashes = {sha256 = "76561f0de68a01da4df6af38e9955c4c9d1a5c90da73f7e40276a5728ec83d10"}} +wheels = [ + {name = "pooch-1.8.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a8/87/77cc11c7a9ea9fd05503def69e3d18605852cd0d4b0d3b8f15bbeb3ef1d1/pooch-1.8.2-py3-none-any.whl",hashes = {sha256 = "3529a57096f7198778a5ceefd5ac3ef0e4d06a6ddaf9fc2d609b806f25302c47"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "platformdirs>=2.5.0", + "packaging>=20.0", + "requests>=2.19.0", +] + [[packages]] name = "pyarrow" version = "20.0.0" @@ -2891,33 +2530,6 @@ wheels = [ {name = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b2/66/2d976c0c7158fd25591c8ca55aee026e6d5745a021915a1835578707feb3/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e"}}, {name = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/a9/dfb999c2fc6911201dcbf348247f9cc382a8990f9ab45c12eabfd7243a38/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a"}}, {name = "pyarrow-20.0.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a0/8e/9adee63dfa3911be2382fb4d92e4b2e7d82610f9d9f668493bebaa2af50f/pyarrow-20.0.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "96d6a0a37d9c98be08f5ed6a10831d88d52cac7b13f5287f1e0f625a0de8062b"}}, - {name = "pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/47/a2/b7930824181ceadd0c63c1042d01fa4ef63eee233934826a7a2a9af6e463/pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "24ca380585444cb2a31324c546a9a56abbe87e26069189e14bdba19c86c049f0"}}, - {name = "pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/9b/18/c765770227d7f5bdfa8a69f64b49194352325c66a5c3bb5e332dfd5867d9/pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl",hashes = {sha256 = "95b330059ddfdc591a3225f2d272123be26c8fa76e8c9ee1a77aad507361cfdb"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/44/fb/dfb2dfdd3e488bb14f822d7335653092dde150cffc2da97de6e7500681f9/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/58/0d/08a95878d38808051a953e887332d4a76bc06c6ee04351918ee1155407eb/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/cd/efa271234dfe38f0271561086eedcad7bc0f2ddd1efba423916ff0883684/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl",hashes = {sha256 = "7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/46/1f/7f02009bc7fc8955c391defee5348f510e589a020e4b40ca05edcb847854/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl",hashes = {sha256 = "a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62"}}, - {name = "pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/4f/92/692c562be4504c262089e86757a9048739fe1acb4024f92d39615e7bab3f/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c"}}, - {name = "pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/ec/9f5c7e7c828d8e0a3c7ef50ee62eca38a7de2fa6eb1b8fa43685c9414fef/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3"}}, - {name = "pyarrow-20.0.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/54/96/46613131b4727f10fd2ffa6d0d6f02efcc09a0e7374eff3b5771548aa95b/pyarrow-20.0.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "3346babb516f4b6fd790da99b98bed9708e3f02e734c84971faccb20736848dc"}}, - {name = "pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/23/77094eb8ee0dbe88441689cb6afc40ac312a1e15d3a7acc0586999518222/pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "c7dd06fd7d7b410ca5dc839cc9d485d2bc4ae5240851bcd45d85105cc90a47d7"}}, - {name = "pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/d5/48cc573aff00d62913701d9fac478518f693b30c25f2c157550b0b2565cb/pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl",hashes = {sha256 = "d5382de8dc34c943249b01c19110783d0d64b207167c728461add1ecc2db88e4"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/37/df/4099b69a432b5cb412dd18adc2629975544d656df3d7fda6d73c5dba935d/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/27/99922a9ac1c9226f346e3a1e15e63dee6f623ed757ff2893f9d6994a69d3/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/21/d1/71d91b2791b829c9e98f1e0d85be66ed93aff399f80abb99678511847eaa/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl",hashes = {sha256 = "5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f1/ca/ae10fba419a6e94329707487835ec721f5a95f3ac9168500bcf7aa3813c7/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl",hashes = {sha256 = "a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9"}}, - {name = "pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7a/a6/aba40a2bf01b5d00cf9cd16d427a5da1fad0fb69b514ce8c8292ab80e968/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75"}}, - {name = 
"pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/6b/98b39650cd64f32bf2ec6d627a9bd24fcb3e4e6ea1873c5e1ea8a83b1a18/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8"}}, - {name = "pyarrow-20.0.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ab/32/340238be1eb5037e7b5de7e640ee22334417239bc347eadefaf8c373936d/pyarrow-20.0.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "4a8b029a07956b8d7bd742ffca25374dd3f634b35e46cc7a7c3fa4c75b297191"}}, - {name = "pyarrow-20.0.0-cp39-cp39-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/10/53/421820fa125138c868729b930d4bc487af2c4b01b1c6104818aab7e98f13/pyarrow-20.0.0-cp39-cp39-macosx_12_0_arm64.whl",hashes = {sha256 = "1bcbe471ef3349be7714261dea28fe280db574f9d0f77eeccc195a2d161fd861"}}, - {name = "pyarrow-20.0.0-cp39-cp39-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/70/fd75e03312b715e90d928fb91ed8d45c9b0520346e5231b1c69293afd4c7/pyarrow-20.0.0-cp39-cp39-macosx_12_0_x86_64.whl",hashes = {sha256 = "a18a14baef7d7ae49247e75641fd8bcbb39f44ed49a9fc4ec2f65d5031aa3b96"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/e3/21e5758e46219fdedf5e6c800574dd9d17e962e80014cfe08d6d475be863/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "cb497649e505dc36542d0e68eca1a3c94ecbe9799cb67b578b55f2441a247fbc"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/ac/f5/ed6a4c4b11f9215092a35097a985485bb7d879cb79d93d203494e8604f4e/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "11529a2283cb1f6271d7c23e4a8f9f8b7fd173f7360776b668e509d712a02eec"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/44/e5/466a63668ba25788ee8d38d55f853a60469ae7ad1cda343db9f3f45e0b0a/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl",hashes = {sha256 = "6fc1499ed3b4b57ee4e090e1cea6eb3584793fe3d1b4297bbf53f09b434991a5"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e8/d7/4c4d4e4cf6e53e16a519366dfe9223ee4a7a38e6e28c1c0d372b38ba3fe7/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl",hashes = {sha256 = "db53390eaf8a4dab4dbd6d93c85c5cf002db24902dbff0ca7d988beb5c9dd15b"}}, - {name = "pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/d5/79effb32585b7c18897d3047a2163034f3f9c944d12f7b2fd8df6a2edc70/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "851c6a8260ad387caf82d2bbf54759130534723e37083111d4ed481cb253cc0d"}}, - {name = "pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/09/5c/f707603552c058b2e9129732de99a67befb1f13f008cc58856304a62c38b/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e22f80b97a271f0a7d9cd07394a7d348f80d3ac63ed7cc38b6d1b696ab3b2619"}}, - {name = "pyarrow-20.0.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/26/cc/1eb6a01c1bbc787f596c270c46bcd2273e35154a84afcb1d0cb4cc72457e/pyarrow-20.0.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "9965a050048ab02409fb7cbbefeedba04d3d67f2cc899eff505cc084345959ca"}}, ] marker = "\"default\" in 
dependency_groups" @@ -2941,11 +2553,6 @@ wheels = [ {name = "pycryptodomex-3.23.0-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/ca/18/4ca89ac737230b52ac8ffaca42f9c6f1fd07c81a6cd821e91af79db60632/pycryptodomex-3.23.0-cp313-cp313t-win32.whl",hashes = {sha256 = "a9d446e844f08299236780f2efa9898c818fe7e02f17263866b8550c7d5fb328"}}, {name = "pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/73/34/13e01c322db027682e00986873eca803f11c56ade9ba5bbf3225841ea2d4/pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "bc65bdd9fc8de7a35a74cab1c898cab391a4add33a8fe740bda00f5976ca4708"}}, {name = "pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/54/68/9504c8796b1805d58f4425002bcca20f12880e6fa4dc2fc9a668705c7a08/pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "c885da45e70139464f082018ac527fdaad26f1657a99ee13eecdce0f0ca24ab4"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/f3/b8/3e76d948c3c4ac71335bbe75dac53e154b40b0f8f1f022dfa295257a0c96/pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "ebfff755c360d674306e5891c564a274a47953562b42fb74a5c25b8fc1fb1cb5"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6a/cf/80f4297a4820dfdfd1c88cf6c4666a200f204b3488103d027b5edd9176ec/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "eca54f4bb349d45afc17e3011ed4264ef1cc9e266699874cdd1349c504e64798"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d1/42/1e969ee0ad19fe3134b0e1b856c39bd0b70d47a4d0e81c2a8b05727394c9/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4f2596e643d4365e14d0879dc5aafe6355616c61c2176009270f3048f6d9a61f"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/6e/c3/1de4f7631fea8a992a44ba632aa40e0008764c0fb9bf2854b0acf78c2cf2/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "fdfac7cda115bca3a5abb2f9e43bc2fb66c2b65ab074913643803ca7083a79ea"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f2/5f/af7da8e6f1e42b52f44a24d08b8e4c726207434e2593732d39e7af5e7256/pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe"}}, {name = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/dd/9c/1a8f35daa39784ed8adf93a694e7e5dc15c23c741bbda06e1d45f8979e9e/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl",hashes = {sha256 = "06698f957fe1ab229a99ba2defeeae1c09af185baa909a31a5d1f9d42b1aaed6"}}, {name = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/62/f5221a191a97157d240cf6643747558759126c76ee92f29a3f4aee3197a5/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl",hashes = {sha256 = "b2c2537863eccef2d41061e82a881dcabb04944c5c06c5aa7110b577cc487545"}}, {name = 
"pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/8c/fd/5a054543c8988d4ed7b612721d7e78a4b9bf36bc3c5ad45ef45c22d0060e/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "43c446e2ba8df8889e0e16f02211c25b4934898384c1ec1ec04d7889c0333587"}}, @@ -3018,6 +2625,55 @@ marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "sanic-routing" +version = "23.12.0" +sdist = {name = "sanic-routing-23.12.0.tar.gz", url = "https://files.pythonhosted.org/packages/d1/5c/2a7edd14fbccca3719a8d680951d4b25f986752c781c61ccf156a6d1ebff/sanic-routing-23.12.0.tar.gz", hashes = {sha256 = "1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}} +wheels = [ + {name = "sanic_routing-23.12.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/cf/e3/3425c9a8773807ac2c01d6a56c8521733f09b627e5827e733c5cd36b9ac5/sanic_routing-23.12.0-py3-none-any.whl",hashes = {sha256 = "1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "scikit-learn" +version = "1.7.2" +requires-python = ">=3.10" +sdist = {name = "scikit_learn-1.7.2.tar.gz", url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hashes = {sha256 = "20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda"}} +wheels = [ + {name = "scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/82/dee5acf66837852e8e68df6d8d3a6cb22d3df997b733b032f513d95205b7/scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33"}}, + {name = "scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/3c/30/9029e54e17b87cb7d50d51a5926429c683d5b4c1732f0507a6c3bed9bf65/scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl",hashes = {sha256 = "f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615"}}, + {name = "scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/60/18/4a52c635c71b536879f4b971c2cedf32c35ee78f48367885ed8025d1f7ee/scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106"}}, + {name = "scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/99/7e/290362f6ab582128c53445458a5befd471ed1ea37953d5bcf80604619250/scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61"}}, + {name = "scikit_learn-1.7.2-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8e/87/24f541b6d62b1794939ae6422f8023703bbf6900378b2b34e0b4384dfefd/scikit_learn-1.7.2-cp314-cp314-win_amd64.whl",hashes = {sha256 = "bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8"}}, + {name = "scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/93/a3038cb0293037fd335f77f31fe053b89c72f17b1c8908c576c29d953e84/scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7"}}, + {name = "scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/40/dd/9a88879b0c1104259136146e4742026b52df8540c39fec21a6383f8292c7/scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe"}}, + {name = "scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/af/c5e286471b7d10871b811b72ae794ac5fe2989c0a2df07f0ec723030f5f5/scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f"}}, + {name = "scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/fd/df59faa53312d585023b2da27e866524ffb8faf87a68516c23896c718320/scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0"}}, + {name = "scikit_learn-1.7.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a7/c7/03000262759d7b6f38c836ff9d512f438a70d8a8ddae68ee80de72dcfb63/scikit_learn-1.7.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c"}}, + {name = "scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/55/87/ef5eb1f267084532c8e4aef98a28b6ffe7425acbfd64b5e2f2e066bc29b3/scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8"}}, + {name = "scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/93/f8/6c1e3fc14b10118068d7938878a9f3f4e6d7b74a8ddb1e5bed65159ccda8/scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl",hashes = {sha256 = "2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a"}}, + {name = "scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/83/87/066cafc896ee540c34becf95d30375fe5cbe93c3b75a0ee9aa852cd60021/scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c"}}, + {name = "scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/9c/2b/4903e1ccafa1f6453b1ab78413938c8800633988c838aa0be386cbb33072/scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c"}}, + {name = "scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b5/aa/8444be3cfb10451617ff9d177b3c190288f4563e6c50ff02728be67ad094/scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973"}}, + {name = "scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/a7/aa/3996e2196075689afb9fce0410ebdb4a09099d7964d061d7213700204409/scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96"}}, + {name = 
"scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/43/5d/779320063e88af9c4a7c2cf463ff11c21ac9c8bd730c4a294b0000b666c9/scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl",hashes = {sha256 = "acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476"}}, + {name = "scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/d0/0c577d9325b05594fdd33aa970bf53fb673f051a45496842caee13cfd7fe/scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b"}}, + {name = "scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/82/70/8bf44b933837ba8494ca0fc9a9ab60f1c13b062ad0197f60a56e2fc4c43e/scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44"}}, + {name = "scikit_learn-1.7.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c6/99/ed35197a158f1fdc2fe7c3680e9c70d0128f662e1fee4ed495f4b5e13db0/scikit_learn-1.7.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290"}}, +] +marker = "python_version ~= \"3.12\"" + +[packages.tool.pdm] +dependencies = [ + "numpy>=1.22.0", + "scipy>=1.8.0", + "joblib>=1.2.0", + "threadpoolctl>=3.1.0", +] + [[packages]] name = "snowballstemmer" version = "3.0.1" @@ -3031,6 +2687,111 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "soundfile" +version = "0.13.1" +sdist = {name = "soundfile-0.13.1.tar.gz", url = "https://files.pythonhosted.org/packages/e1/41/9b873a8c055582859b239be17902a85339bec6a30ad162f98c9b0288a2cc/soundfile-0.13.1.tar.gz", hashes = {sha256 = "b2c68dab1e30297317080a5b43df57e302584c49e2942defdde0acccc53f0e5b"}} +wheels = [ + {name = "soundfile-0.13.1-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/64/28/e2a36573ccbcf3d57c00626a21fe51989380636e821b341d36ccca0c1c3a/soundfile-0.13.1-py2.py3-none-any.whl",hashes = {sha256 = "a23c717560da2cf4c7b5ae1142514e0fd82d6bbd9dfc93a50423447142f2c445"}}, + {name = "soundfile-0.13.1-py2.py3-none-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/ea/ab/73e97a5b3cc46bba7ff8650a1504348fa1863a6f9d57d7001c6b67c5f20e/soundfile-0.13.1-py2.py3-none-macosx_10_9_x86_64.whl",hashes = {sha256 = "82dc664d19831933fe59adad199bf3945ad06d84bc111a5b4c0d3089a5b9ec33"}}, + {name = "soundfile-0.13.1-py2.py3-none-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a0/e5/58fd1a8d7b26fc113af244f966ee3aecf03cb9293cb935daaddc1e455e18/soundfile-0.13.1-py2.py3-none-macosx_11_0_arm64.whl",hashes = {sha256 = "743f12c12c4054921e15736c6be09ac26b3b3d603aef6fd69f9dde68748f2593"}}, + {name = "soundfile-0.13.1-py2.py3-none-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/58/ae/c0e4a53d77cf6e9a04179535766b3321b0b9ced5f70522e4caf9329f0046/soundfile-0.13.1-py2.py3-none-manylinux_2_28_aarch64.whl",hashes = {sha256 = "9c9e855f5a4d06ce4213f31918653ab7de0c5a8d8107cd2427e44b42df547deb"}}, + {name = "soundfile-0.13.1-py2.py3-none-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/57/5e/70bdd9579b35003a489fc850b5047beeda26328053ebadc1fb60f320f7db/soundfile-0.13.1-py2.py3-none-manylinux_2_28_x86_64.whl",hashes = {sha256 = 
"03267c4e493315294834a0870f31dbb3b28a95561b80b134f0bd3cf2d5f0e618"}}, + {name = "soundfile-0.13.1-py2.py3-none-win32.whl",url = "https://files.pythonhosted.org/packages/fe/df/8c11dc4dfceda14e3003bb81a0d0edcaaf0796dd7b4f826ea3e532146bba/soundfile-0.13.1-py2.py3-none-win32.whl",hashes = {sha256 = "c734564fab7c5ddf8e9be5bf70bab68042cd17e9c214c06e365e20d64f9a69d5"}}, + {name = "soundfile-0.13.1-py2.py3-none-win_amd64.whl",url = "https://files.pythonhosted.org/packages/14/e9/6b761de83277f2f02ded7e7ea6f07828ec78e4b229b80e4ca55dd205b9dc/soundfile-0.13.1-py2.py3-none-win_amd64.whl",hashes = {sha256 = "1e70a05a0626524a69e9f0f4dd2ec174b4e9567f4d8b6c11d38b5c289be36ee9"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "cffi>=1.0", + "numpy", +] + +[[packages]] +name = "cffi" +version = "2.0.0" +requires-python = ">=3.9" +sdist = {name = "cffi-2.0.0.tar.gz", url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hashes = {sha256 = "44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}} +wheels = [ + {name = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}}, + {name = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}}, + {name = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}}, + {name = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",hashes = {sha256 = "12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}}, + {name = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl",url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl",hashes = {sha256 = "d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}}, + {name = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}}, + {name = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}}, + {name = 
"cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}}, + {name = "cffi-2.0.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl",hashes = {sha256 = "087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}}, + {name = "cffi-2.0.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}}, + {name = "cffi-2.0.0-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl",hashes = {sha256 = "dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}}, + {name = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}}, + {name = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}}, + {name = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}}, + {name = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",hashes = {sha256 = "92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}}, + {name = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl",url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl",hashes = {sha256 = "b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}}, + {name = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}}, + {name = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}}, + {name = 
"cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}}, + {name = "cffi-2.0.0-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl",hashes = {sha256 = "1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}}, + {name = "cffi-2.0.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}}, + {name = "cffi-2.0.0-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}}, + {name = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}}, + {name = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}}, + {name = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}}, + {name = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}}, + {name = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",hashes = {sha256 = "f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}}, + {name = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl",url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl",hashes = {sha256 = "dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}}, + {name = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = 
{sha256 = "c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}}, + {name = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}}, + {name = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}}, + {name = "cffi-2.0.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl",hashes = {sha256 = "74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}}, + {name = "cffi-2.0.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}}, + {name = "cffi-2.0.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}}, + {name = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}}, + {name = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}}, + {name = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}}, + {name = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}}, + {name = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl",hashes = {sha256 = "1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}}, + {name = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl",url = 
"https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl",hashes = {sha256 = "81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}}, + {name = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}}, + {name = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}}, + {name = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}}, + {name = "cffi-2.0.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl",hashes = {sha256 = "da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}}, + {name = "cffi-2.0.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}}, + {name = "cffi-2.0.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "pycparser; implementation_name != \"PyPy\"", +] + +[[packages]] +name = "soxr" +version = "1.0.0" +requires-python = ">=3.9" +sdist = {name = "soxr-1.0.0.tar.gz", url = "https://files.pythonhosted.org/packages/42/7e/f4b461944662ad75036df65277d6130f9411002bfb79e9df7dff40a31db9/soxr-1.0.0.tar.gz", hashes = {sha256 = "e07ee6c1d659bc6957034f4800c60cb8b98de798823e34d2a2bba1caa85a4509"}} +wheels = [ + {name = "soxr-1.0.0-cp314-cp314t-macosx_10_14_x86_64.whl",url = "https://files.pythonhosted.org/packages/99/77/d3b3c25b4f1b1aa4a73f669355edcaee7a52179d0c50407697200a0e55b9/soxr-1.0.0-cp314-cp314t-macosx_10_14_x86_64.whl",hashes = {sha256 = "392a5c70c04eb939c9c176bd6f654dec9a0eaa9ba33d8f1024ed63cf68cdba0a"}}, + {name = "soxr-1.0.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8a/ee/3ca73e18781bb2aff92b809f1c17c356dfb9a1870652004bd432e79afbfa/soxr-1.0.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "fdc41a1027ba46777186f26a8fba7893be913383414135577522da2fcc684490"}}, + {name = "soxr-1.0.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/bd/f0/eea8b5f587a2531657dc5081d2543a5a845f271a3bea1c0fdee5cebde021/soxr-1.0.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = 
"449acd1dfaf10f0ce6dfd75c7e2ef984890df94008765a6742dafb42061c1a24"}}, + {name = "soxr-1.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/64/59/2430a48c705565eb09e78346950b586f253a11bd5313426ced3ecd9b0feb/soxr-1.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "38b35c99e408b8f440c9376a5e1dd48014857cd977c117bdaa4304865ae0edd0"}}, + {name = "soxr-1.0.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3c/1b/f84a2570a74094e921bbad5450b2a22a85d58585916e131d9b98029c3e69/soxr-1.0.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "a39b519acca2364aa726b24a6fd55acf29e4c8909102e0b858c23013c38328e5"}}, + {name = "soxr-1.0.0-cp312-abi3-macosx_10_14_x86_64.whl",url = "https://files.pythonhosted.org/packages/c5/c7/f92b81f1a151c13afb114f57799b86da9330bec844ea5a0d3fe6a8732678/soxr-1.0.0-cp312-abi3-macosx_10_14_x86_64.whl",hashes = {sha256 = "abecf4e39017f3fadb5e051637c272ae5778d838e5c3926a35db36a53e3a607f"}}, + {name = "soxr-1.0.0-cp312-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ff/1d/c945fea9d83ea1f2be9d116b3674dbaef26ed090374a77c394b31e3b083b/soxr-1.0.0-cp312-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "e973d487ee46aa8023ca00a139db6e09af053a37a032fe22f9ff0cc2e19c94b4"}}, + {name = "soxr-1.0.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/b5/80/10640970998a1d2199bef6c4d92205f36968cddaf3e4d0e9fe35ddd405bd/soxr-1.0.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "e8ce273cca101aff3d8c387db5a5a41001ba76ef1837883438d3c652507a9ccc"}}, + {name = "soxr-1.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b1/87/2726603c13c2126cb8ded9e57381b7377f4f0df6ba4408e1af5ddbfdc3dd/soxr-1.0.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "e8f2a69686f2856d37823bbb7b78c3d44904f311fe70ba49b893af11d6b6047b"}}, + {name = "soxr-1.0.0-cp312-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ce/04/530252227f4d0721a5524a936336485dfb429bb206a66baf8e470384f4a2/soxr-1.0.0-cp312-abi3-win_amd64.whl",hashes = {sha256 = "2a3b77b115ae7c478eecdbd060ed4f61beda542dfb70639177ac263aceda42a2"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "numpy", +] + [[packages]] name = "sphinxcontrib-htmlhelp" version = "2.1.0" @@ -3057,6 +2818,116 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "sympy" +version = "1.14.0" +requires-python = ">=3.9" +sdist = {name = "sympy-1.14.0.tar.gz", url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hashes = {sha256 = "d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517"}} +wheels = [ + {name = "sympy-1.14.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl",hashes = {sha256 = "e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "mpmath<1.4,>=1.1.0", +] + +[[packages]] +name = "mpmath" +version = "1.3.0" +sdist = {name = "mpmath-1.3.0.tar.gz", url = 
"https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hashes = {sha256 = "7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}} +wheels = [ + {name = "mpmath-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl",hashes = {sha256 = "a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "threadpoolctl" +version = "3.6.0" +requires-python = ">=3.9" +sdist = {name = "threadpoolctl-3.6.0.tar.gz", url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hashes = {sha256 = "8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"}} +wheels = [ + {name = "threadpoolctl-3.6.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl",hashes = {sha256 = "43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "tracerite" +version = "1.1.3" +sdist = {name = "tracerite-1.1.3.tar.gz", url = "https://files.pythonhosted.org/packages/27/b2/37b825b881f23bc56384c3142214ccbe5d9de7e7c5fe3d155fa032738b98/tracerite-1.1.3.tar.gz", hashes = {sha256 = "119fc006f240aa03fffb41cf99cf82fda5c0449c7d4b6fe42c6340403578b31e"}} +wheels = [ + {name = "tracerite-1.1.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/e6/bf/c65d36ec5a93048dd55b3247be26059970daad72263e35ecace2f3188b2c/tracerite-1.1.3-py3-none-any.whl",hashes = {sha256 = "811d8e2e0fb563b77340eebe2e9f7b324acfe01e09ea58db8bcaecb24327c823"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "html5tagger>=1.2.1", +] + +[[packages]] +name = "ujson" +version = "5.11.0" +requires-python = ">=3.9" +sdist = {name = "ujson-5.11.0.tar.gz", url = "https://files.pythonhosted.org/packages/43/d9/3f17e3c5773fb4941c68d9a37a47b1a79c9649d6c56aefbed87cc409d18a/ujson-5.11.0.tar.gz", hashes = {sha256 = "e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0"}} +wheels = [ + {name = "ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/28/08/4518146f4984d112764b1dfa6fb7bad691c44a401adadaa5e23ccd930053/ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "65724738c73645db88f70ba1f2e6fb678f913281804d5da2fd02c8c5839af302"}}, + {name = "ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/29/37/2107b9a62168867a692654d8766b81bd2fd1e1ba13e2ec90555861e02b0c/ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "29113c003ca33ab71b1b480bde952fbab2a0b6b03a4ee4c3d71687cdcbd1a29d"}}, + {name = "ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/f8/25583c70f83788edbe3ca62ce6c1b79eff465d78dec5eb2b2b56b3e98b33/ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c44c703842024d796b4c78542a6fcd5c3cb948b9fc2a73ee65b9c86a22ee3638"}}, + {name = "ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = 
"https://files.pythonhosted.org/packages/ed/ca/19b3a632933a09d696f10dc1b0dfa1d692e65ad507d12340116ce4f67967/ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "e750c436fb90edf85585f5c62a35b35082502383840962c6983403d1bd96a02c"}}, + {name = "ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/55/7a/4572af5324ad4b2bfdd2321e898a527050290147b4ea337a79a0e4e87ec7/ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "f278b31a7c52eb0947b2db55a5133fbc46b6f0ef49972cd1a80843b72e135aba"}}, + {name = "ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/71/a2b8c19cf4e1efe53cf439cdf7198ac60ae15471d2f1040b490c1f0f831f/ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ab2cb8351d976e788669c8281465d44d4e94413718af497b4e7342d7b2f78018"}}, + {name = "ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/7a/3e/7b98668cba3bb3735929c31b999b374ebc02c19dfa98dfebaeeb5c8597ca/ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "090b4d11b380ae25453100b722d0609d5051ffe98f80ec52853ccf8249dfd840"}}, + {name = "ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a1/ea/8870f208c20b43571a5c409ebb2fe9b9dba5f494e9e60f9314ac01ea8f78/ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "80017e870d882d5517d28995b62e4e518a894f932f1e242cbc802a2fd64d365c"}}, + {name = "ujson-5.11.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/63/b6/c0e6607e37fa47929920a685a968c6b990a802dec65e9c5181e97845985d/ujson-5.11.0-cp314-cp314-win32.whl",hashes = {sha256 = "1d663b96eb34c93392e9caae19c099ec4133ba21654b081956613327f0e973ac"}}, + {name = "ujson-5.11.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/4e/56/f4fe86b4c9000affd63e9219e59b222dc48b01c534533093e798bf617a7e/ujson-5.11.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "849e65b696f0d242833f1df4182096cedc50d414215d1371fca85c541fbff629"}}, + {name = "ujson-5.11.0-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0a/f3/669437f0280308db4783b12a6d88c00730b394327d8334cc7a32ef218e64/ujson-5.11.0-cp314-cp314-win_arm64.whl",hashes = {sha256 = "e73df8648c9470af2b6a6bf5250d4744ad2cf3d774dcf8c6e31f018bdd04d764"}}, + {name = "ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6e/cd/e9809b064a89fe5c4184649adeb13c1b98652db3f8518980b04227358574/ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "de6e88f62796372fba1de973c11138f197d3e0e1d80bcb2b8aae1e826096d433"}}, + {name = "ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1b/be/ae26a6321179ebbb3a2e2685b9007c71bcda41ad7a77bbbe164005e956fc/ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "49e56ef8066f11b80d620985ae36869a3ff7e4b74c3b6129182ec5d1df0255f3"}}, + {name = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ae/e9/fb4a220ee6939db099f4cfeeae796ecb91e7584ad4d445d4ca7f994a9135/ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1a325fd2c3a056cf6c8e023f74a0c478dd282a93141356ae7f16d5309f5ff823"}}, + {name = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = 
"https://files.pythonhosted.org/packages/bd/f8/fc4b952b8f5fea09ea3397a0bd0ad019e474b204cabcb947cead5d4d1ffc/ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "a0af6574fc1d9d53f4ff371f58c96673e6d988ed2b5bf666a6143c782fa007e9"}}, + {name = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/e5/af5491dfda4f8b77e24cf3da68ee0d1552f99a13e5c622f4cef1380925c3/ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "10f29e71ecf4ecd93a6610bd8efa8e7b6467454a363c3d6416db65de883eb076"}}, + {name = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/09/0945349dd41f25cc8c38d78ace49f14c5052c5bbb7257d2f466fa7bdb533/ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1a0a9b76a89827a592656fe12e000cf4f12da9692f51a841a4a07aa4c7ecc41c"}}, + {name = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/49/44/8e04496acb3d5a1cbee3a54828d9652f67a37523efa3d3b18a347339680a/ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "b16930f6a0753cdc7d637b33b4e8f10d5e351e1fb83872ba6375f1e87be39746"}}, + {name = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/64/ae/4bc825860d679a0f208a19af2f39206dfd804ace2403330fdc3170334a2f/ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "04c41afc195fd477a59db3a84d5b83a871bd648ef371cf8c6f43072d89144eef"}}, + {name = "ujson-5.11.0-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/30/ed/5a057199fb0a5deabe0957073a1c1c1c02a3e99476cd03daee98ea21fa57/ujson-5.11.0-cp314-cp314t-win32.whl",hashes = {sha256 = "aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5"}}, + {name = "ujson-5.11.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/aa/03/b19c6176bdf1dc13ed84b886e99677a52764861b6cc023d5e7b6ebda249d/ujson-5.11.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec"}}, + {name = "ujson-5.11.0-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/5d/ca/a0413a3874b2dc1708b8796ca895bf363292f9c70b2e8ca482b7dbc0259d/ujson-5.11.0-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab"}}, + {name = "ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/ec/2de9dd371d52c377abc05d2b725645326c4562fc87296a8907c7bcdf2db7/ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "109f59885041b14ee9569bf0bb3f98579c3fa0652317b355669939e5fc5ede53"}}, + {name = "ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/a4/f611f816eac3a581d8a4372f6967c3ed41eddbae4008d1d77f223f1a4e0a/ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "a31c6b8004438e8c20fc55ac1c0e07dad42941db24176fe9acf2815971f8e752"}}, + {name = "ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e9/c5/c161940967184de96f5cbbbcce45b562a4bf851d60f4c677704b1770136d/ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "78c684fb21255b9b90320ba7e199780f653e03f6c2528663768965f4126a5b50"}}, + {name = "ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = 
"https://files.pythonhosted.org/packages/2b/d6/c7b2444238f5b2e2d0e3dab300b9ddc3606e4b1f0e4bed5a48157cebc792/ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "4c9f5d6a27d035dd90a146f7761c2272cf7103de5127c9ab9c4cd39ea61e878a"}}, + {name = "ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/a3/292551f936d3d02d9af148f53e1bc04306b00a7cf1fcbb86fa0d1c887242/ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "837da4d27fed5fdc1b630bd18f519744b23a0b5ada1bbde1a36ba463f2900c03"}}, + {name = "ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/90/a6/82cfa70448831b1a9e73f882225980b5c689bf539ec6400b31656a60ea46/ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "787aff4a84da301b7f3bac09bc696e2e5670df829c6f8ecf39916b4e7e24e701"}}, + {name = "ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/84/5c/96e2266be50f21e9b27acaee8ca8f23ea0b85cb998c33d4f53147687839b/ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "6dd703c3e86dc6f7044c5ac0b3ae079ed96bf297974598116aa5fb7f655c3a60"}}, + {name = "ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/20/78abe3d808cf3bb3e76f71fca46cd208317bf461c905d79f0d26b9df20f1/ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3772e4fe6b0c1e025ba3c50841a0ca4786825a4894c8411bf8d3afe3a8061328"}}, + {name = "ujson-5.11.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/d8/50/8856e24bec5e2fc7f775d867aeb7a3f137359356200ac44658f1f2c834b2/ujson-5.11.0-cp313-cp313-win32.whl",hashes = {sha256 = "8fa2af7c1459204b7a42e98263b069bd535ea0cd978b4d6982f35af5a04a4241"}}, + {name = "ujson-5.11.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5b/d8/1baee0f4179a4d0f5ce086832147b6cc9b7731c24ca08e14a3fdb8d39c32/ujson-5.11.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "34032aeca4510a7c7102bd5933f59a37f63891f30a0706fb46487ab6f0edf8f0"}}, + {name = "ujson-5.11.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/a9/8c/6d85ef5be82c6d66adced3ec5ef23353ed710a11f70b0b6a836878396334/ujson-5.11.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "ce076f2df2e1aa62b685086fbad67f2b1d3048369664b4cdccc50707325401f9"}}, + {name = "ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/b9/ef/a9cb1fce38f699123ff012161599fb9f2ff3f8d482b4b18c43a2dc35073f/ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702"}}, + {name = "ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b1/05/dba51a00eb30bd947791b173766cbed3492269c150a7771d2750000c965f/ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d"}}, + {name = "ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/03/3c/fd11a224f73fbffa299fb9644e425f38b38b30231f7923a088dd513aabb4/ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80"}}, + {name = "ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = 
"https://files.pythonhosted.org/packages/55/b9/405103cae24899df688a3431c776e00528bd4799e7d68820e7ebcf824f92/ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "fa79fdb47701942c2132a9dd2297a1a85941d966d8c87bfd9e29b0cf423f26cc"}}, + {name = "ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/7b/2dcbc2bbfdbf68f2368fb21ab0f6735e872290bb604c75f6e06b81edcb3f/ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8254e858437c00f17cb72e7a644fc42dad0ebb21ea981b71df6e84b1072aaa7c"}}, + {name = "ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/d1/71/fea2ca18986a366c750767b694430d5ded6b20b6985fddca72f74af38a4c/ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1aa8a2ab482f09f6c10fba37112af5f957689a79ea598399c85009f2f29898b5"}}, + {name = "ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/a3/bb/d4220bd7532eac6288d8115db51710fa2d7d271250797b0bfba9f1e755af/ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "a638425d3c6eed0318df663df44480f4a40dc87cc7c6da44d221418312f6413b"}}, + {name = "ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/80/47/226e540aa38878ce1194454385701d82df538ccb5ff8db2cf1641dde849a/ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "7e3cff632c1d78023b15f7e3a81c3745cd3f94c044d1e8fa8efbd6b161997bbc"}}, + {name = "ujson-5.11.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7e/81/546042f0b23c9040d61d46ea5ca76f0cc5e0d399180ddfb2ae976ebff5b5/ujson-5.11.0-cp312-cp312-win32.whl",hashes = {sha256 = "be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88"}}, + {name = "ujson-5.11.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/1b/27c05dc8c9728f44875d74b5bfa948ce91f6c33349232619279f35c6e817/ujson-5.11.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f"}}, + {name = "ujson-5.11.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/22/2d/37b6557c97c3409c202c838aa9c960ca3896843b4295c4b7bb2bbd260664/ujson-5.11.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6"}}, +] +marker = "sys_platform != \"win32\" and implementation_name == \"cpython\" and \"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "wcwidth" version = "0.2.13" @@ -3071,6 +2942,41 @@ dependencies = [ "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", ] +[[packages]] +name = "websockets" +version = "15.0.1" +requires-python = ">=3.9" +sdist = {name = "websockets-15.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hashes = {sha256 = "82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}} +wheels = [ + {name = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}}, + {name = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}}, + {name = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}}, + {name = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}}, + {name = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}}, + {name = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}}, + {name = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}}, + {name = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}}, + {name = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}}, + {name = "websockets-15.0.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl",hashes = {sha256 = "ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}}, + {name = "websockets-15.0.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}}, + {name = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl",url = 
"https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}}, + {name = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}}, + {name = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}}, + {name = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}}, + {name = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}}, + {name = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}}, + {name = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}}, + {name = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}}, + {name = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}}, + {name = "websockets-15.0.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl",hashes = {sha256 = "c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}}, + {name = "websockets-15.0.1-cp312-cp312-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}}, + {name = "websockets-15.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl",hashes = {sha256 = "f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "win32-setctime" version = "1.2.0" @@ -3085,14 +2991,54 @@ marker = "sys_platform == \"win32\" and \"default\" in dependency_groups" dependencies = [] [[packages]] -name = "zipp" -version = "3.23.0" -requires-python = ">=3.9" -sdist = {name = "zipp-3.23.0.tar.gz", url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hashes = {sha256 = "a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}} -wheels = [ - {name = "zipp-3.23.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl",hashes = {sha256 = "071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}}, +name = "wrapt" +version = "1.17.3" +requires-python = ">=3.8" +sdist = {name = "wrapt-1.17.3.tar.gz", url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hashes = {sha256 = "f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0"}} +wheels = [ + {name = "wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39"}}, + {name = "wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235"}}, + {name = "wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c"}}, + {name = "wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b"}}, + {name = "wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa"}}, + {name = 
"wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7"}}, + {name = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4"}}, + {name = "wrapt-1.17.3-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl",hashes = {sha256 = "fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10"}}, + {name = "wrapt-1.17.3-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl",hashes = {sha256 = "e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6"}}, + {name = "wrapt-1.17.3-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl",hashes = {sha256 = "507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58"}}, + {name = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a"}}, + {name = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067"}}, + {name = "wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454"}}, + {name = "wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e"}}, + {name = "wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f"}}, + {name = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056"}}, + {name = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804"}}, + {name = "wrapt-1.17.3-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl",hashes = {sha256 = "41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977"}}, + {name = "wrapt-1.17.3-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116"}}, + {name = "wrapt-1.17.3-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6"}}, + {name = "wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0"}}, + {name = "wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77"}}, + {name = "wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7"}}, + {name = "wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277"}}, + {name = "wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d"}}, + {name = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa"}}, + {name = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050"}}, + {name = "wrapt-1.17.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl",hashes = {sha256 = "53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8"}}, + {name = "wrapt-1.17.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb"}}, + {name = "wrapt-1.17.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16"}}, + {name = "wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0"}}, + {name = "wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba"}}, + {name = "wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd"}}, + {name = "wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828"}}, + {name = "wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9"}}, + {name = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396"}}, + {name = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc"}}, + {name = 
"wrapt-1.17.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl",hashes = {sha256 = "4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe"}}, + {name = "wrapt-1.17.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"}}, + {name = "wrapt-1.17.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"}}, + {name = "wrapt-1.17.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl",hashes = {sha256 = "7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"}}, ] -marker = "python_full_version < \"3.10.2\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] @@ -3156,6 +3102,19 @@ dependencies = [ "uc-micro-py", ] +[[packages]] +name = "networkx" +version = "3.5" +requires-python = ">=3.11" +sdist = {name = "networkx-3.5.tar.gz", url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hashes = {sha256 = "d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"}} +wheels = [ + {name = "networkx-3.5-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl",hashes = {sha256 = "0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"}}, +] +marker = "python_version ~= \"3.12\"" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "pandas" version = "2.3.1" @@ -3182,27 +3141,6 @@ wheels = [ {name = "pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/5b/14/cec7760d7c9507f11c97d64f29022e12a6cc4fc03ac694535e89f88ad2ec/pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8"}}, {name = "pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/b9/6e2d2c6728ed29fb3d4d4d302504fb66f1a543e37eb2e43f352a86365cdf/pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679"}}, {name = "pandas-2.3.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/80/a5/3a92893e7399a691bad7664d977cb5e7c81cf666c81f89ea76ba2bff483d/pandas-2.3.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8"}}, - {name = "pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/1c/ccf70029e927e473a4476c00e0d5b32e623bff27f0402d0a92b7fc29bb9f/pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2b0540963d83431f5ce8870ea02a7430adca100cec8a050f0811f8e31035541b"}}, - {name = "pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/ec/d3/3c37cb724d76a841f14b8f5fe57e5e3645207cc67370e4f84717e8bb7657/pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "fe7317f578c6a153912bd2292f02e40c1d8f253e93c599e82620c7f69755c74f"}}, - {name = "pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/8a/4c/367c98854a1251940edf54a4df0826dcacfb987f9068abf3e3064081a382/pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e6723a27ad7b244c0c79d8e7007092d7c8f0f11305770e2f4cd778b3ad5f9f85"}}, - {name = "pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/5f/63760ff107bcf5146eee41b38b3985f9055e710a72fdd637b791dea3495c/pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3462c3735fe19f2638f2c3a40bd94ec2dc5ba13abbb032dd2fa1f540a075509d"}}, - {name = "pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/15/53/f31a9b4dfe73fe4711c3a609bd8e60238022f48eacedc257cd13ae9327a7/pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "98bcc8b5bf7afed22cc753a28bc4d9e26e078e777066bc53fac7904ddef9a678"}}, - {name = "pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e0/94/6fce6bf85b5056d065e0a7933cba2616dcb48596f7ba3c6341ec4bcc529d/pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4d544806b485ddf29e52d75b1f559142514e60ef58a832f74fb38e48d757b299"}}, - {name = "pandas-2.3.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c8/7b/bdcb1ed8fccb63d04bdb7635161d0ec26596d92c9d7a6cce964e7876b6c1/pandas-2.3.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "b3cd4273d3cb3707b6fffd217204c52ed92859533e31dc03b7c5008aa933aaab"}}, - {name = "pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/c4/ca/aa97b47287221fa37a49634532e520300088e290b20d690b21ce3e448143/pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "22c2e866f7209ebc3a8f08d75766566aae02bcc91d196935a1d9e59c7b990ac9"}}, - {name = "pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/80/bf/7938dddc5f01e18e573dcfb0f1b8c9357d9b5fa6ffdee6e605b92efbdff2/pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "3583d348546201aff730c8c47e49bc159833f971c2899d6097bce68b9112a4f1"}}, - {name = "pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/2f/9af748366763b2a494fed477f88051dbf06f56053d5c00eba652697e3f94/pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0f951fbb702dacd390561e0ea45cdd8ecfa7fb56935eb3dd78e306c19104b9b0"}}, - {name = "pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/95/79ab37aa4c25d1e7df953dde407bb9c3e4ae47d154bc0dd1692f3a6dcf8c/pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "cd05b72ec02ebfb993569b4931b2e16fbb4d6ad6ce80224a3ee838387d83a191"}}, - {name = "pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/75/a7/d65e5d8665c12c3c6ff5edd9709d5836ec9b6f80071b7f4a718c6106e86e/pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1"}}, - {name = "pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/f3/4c1dbd754dbaa79dbf8b537800cb2fa1a6e534764fef50ab1f7533226c5c/pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97"}}, - {name = "pandas-2.3.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3f/d6/d7f5777162aa9b48ec3910bca5a58c9b5927cfd9cfde3aa64322f5ba4b9f/pandas-2.3.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83"}}, - {name = "pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/6e/21/ecf2df680982616459409b09962a8c2065330c7151dc6538069f3b634acf/pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "4645f770f98d656f11c69e81aeb21c6fca076a44bed3dcbb9396a4311bc7f6d8"}}, - {name = "pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1e/1a/dcb50e44b75419e96b276c9fb023b0f147b3c411be1cd517492aa2a184d4/pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "342e59589cc454aaff7484d75b816a433350b3d7964d7847327edda4d532a2e3"}}, - {name = "pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/2d/55/66cd2b679f6a27398380eac7574bc24746128f74626a3c02b978ea00e5ce/pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1d12f618d80379fde6af007f65f0c25bd3e40251dbd1636480dfffce2cf1e6da"}}, - {name = "pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/1c/5b9b263c80fd5e231b77df6f78cd7426d1d4ad3a4e858e85b7b3d93d0e9c/pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "dd71c47a911da120d72ef173aeac0bf5241423f9bfea57320110a978457e069e"}}, - {name = "pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f7/74/7e817b31413fbb96366ea327d43d1926a9c48c58074e27e094e2839a0e36/pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "09e3b1587f0f3b0913e21e8b32c3119174551deb4a4eba4a89bc7377947977e7"}}, - {name = "pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1f/0f/bc0a44b47eba2f22ae4235719a573d552ef7ad76ed3ea39ae62d554e040b/pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2323294c73ed50f612f67e2bf3ae45aea04dce5690778e08a09391897f35ff88"}}, - {name = "pandas-2.3.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fa/cb/6c32f8fadefa4314b740fbe8f74f6a02423bd1549e7c930826df35ac3c1b/pandas-2.3.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "b4b0de34dc8499c2db34000ef8baad684cfa4cbd836ecee05f323ebfba348c7d"}}, ] marker = "\"default\" in dependency_groups" @@ -3269,6 +3207,19 @@ marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "pycparser" +version = "2.23" +requires-python = ">=3.8" +sdist = {name = "pycparser-2.23.tar.gz", url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hashes = {sha256 = "78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}} +wheels = [ + {name = "pycparser-2.23-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl",hashes = {sha256 = "e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}}, +] +marker = "implementation_name != \"PyPy\" and \"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "pyproject-hooks" version = "1.2.0" @@ -3321,33 +3272,6 @@ wheels = [ {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}}, {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl",hashes = {sha256 = "e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}}, {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl",hashes = {sha256 = "0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fb/8f/683c6ad562f558cbc4f7c029abcd9599148c51c54b5ef0f24f2638da9fbb/ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl",hashes = {sha256 = "4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/3c/d2/b79b7d695e2f21da020bd44c782490578f300dd44f0a4c57a92575758a76/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl",hashes = {sha256 = "d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/68/6e/264c50ce2a31473a9fdbf4fa66ca9b2b17c7455b31ef585462343818bd6c/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/86/29/88c2567bc893c84d88b4c48027367c3562ae69121d568e8a3f3a8d363f4d/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl",url = "https://files.pythonhosted.org/packages/11/46/879763c619b5470820f0cd6ca97d134771e502776bc2b844d2adb6e37753/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl",hashes = {sha256 = "cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/02/80/ece7e6034256a4186bbe50dee28cd032d816974941a6abf6a9d65e4228a7/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = 
"7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f0/ca/e4106ac7e80efbabdf4bf91d3d32fc424e41418458251712f5672eada9ce/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/67/58/b1f60a1d591b771298ffa0428237afb092c7f29ae23bad93420b1eb10703/ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl",hashes = {sha256 = "bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b4/4f/b52f634c9548a9291a70dfce26ca7ebce388235c93588a1068028ea23fcc/ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl",hashes = {sha256 = "a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl",url = "https://files.pythonhosted.org/packages/70/57/40a958e863e299f0c74ef32a3bde9f2d1ea8d69669368c0c502a0997f57f/ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl",hashes = {sha256 = "11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/a8/29a3eb437b12b95f50a6bcc3d7d7214301c6c529d8fdc227247fa84162b5/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl",hashes = {sha256 = "a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/6d/ae05a87a3ad540259c3ad88d71275cbd1c0f2d30ae04c65dcbfb6dcd4b9f/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/7f/b7/20c6f3c0b656fe609675d69bc135c03aac9e3865912444be6339207b6648/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl",url = "https://files.pythonhosted.org/packages/cd/11/d12dbf683471f888d354dac59593873c2b45feb193c5e3e0f2ebf85e68b9/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl",hashes = {sha256 = "22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/14/4c268f5077db5c83f743ee1daeb236269fa8577133a5cfa49f8b382baf13/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/fc/8cd12f189c6405a4c1cf37bd633aa740a9538c8e40497c231072d0fef5cf/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}}, - {name = 
"ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/80/29/c0a017b704aaf3cbf704989785cd9c5d5b8ccec2dae6ac0c53833c84e677/ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl",hashes = {sha256 = "3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3a/65/fa39d74db4e2d0cd252355732d966a460a41cd01c6353b820a0952432839/ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl",hashes = {sha256 = "ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/46/ccdef7a84ad745c37cb3d9a81790f28fbc9adf9c237dba682017b123294e/ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl",hashes = {sha256 = "fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/29/09/932360f30ad1b7b79f08757e0a6fb8c5392a52cdcc182779158fe66d25ac/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl",hashes = {sha256 = "bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/2a/5b27602e7a4344c1334e26bf4739746206b7a60a8acdba33a61473468b73/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/da/1c/23497017c554fc06ff5701b29355522cff850f626337fff35d9ab352cb18/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl",url = "https://files.pythonhosted.org/packages/68/e6/f3d4ff3223f9ea49c3b7169ec0268e42bd49f87c70c0e3e853895e4a7ae2/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl",hashes = {sha256 = "d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/84/62/ead07043527642491e5011b143f44b81ef80f1025a96069b7210e0f2f0f3/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/b3/fe4d84446f7e4887e3bea7ceff0a7df23790b5ed625f830e79ace88ebefb/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/6e/b3/7feb99a00bfaa5c6868617bb7651308afde85e5a0b23cd187fe5de65feeb/ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl",hashes = {sha256 = "beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/93/07/de635108684b7a5bb06e432b0930c5a04b6c59efe73bd966d8db3cc208f2/ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl",hashes = {sha256 = "040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}}, ] marker = "platform_python_implementation == \"CPython\" and python_version < \"3.14\" and python_version >= \"3.9\" and \"dev\" in extras" @@ -3406,6 +3330,107 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "standard-aifc" +version = "3.13.0" +sdist = {name = "standard_aifc-3.13.0.tar.gz", url = "https://files.pythonhosted.org/packages/c4/53/6050dc3dde1671eb3db592c13b55a8005e5040131f7509cef0215212cb84/standard_aifc-3.13.0.tar.gz", hashes = {sha256 = "64e249c7cb4b3daf2fdba4e95721f811bde8bdfc43ad9f936589b7bb2fae2e43"}} +wheels = [ + {name = "standard_aifc-3.13.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/c3/52/5fbb203394cc852334d1575cc020f6bcec768d2265355984dfd361968f36/standard_aifc-3.13.0-py3-none-any.whl",hashes = {sha256 = "f7ae09cc57de1224a0dd8e3eb8f73830be7c3d0bc485de4c1f82b4a7f645ac66"}}, +] +marker = "python_version ~= \"3.13\"" + +[packages.tool.pdm] +dependencies = [ + "standard-chunk; python_version >= \"3.13\"", + "audioop-lts; python_version >= \"3.13\"", +] + +[[packages]] +name = "audioop-lts" +version = "0.2.2" +requires-python = ">=3.13" +sdist = {name = "audioop_lts-0.2.2.tar.gz", url = "https://files.pythonhosted.org/packages/38/53/946db57842a50b2da2e0c1e34bd37f36f5aadba1a929a3971c5d7841dbca/audioop_lts-0.2.2.tar.gz", hashes = {sha256 = "64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0"}} +wheels = [ + {name = "audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/5c/73/413b5a2804091e2c7d5def1d618e4837f1cb82464e230f827226278556b7/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "f9ee9b52f5f857fbaf9d605a360884f034c92c1c23021fb90b2e39b8e64bede6"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/8c/daa3308dc6593944410c2c68306a5e217f5c05b70a12e70228e7dd42dc5c/audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "49ee1a41738a23e98d98b937a0638357a2477bc99e61b0f768a8f654f45d9b7a"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4e/86/c2e0f627168fcf61781a8f72cab06b228fe1da4b9fa4ab39cfb791b5836b/audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "5b00be98ccd0fc123dcfad31d50030d25fcf31488cde9e61692029cd7394733b"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/c7/bd/35dce665255434f54e5307de39e31912a6f902d4572da7c37582809de14f/audioop_lts-0.2.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "a6d2e0f9f7a69403e388894d4ca5ada5c47230716a03f2847cfc7bd1ecb589d6"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/2d/d2/deeb9f51def1437b3afa35aeb729d577c04bcd89394cb56f9239a9f50b6f/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f9b0b8a03ef474f56d1a842af1a2e01398b8f7654009823c6d9e0ecff4d5cfbf"}}, + {name = 
"audioop_lts-0.2.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/76/3b/09f8b35b227cee28cc8231e296a82759ed80c1a08e349811d69773c48426/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "2b267b70747d82125f1a021506565bdc5609a2b24bcb4773c16d79d2bb260bbd"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/0b/15/05b48a935cf3b130c248bfdbdea71ce6437f5394ee8533e0edd7cfd93d5e/audioop_lts-0.2.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "0337d658f9b81f4cd0fdb1f47635070cc084871a3d4646d9de74fdf4e7c3d24a"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/83/80/186b7fce6d35b68d3d739f228dc31d60b3412105854edb975aa155a58339/audioop_lts-0.2.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "167d3b62586faef8b6b2275c3218796b12621a60e43f7e9d5845d627b9c9b80e"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/89/c78cc5ac6cb5828f17514fb12966e299c850bc885e80f8ad94e38d450886/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0d9385e96f9f6da847f4d571ce3cb15b5091140edf3db97276872647ce37efd7"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/4c/4b/6401888d0c010e586c2ca50fce4c903d70a6bb55928b16cfbdfd957a13da/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "48159d96962674eccdca9a3df280e864e8ac75e40a577cc97c5c42667ffabfc5"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/de/f8/c874ca9bb447dae0e2ef2e231f6c4c2b0c39e31ae684d2420b0f9e97ee68/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "8fefe5868cd082db1186f2837d64cfbfa78b548ea0d0543e9b28935ccce81ce9"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/3e/c0/0323e66f3daebc13fd46b36b30c3be47e3fc4257eae44f1e77eb828c703f/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "58cf54380c3884fb49fdd37dfb7a772632b6701d28edd3e2904743c5e1773602"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/6b/acc7734ac02d95ab791c10c3f17ffa3584ccb9ac5c18fd771c638ed6d1f5/audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "088327f00488cdeed296edd9215ca159f3a5a5034741465789cad403fcf4bec0"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/13/c3/c3dc3f564ce6877ecd2a05f8d751b9b27a8c320c2533a98b0c86349778d0/audioop_lts-0.2.2-cp314-cp314t-win32.whl",hashes = {sha256 = "068aa17a38b4e0e7de771c62c60bbca2455924b67a8814f3b0dee92b5820c0b3"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/72/bb/b4608537e9ffcb86449091939d52d24a055216a36a8bf66b936af8c3e7ac/audioop_lts-0.2.2-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "a5bf613e96f49712073de86f20dbdd4014ca18efd4d34ed18c75bd808337851b"}}, + {name = "audioop_lts-0.2.2-cp314-cp314t-win_arm64.whl",url = 
"https://files.pythonhosted.org/packages/f6/22/91616fe707a5c5510de2cac9b046a30defe7007ba8a0c04f9c08f27df312/audioop_lts-0.2.2-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "b492c3b040153e68b9fdaff5913305aaaba5bb433d8a7f73d5cf6a64ed3cc1dd"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/58/a7/0a764f77b5c4ac58dc13c01a580f5d32ae8c74c92020b961556a43e26d02/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "73f80bf4cd5d2ca7814da30a120de1f9408ee0619cc75da87d0641273d202a09"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/aa/ed/ebebedde1a18848b085ad0fa54b66ceb95f1f94a3fc04f1cd1b5ccb0ed42/audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "106753a83a25ee4d6f473f2be6b0966fc1c9af7e0017192f5531a3e7463dce58"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/cb/6e/11ca8c21af79f15dbb1c7f8017952ee8c810c438ce4e2b25638dfef2b02c/audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "fbdd522624141e40948ab3e8cdae6e04c748d78710e9f0f8d4dae2750831de19"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/84/52/0022f93d56d85eec5da6b9da6a958a1ef09e80c39f2cc0a590c6af81dcbb/audioop_lts-0.2.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "143fad0311e8209ece30a8dbddab3b65ab419cbe8c0dde6e8828da25999be911"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/87/1d/48a889855e67be8718adbc7a01f3c01d5743c325453a5e81cf3717664aad/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "dfbbc74ec68a0fd08cfec1f4b5e8cca3d3cd7de5501b01c4b5d209995033cde9"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/98/a6/94b7213190e8077547ffae75e13ed05edc488653c85aa5c41472c297d295/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "cfcac6aa6f42397471e4943e0feb2244549db5c5d01efcd02725b96af417f3fe"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/e9/e9/78450d7cb921ede0cfc33426d3a8023a3bda755883c95c868ee36db8d48d/audioop_lts-0.2.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "752d76472d9804ac60f0078c79cdae8b956f293177acd2316cd1e15149aee132"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/4f/e2/cd5439aad4f3e34ae1ee852025dc6aa8f67a82b97641e390bf7bd9891d3e/audioop_lts-0.2.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "83c381767e2cc10e93e40281a04852facc4cd9334550e0f392f72d1c0a9c5753"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/68/4b/9d853e9076c43ebba0d411e8d2aa19061083349ac695a7d082540bad64d0/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"c0022283e9556e0f3643b7c3c03f05063ca72b3063291834cca43234f20c60bb"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/58/26/4bae7f9d2f116ed5593989d0e521d679b0d583973d203384679323d8fa85/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a2d4f1513d63c795e82948e1305f31a6d530626e5f9f2605408b300ae6095093"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/b2/67/a9f4fb3e250dda9e9046f8866e9fa7d52664f8985e445c6b4ad6dfb55641/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "c9c8e68d8b4a56fda8c025e538e639f8c5953f5073886b596c93ec9b620055e7"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/70/f7/3de86562db0121956148bcb0fe5b506615e3bcf6e63c4357a612b910765a/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "96f19de485a2925314f5020e85911fb447ff5fbef56e8c7c6927851b95533a1c"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/f1/32/fd772bf9078ae1001207d2df1eef3da05bea611a87dd0e8217989b2848fa/audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e541c3ef484852ef36545f66209444c48b28661e864ccadb29daddb6a4b8e5f5"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/4f/41/affea7181592ab0ab560044632571a38edaf9130b84928177823fbf3176a/audioop_lts-0.2.2-cp313-cp313t-win32.whl",hashes = {sha256 = "d5e73fa573e273e4f2e5ff96f9043858a5e9311e94ffefd88a3186a910c70917"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/28/2b/0372842877016641db8fc54d5c88596b542eec2f8f6c20a36fb6612bf9ee/audioop_lts-0.2.2-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "9191d68659eda01e448188f60364c7763a7ca6653ed3f87ebb165822153a8547"}}, + {name = "audioop_lts-0.2.2-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/ee/ca/baf2b9cc7e96c179bb4a54f30fcd83e6ecb340031bde68f486403f943768/audioop_lts-0.2.2-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "c174e322bb5783c099aaf87faeb240c8d210686b04bd61dfd05a8e5a83d88969"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/de/d4/94d277ca941de5a507b07f0b592f199c22454eeaec8f008a286b3fbbacd6/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl",hashes = {sha256 = "fd3d4602dc64914d462924a08c1a9816435a2155d74f325853c1f1ac3b2d9800"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f8/5a/656d1c2da4b555920ce4177167bfeb8623d98765594af59702c8873f60ec/audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl",hashes = {sha256 = "550c114a8df0aafe9a05442a1162dfc8fec37e9af1d625ae6060fed6e756f303"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1b/83/ea581e364ce7b0d41456fb79d6ee0ad482beda61faf0cab20cbd4c63a541/audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "9a13dc409f2564de15dd68be65b462ba0dde01b19663720c68c1140c782d1d75"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b8/3b/e8964210b5e216e5041593b7d33e97ee65967f17c282e8510d19c666dab4/audioop_lts-0.2.2-cp313-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "51c916108c56aa6e426ce611946f901badac950ee2ddaf302b7ed35d9958970d"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c7/2e/0a1c52faf10d51def20531a59ce4c706cb7952323b11709e10de324d6493/audioop_lts-0.2.2-cp313-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "47eba38322370347b1c47024defbd36374a211e8dd5b0dcbce7b34fdb6f8847b"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/75/e8/cd95eef479656cb75ab05dfece8c1f8c395d17a7c651d88f8e6e291a63ab/audioop_lts-0.2.2-cp313-abi3-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "ba7c3a7e5f23e215cb271516197030c32aef2e754252c4c70a50aaff7031a2c8"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/5c/1e/a0c42570b74f83efa5cca34905b3eef03f7ab09fe5637015df538a7f3345/audioop_lts-0.2.2-cp313-abi3-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "def246fe9e180626731b26e89816e79aae2276f825420a07b4a647abaa84becc"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/50/d5/8a0ae607ca07dbb34027bac8db805498ee7bfecc05fd2c148cc1ed7646e7/audioop_lts-0.2.2-cp313-abi3-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "e160bf9df356d841bb6c180eeeea1834085464626dc1b68fa4e1d59070affdc3"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/12/17/0d28c46179e7910bfb0bb62760ccb33edb5de973052cb2230b662c14ca2e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4b4cd51a57b698b2d06cb9993b7ac8dfe89a3b2878e96bc7948e9f19ff51dba6"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/84/ba/bd5d3806641564f2024e97ca98ea8f8811d4e01d9b9f9831474bc9e14f9e/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "4a53aa7c16a60a6857e6b0b165261436396ef7293f8b5c9c828a3a203147ed4a"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/f9/5e/435ce8d5642f1f7679540d1e73c1c42d933331c0976eb397d1717d7f01a3/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_riscv64.whl",hashes = {sha256 = "3fc38008969796f0f689f1453722a0f463da1b8a6fbee11987830bfbb664f623"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/ae/3b/b909e76b606cbfd53875693ec8c156e93e15a1366a012f0b7e4fb52d3c34/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_s390x.whl",hashes = {sha256 = "15ab25dd3e620790f40e9ead897f91e79c0d3ce65fe193c8ed6c26cffdd24be7"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/30/e7/8f1603b4572d79b775f2140d7952f200f5e6c62904585d08a01f0a70393a/audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "03f061a1915538fd96272bac9551841859dbb2e3bf73ebe4a23ef043766f5449"}}, + {name = 
"audioop_lts-0.2.2-cp313-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/b5/96/c37846df657ccdda62ba1ae2b6534fa90e2e1b1742ca8dcf8ebd38c53801/audioop_lts-0.2.2-cp313-abi3-win32.whl",hashes = {sha256 = "3bcddaaf6cc5935a300a8387c99f7a7fbbe212a11568ec6cf6e4bc458c048636"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/34/a5/9d78fdb5b844a83da8a71226c7bdae7cc638861085fff7a1d707cb4823fa/audioop_lts-0.2.2-cp313-abi3-win_amd64.whl",hashes = {sha256 = "a2c2a947fae7d1062ef08c4e369e0ba2086049a5e598fda41122535557012e9e"}}, + {name = "audioop_lts-0.2.2-cp313-abi3-win_arm64.whl",url = "https://files.pythonhosted.org/packages/34/25/20d8fde083123e90c61b51afb547bb0ea7e77bab50d98c0ab243d02a0e43/audioop_lts-0.2.2-cp313-abi3-win_arm64.whl",hashes = {sha256 = "5f93a5db13927a37d2d09637ccca4b2b6b48c19cd9eda7b17a2e9f77edee6a6f"}}, +] +marker = "python_version ~= \"3.13\"" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "standard-chunk" +version = "3.13.0" +sdist = {name = "standard_chunk-3.13.0.tar.gz", url = "https://files.pythonhosted.org/packages/43/06/ce1bb165c1f111c7d23a1ad17204d67224baa69725bb6857a264db61beaf/standard_chunk-3.13.0.tar.gz", hashes = {sha256 = "4ac345d37d7e686d2755e01836b8d98eda0d1a3ee90375e597ae43aaf064d654"}} +wheels = [ + {name = "standard_chunk-3.13.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/7a/90/a5c1084d87767d787a6caba615aa50dc587229646308d9420c960cb5e4c0/standard_chunk-3.13.0-py3-none-any.whl",hashes = {sha256 = "17880a26c285189c644bd5bd8f8ed2bdb795d216e3293e6dbe55bbd848e2982c"}}, +] +marker = "python_version ~= \"3.13\"" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "standard-sunau" +version = "3.13.0" +sdist = {name = "standard_sunau-3.13.0.tar.gz", url = "https://files.pythonhosted.org/packages/66/e3/ce8d38cb2d70e05ffeddc28bb09bad77cfef979eb0a299c9117f7ed4e6a9/standard_sunau-3.13.0.tar.gz", hashes = {sha256 = "b319a1ac95a09a2378a8442f403c66f4fd4b36616d6df6ae82b8e536ee790908"}} +wheels = [ + {name = "standard_sunau-3.13.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/34/ae/e3707f6c1bc6f7aa0df600ba8075bfb8a19252140cd595335be60e25f9ee/standard_sunau-3.13.0-py3-none-any.whl",hashes = {sha256 = "53af624a9529c41062f4c2fd33837f297f3baa196b0cfceffea6555654602622"}}, +] +marker = "python_version ~= \"3.13\"" + +[packages.tool.pdm] +dependencies = [ + "audioop-lts; python_version >= \"3.13\"", +] + [[packages]] name = "uc-micro-py" version = "1.0.3" @@ -3455,61 +3480,6 @@ wheels = [ {name = "xxhash-3.5.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl",hashes = {sha256 = "f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"}}, {name = "xxhash-3.5.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"}}, {name = "xxhash-3.5.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"}}, - {name = "xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b8/c7/afed0f131fbda960ff15eee7f304fa0eeb2d58770fade99897984852ef23/xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1"}}, - {name = "xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/0c/7c3bc6d87e5235672fcc2fb42fd5ad79fe1033925f71bf549ee068c7d1ca/xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/04/9e/01067981d98069eec1c20201f8c145367698e9056f8bc295346e4ea32dd1/xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d4/09/d4996de4059c3ce5342b6e1e6a77c9d6c91acce31f6ed979891872dd162b/xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/62/f5/6d2dc9f8d55a7ce0f5e7bfef916e67536f01b85d32a9fbf137d4cadbee38/xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/72/9256303f10e41ab004799a4aa74b80b3c5977d6383ae4550548b24bd1971/xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/34/92/1a3a29acd08248a34b0e6a94f4e0ed9b8379a4ff471f1668e4dce7bdbaa8/xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/53/ad/7fa1a109663366de42f724a1cdb8e796a260dbac45047bce153bc1e18abf/xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/35/02/137300e24203bf2b2a49b48ce898ecce6fd01789c0fcd9c686c0a002d129/xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/23/03/aeceb273933d7eee248c4322b98b8e971f06cc3880e5f7602c94e5578af5/xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl",url = 
"https://files.pythonhosted.org/packages/e3/64/ed82ec09489474cbb35c716b189ddc1521d8b3de12b1b5ab41ce7f70253c/xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/71/43/6db4c02dcb488ad4e03bc86d70506c3d40a384ee73c9b5c93338eb1f3c23/xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839"}}, - {name = "xxhash-3.5.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/22/6d/db4abec29e7a567455344433d095fdb39c97db6955bb4a2c432e486b4d28/xxhash-3.5.0-cp311-cp311-win32.whl",hashes = {sha256 = "109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da"}}, - {name = "xxhash-3.5.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/52/1c/fa3b61c0cf03e1da4767213672efe186b1dfa4fc901a4a694fb184a513d1/xxhash-3.5.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58"}}, - {name = "xxhash-3.5.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/6b/8e/9e6fc572acf6e1cc7ccb01973c213f895cb8668a9d4c2b58a99350da14b7/xxhash-3.5.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3"}}, - {name = "xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/bb/8a/0e9feca390d512d293afd844d31670e25608c4a901e10202aa98785eab09/xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212"}}, - {name = "xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/16/e6/be5aa49580cd064a18200ab78e29b88b1127e1a8c7955eb8ecf81f2626eb/xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/20/ee/b8a99ebbc6d1113b3a3f09e747fa318c3cde5b04bd9c197688fadf0eeae8/xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/58/62/15d10582ef159283a5c2b47f6d799fc3303fe3911d5bb0bcc820e1ef7ff4/xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/23/41/61202663ea9b1bd8e53673b8ec9e2619989353dba8cfb68e59a9cbd9ffe3/xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/07/d9a3059f702dec5b3b703737afb6dda32f304f6e9da181a229dafd052c29/xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196"}}, - {name = 
"xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/eb/58/27caadf78226ecf1d62dbd0c01d152ed381c14c1ee4ad01f0d460fc40eac/xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b1/08/32d558ce23e1e068453c39aed7b3c1cdc690c177873ec0ca3a90d5808765/xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/3f/d4/2b971e2d2b0a61045f842b622ef11e94096cf1f12cd448b6fd426e80e0e2/xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/19/ae/6a6438864a8c4c39915d7b65effd85392ebe22710412902487e51769146d/xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/48/7d/b3c27c27d1fc868094d02fe4498ccce8cec9fcc591825c01d6bcb0b4fc49/xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a1/05/918f9e7d2fbbd334b829997045d341d6239b563c44e683b9a7ef8fe50f5d/xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442"}}, - {name = "xxhash-3.5.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/08/29/dfe393805b2f86bfc47c290b275f0b7c189dc2f4e136fd4754f32eb18a8d/xxhash-3.5.0-cp310-cp310-win32.whl",hashes = {sha256 = "61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da"}}, - {name = "xxhash-3.5.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7b/d7/aa0b22c4ebb7c3ccb993d4c565132abc641cd11164f8952d89eb6a501909/xxhash-3.5.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9"}}, - {name = "xxhash-3.5.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/69/12/f969b81541ee91b55f1ce469d7ab55079593c80d04fd01691b550e535000/xxhash-3.5.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/ab/9a/233606bada5bd6f50b2b72c45de3d9868ad551e83893d2ac86dc7bb8553a/xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/0c/67/f75276ca39e2c6604e3bee6c84e9db8a56a4973fde9bf35989787cf6e8aa/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = 
"fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/f8/f6c61fd794229cc3848d144f73754a0c107854372d7261419dcbbd286299/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/79/d3/c029c99801526f859e6b38d34ab87c08993bf3dcea34b11275775001638a/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/62/e3/bef7b82c1997579c94de9ac5ea7626d01ae5858aa22bf4fcb38bf220cb3e/xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"}}, - {name = "xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/f6/531dd6858adf8877675270b9d6989b6dacfd1c2d7135b17584fc29866df3/xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301"}}, - {name = "xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7c/a8/b2a42b6c9ae46e233f474f3d307c2e7bca8d9817650babeca048d2ad01d6/xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/92/9ac297e3487818f429bcf369c1c6a097edf5b56ed6fc1feff4c1882e87ef/xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/86/48/c1426dd3c86fc4a52f983301867463472f6a9013fb32d15991e60c9919b6/xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/f3/de/0ab8c79993765c94fc0d0c1a22b454483c58a0161e1b562f58b654f47660/xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/b4/332647451ed7d2c021294b7c1e9c144dbb5586b1fb214ad4f5a404642835/xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/f4/1c/a42c0a6cac752f84f7b44a90d1a9fa9047cf70bdba5198a304fde7cc471f/xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/d7/04e1b0daae9dc9b02c73c1664cc8aa527498c3f66ccbc586eeb25bbe9f14/xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c4/f4/05e15e67505228fc19ee98a79e427b3a0b9695f5567cd66ced5d66389883/xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/94/fb/e9028d3645bba5412a09de13ee36df276a567e60bdb31d499dafa46d76ae/xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/02/2c/18c6a622429368274739372d2f86c8125413ec169025c7d8ffb051784bba/xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/bb/5b55c391084a0321c3809632a018b9b657e59d5966289664f85a645942ac/xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606"}}, - {name = "xxhash-3.5.0-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/86/2b/915049db13401792fec159f57e4f4a5ca7a9768e83ef71d6645b9d0cd749/xxhash-3.5.0-cp39-cp39-win32.whl",hashes = {sha256 = "5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4"}}, - {name = "xxhash-3.5.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d5/87/382ef7b24917d7cf4c540ee30f29b283bc87ac5893d2f89b23ea3cdf7d77/xxhash-3.5.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558"}}, - {name = "xxhash-3.5.0-cp39-cp39-win_arm64.whl",url = "https://files.pythonhosted.org/packages/e2/47/d06b24e2d9c3dcabccfd734d11b5bbebfdf59ceac2c61509d8205dd20ac6/xxhash-3.5.0-cp39-cp39-win_arm64.whl",hashes = {sha256 = "a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/c2/56/30d3df421814947f9d782b20c9b7e5e957f3791cbd89874578011daafcbd/xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/82/dd/3c42a1f022ad0d82c852d3cb65493ebac03dcfa8c994465a5fb052b00e3c/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b2/40/8f902ab3bebda228a9b4de69eba988280285a7f7f167b942bc20bb562df9/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/db/87/bd06beb8ccaa0e9e577c9b909a49cfa5c5cd2ca46034342d72dd9ce5bc56/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bb/f8/505385e2fbd753ddcaafd5550eabe86f6232cbebabad3b2508d411b19153/xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl",hashes = {sha256 = "b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240"}}, ] marker = "\"default\" in dependency_groups" @@ -3522,6 +3492,18 @@ version = "1.13.1" requires-python = ">=3.9" sdist = {name = "scipy-1.13.1.tar.gz", url = "https://files.pythonhosted.org/packages/ae/00/48c2f661e2816ccf2ecd77982f6605b2950afe60f60a52b4cbbc2504aa8f/scipy-1.13.1.tar.gz", hashes = {sha256 = "095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}} wheels = [ + {name = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/15/4a4bb1b15bbd2cd2786c4f46e76b871b28799b67891f23f455323a0cdcfb/scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}}, + {name = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ba/92/42476de1af309c27710004f5cdebc27bec62c204db42e05b23a302cb0c9a/scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}}, + {name = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/80/ba/8be64fe225360a4beb6840f3cbee494c107c0887f33350d0a47d55400b01/scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}}, + {name = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/36/07/035d22ff9795129c5a847c64cb43c1fa9188826b59344fee28a3ab02e283/scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}}, + {name = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/10/f9b43de37e5ed91facc0cfff31d45ed0104f359e4f9a68416cbf4e790241/scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}}, + {name = "scipy-1.13.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/4a/48/4513a1a5623a23e95f94abd675ed91cfb19989c58e9f6f7d03990f6caf3d/scipy-1.13.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}}, + {name = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/33/59/41b2529908c002ade869623b87eecff3e11e3ce62e996d0bdcb536984187/scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}}, + {name = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d5/33/f1307601f492f764062ce7dd471a14750f3360e33cd0f8c614dae208492c/scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}}, + {name = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c0/66/9cd4f501dd5ea03e4a4572ecd874936d0da296bd04d1c45ae1a4a75d9c3a/scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}}, + {name = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a3/ba/7255e5dc82a65adbe83771c72f384d99c43063648456796436c9a5585ec3/scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}}, + {name = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/49/a5/bb9ded8326e9f0cdfdc412eeda1054b914dfea952bda2097d174f8832cc0/scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}}, + {name = "scipy-1.13.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/12/30/df7a8fcc08f9b4a83f5f27cfaaa7d43f9a2d2ad0b6562cced433e5b04e31/scipy-1.13.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}}, {name = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/7f/29/c2ea58c9731b9ecb30b6738113a95d147e83922986b34c685b8f6eefde21/scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}}, {name = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5c/c0/e71b94b20ccf9effb38d7147c0064c08c622309fd487b1b677771a97d18c/scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl",hashes = {sha256 = "8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}}, {name = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6d/0f/aaa55b06d474817cea311e7b10aab2ea1fd5d43bc6a2861ccc9caec9f418/scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}}, @@ -3529,7 +3511,7 @@ wheels = [ {name = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/02/1165905f14962174e6569076bcc3315809ae1291ed14de6448cc151eedfd/scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}}, {name = "scipy-1.13.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3e/77/dab54fe647a08ee4253963bcd8f9cf17509c8ca64d6335141422fe2e2114/scipy-1.13.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}}, ] -marker = "python_version < \"3.10\" and python_version >= 
\"3.9\" and \"dev\" in extras" +marker = "python_version < \"3.12\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.12\" and python_version >= \"3.9\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [ @@ -3542,6 +3524,26 @@ version = "2.0.2" requires-python = ">=3.9" sdist = {name = "numpy-2.0.2.tar.gz", url = "https://files.pythonhosted.org/packages/a9/75/10dd1f8116a8b796cb2c737b674e02d02e80454bda953fa7e65d8c12b016/numpy-2.0.2.tar.gz", hashes = {sha256 = "883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}} wheels = [ + {name = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/cf/034500fb83041aa0286e0fb16e7c76e5c8b67c0711bb6e9e9737a717d5fe/numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}}, + {name = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/d9/32de45561811a4b87fbdee23b5797394e3d1504b4a7cf40c10199848893e/numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}}, + {name = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c1/ca/2f384720020c7b244d22508cb7ab23d95f179fcfff33c31a6eeba8d6c512/numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl",hashes = {sha256 = "807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}}, + {name = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/0e/78/a3e4f9fb6aa4e6fdca0c5428e8ba039408514388cf62d89651aade838269/numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl",hashes = {sha256 = "8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}}, + {name = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a0/72/cfc3a1beb2caf4efc9d0b38a15fe34025230da27e1c08cc2eb9bfb1c7231/numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}}, + {name = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/ba/a8/c17acf65a931ce551fee11b72e8de63bf7e8a6f0e21add4c937c83563538/numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}}, + {name = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/ba/86/8767f3d54f6ae0165749f84648da9dcc8cd78ab65d415494962c86fac80f/numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}}, + {name = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/df/87/f76450e6e1c14e5bb1eae6836478b1028e096fd02e85c1c37674606ab752/numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}}, + {name = "numpy-2.0.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/5c/ca/0f0f328e1e59f73754f06e1adfb909de43726d4f24c6a3f8805f34f2b0fa/numpy-2.0.2-cp311-cp311-win32.whl",hashes = {sha256 = "a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}}, + {name = "numpy-2.0.2-cp311-cp311-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/eb/57/3a3f14d3a759dcf9bf6e9eda905794726b758819df4663f217d658a58695/numpy-2.0.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}}, + {name = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/21/91/3495b3237510f79f5d81f2508f9f13fea78ebfdf07538fc7444badda173d/numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}}, + {name = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/05/33/26178c7d437a87082d11019292dce6d3fe6f0e9026b7b2309cbf3e489b1d/numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}}, + {name = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ec/31/cc46e13bf07644efc7a4bf68df2df5fb2a1a88d0cd0da9ddc84dc0033e51/numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl",hashes = {sha256 = "8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}}, + {name = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/6e/16/7bfcebf27bb4f9d7ec67332ffebee4d1bf085c84246552d52dbb548600e7/numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl",hashes = {sha256 = "becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}}, + {name = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f9/a3/561c531c0e8bf082c5bef509d00d56f82e0ea7e1e3e3a7fc8fa78742a6e5/numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}}, + {name = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/fa/66/f7177ab331876200ac7563a580140643d1179c8b4b6a6b0fc9838de2a9b8/numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}}, + {name = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/25/7f/0b209498009ad6453e4efc2c65bcdf0ae08a182b2b7877d7ab38a92dc542/numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}}, + {name = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/3e/df/2619393b1e1b565cd2d4c4403bdd979621e2c4dea1f8532754b2598ed63b/numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}}, + {name = "numpy-2.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/22/ad/77e921b9f256d5da36424ffb711ae79ca3f451ff8489eeca544d0701d74a/numpy-2.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}}, + {name = "numpy-2.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/10/05/3442317535028bc29cf0c0dd4c191a4481e8376e9f0db6bcf29703cadae6/numpy-2.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}}, {name = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/43/c1/41c8f6df3162b0c6ffd4437d729115704bd43363de0090c7f913cfbc2d89/numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}}, {name = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/39/bc/fd298f308dcd232b56a4031fd6ddf11c43f9917fbc937e53762f7b5a3bb1/numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}}, {name = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/96/ff/06d1aa3eeb1c614eda245c1ba4fb88c483bee6520d361641331872ac4b82/numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl",hashes = {sha256 = "2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}}, @@ -3557,17 +3559,212 @@ wheels = [ {name = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/12/46/de1fbd0c1b5ccaa7f9a005b66761533e2f6a3e560096682683a223631fe9/numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}}, {name = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/cc/dc/d330a6faefd92b446ec0f0dfea4c3207bb1fef3c4771d19cf4543efd2c78/numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl",hashes = {sha256 = "a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}}, ] -marker = "python_version < \"3.10\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.10\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "python_version < \"3.12\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.12\" and python_version >= \"3.9\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "tomli" +version = "2.2.1" +requires-python = ">=3.8" +sdist = {name = "tomli-2.2.1.tar.gz", url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hashes = {sha256 = "cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}} +wheels = [ + {name = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}}, + {name = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}}, + {name = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}}, + {name = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}}, + {name = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}}, + {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}}, + {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}}, + {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}}, + {name = "tomli-2.2.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl",hashes = {sha256 = "465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}}, + {name = "tomli-2.2.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}}, + {name = "tomli-2.2.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl",hashes = {sha256 = "cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}}, +] +marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "importlib-metadata" +version = "8.7.0" +requires-python = ">=3.9" +sdist = {name = "importlib_metadata-8.7.0.tar.gz", url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hashes = {sha256 = "d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}} +wheels = [ + {name = "importlib_metadata-8.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl",hashes = {sha256 = "e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}}, +] +marker = "python_full_version < \"3.10.2\" and python_version >= \"3.9\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [ + "zipp>=3.20", + "typing-extensions>=3.6.4; python_version < \"3.8\"", +] + +[[packages]] +name = 
"backports-asyncio-runner" +version = "1.2.0" +requires-python = "<3.11,>=3.8" +sdist = {name = "backports_asyncio_runner-1.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hashes = {sha256 = "a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"}} +wheels = [ + {name = "backports_asyncio_runner-1.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl",hashes = {sha256 = "0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"}}, +] +marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "async-timeout" +version = "5.0.1" +requires-python = ">=3.8" +sdist = {name = "async_timeout-5.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hashes = {sha256 = "d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}} +wheels = [ + {name = "async_timeout-5.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl",hashes = {sha256 = "39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}}, +] +marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "exceptiongroup" +version = "1.3.0" +requires-python = ">=3.7" +sdist = {name = "exceptiongroup-1.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hashes = {sha256 = "b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}} +wheels = [ + {name = "exceptiongroup-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl",hashes = {sha256 = "4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}}, +] +marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [ + "typing-extensions>=4.6.0; python_version < \"3.13\"", +] + +[[packages]] +name = "numba" +version = "0.60.0" +requires-python = ">=3.9" +sdist = {name = "numba-0.60.0.tar.gz", url = "https://files.pythonhosted.org/packages/3c/93/2849300a9184775ba274aba6f82f303343669b0592b7bb0849ea713dabb0/numba-0.60.0.tar.gz", hashes = {sha256 = "5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}} +wheels = [ + {name = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/ad/df18d492a8f00d29a30db307904b9b296e37507034eedb523876f3a2e13e/numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}}, + {name = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/9a/51/a4dc2c01ce7a850b8e56ff6d5381d047a5daea83d12bad08aa071d34b2ee/numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}}, + {name = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/f9/4c/8889ac94c0b33dca80bed11564b8c6d9ea14d7f094e674c58e5c5b05859b/numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}}, + {name = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/57/03/2b4245b05b71c0cee667e6a0b51606dfa7f4157c9093d71c6b208385a611/numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}}, + {name = "numba-0.60.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/79/89/2d924ca60dbf949f18a6fec223a2445f5f428d9a5f97a6b29c2122319015/numba-0.60.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}}, + {name = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/f7/cf/baa13a7e3556d73d9e38021e6d6aa4aeb30d8b94545aa8b70d0f24a1ccc4/numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}}, + {name = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ac/ba/4b57fa498564457c3cc9fc9e570a6b08e6086c74220f24baaf04e54b995f/numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}}, + {name = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/28/98/7ea97ee75870a54f938a8c70f7e0be4495ba5349c5f9db09d467c4a5d5b7/numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}}, + {name = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/79/58/cb4ac5b8f7ec64200460aef1fed88258fb872ceef504ab1f989d2ff0f684/numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}}, + {name = "numba-0.60.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1c/b0/c61a93ca947d12233ff45de506ddbf52af3f752066a0b8be4d27426e16da/numba-0.60.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}}, + {name = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/68/1a/87c53f836cdf557083248c3f47212271f220280ff766538795e77c8c6bbf/numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}}, + {name = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/28/14/a5baa1f2edea7b49afa4dc1bb1b126645198cf1075186853b5b497be826e/numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}}, + {name = 
"numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/3b/bd/f1985719ff34e37e07bb18f9d3acd17e5a21da255f550c8eae031e2ddf5f/numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}}, + {name = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/54/9b/cd73d3f6617ddc8398a63ef97d8dc9139a9879b9ca8a7ca4b8789056ea46/numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}}, + {name = "numba-0.60.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/01/01/8b7b670c77c5ea0e47e283d82332969bf672ab6410d0b2610cac5b7a3ded/numba-0.60.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}}, +] +marker = "python_version < \"3.12\" and python_version >= \"3.9\" and \"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "llvmlite<0.44,>=0.43.0dev0", + "numpy<2.1,>=1.22", +] + +[[packages]] +name = "llvmlite" +version = "0.43.0" +requires-python = ">=3.9" +sdist = {name = "llvmlite-0.43.0.tar.gz", url = "https://files.pythonhosted.org/packages/9f/3d/f513755f285db51ab363a53e898b85562e950f79a2e6767a364530c2f645/llvmlite-0.43.0.tar.gz", hashes = {sha256 = "ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}} +wheels = [ + {name = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/95/8c/de3276d773ab6ce3ad676df5fab5aac19696b2956319d65d7dd88fb10f19/llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}}, + {name = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ee/e1/38deed89ced4cf378c61e232265cfe933ccde56ae83c901aa68b477d14b1/llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}}, + {name = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/2f/b2/4429433eb2dc8379e2cb582502dca074c23837f8fd009907f78a24de4c25/llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}}, + {name = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/99/5d00a7d671b1ba1751fc9f19d3b36f3300774c6eebe2bcdb5f6191763eb4/llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}}, + {name = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/20/ab/ed5ed3688c6ba4f0b8d789da19fd8e30a9cf7fc5852effe311bc5aefe73e/llvmlite-0.43.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}}, + {name = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/23/ff/6ca7e98998b573b4bd6566f15c35e5c8bea829663a6df0c7aa55ab559da9/llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}}, 
+ {name = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ca/5c/a27f9257f86f0cda3f764ff21d9f4217b9f6a0d45e7a39ecfa7905f524ce/llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}}, + {name = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/7e/3c/4410f670ad0a911227ea2ecfcba9f672a77cf1924df5280c4562032ec32d/llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}}, + {name = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/c6/21/2ffbab5714e72f2483207b4a1de79b2eecd9debbf666ff4e7067bcc5c134/llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}}, + {name = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f2/26/b5478037c453554a61625ef1125f7e12bb1429ae11c6376f47beba9b0179/llvmlite-0.43.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}}, + {name = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/2a/73/12925b1bbb3c2beb6d96f892ef5b4d742c34f00ddb9f4a125e9e87b22f52/llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}}, + {name = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/cc/61/58c70aa0808a8cba825a7d98cc65bef4801b99328fba80837bfcb5fc767f/llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}}, + {name = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c8/c6/9324eb5de2ba9d99cbed853d85ba7a318652a48e077797bec27cf40f911d/llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}}, + {name = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e0/d0/889e9705107db7b1ec0767b03f15d7b95b4c4f9fdf91928ab1c7e9ffacf6/llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}}, + {name = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/df/41/73cc26a2634b538cfe813f618c91e7e9960b8c163f8f0c94a2b0f008b9da/llvmlite-0.43.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}}, +] +marker = "python_version < \"3.12\" and python_version >= \"3.9\" and \"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "scikit-learn" +version = "1.6.1" +requires-python = ">=3.9" +sdist = {name = "scikit_learn-1.6.1.tar.gz", url = "https://files.pythonhosted.org/packages/9e/a5/4ae3b3a0755f7b35a280ac90b28817d1f380318973cff14075ab41ef50d9/scikit_learn-1.6.1.tar.gz", hashes = {sha256 = "b4fc2525eca2c69a59260f583c56a7557c6ccdf8deafdba6e060f94c1c59738e"}} +wheels = [ + {name = 
"scikit_learn-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/6c/2a/e291c29670795406a824567d1dfc91db7b699799a002fdaa452bceea8f6e/scikit_learn-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "72abc587c75234935e97d09aa4913a82f7b03ee0b74111dcc2881cba3c5a7b33"}}, + {name = "scikit_learn-1.6.1-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/25/92/ee1d7a00bb6b8c55755d4984fd82608603a3cc59959245068ce32e7fb808/scikit_learn-1.6.1-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "b3b00cdc8f1317b5f33191df1386c0befd16625f49d979fe77a8d44cae82410d"}}, + {name = "scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/cd/ed4399485ef364bb25f388ab438e3724e60dc218c547a407b6e90ccccaef/scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "dc4765af3386811c3ca21638f63b9cf5ecf66261cc4815c1db3f1e7dc7b79db2"}}, + {name = "scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/f3/62fc9a5a659bb58a03cdd7e258956a5824bdc9b4bb3c5d932f55880be569/scikit_learn-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "25fc636bdaf1cc2f4a124a116312d837148b5e10872147bdaf4887926b8c03d8"}}, + {name = "scikit_learn-1.6.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a1/a6/c5b78606743a1f28eae8f11973de6613a5ee87366796583fb74c67d54939/scikit_learn-1.6.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "fa909b1a36e000a03c382aade0bd2063fd5680ff8b8e501660c0f59f021a6415"}}, + {name = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/3a/f4597eb41049110b21ebcbb0bcb43e4035017545daa5eedcfeb45c08b9c5/scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}}, + {name = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/37/19/0423e5e1fd1c6ec5be2352ba05a537a473c1677f8188b9306097d684b327/scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}}, + {name = "scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/70/95/d5cb2297a835b0f5fc9a77042b0a2d029866379091ab8b3f52cc62277808/scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "8634c4bd21a2a813e0a7e3900464e6d593162a29dd35d25bdf0103b3fce60ed5"}}, + {name = "scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/91/ab3c697188f224d658969f678be86b0968ccc52774c8ab4a86a07be13c25/scikit_learn-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "775da975a471c4f6f467725dff0ced5c7ac7bda5e9316b260225b48475279a1b"}}, + {name = "scikit_learn-1.6.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/17/04/d5d556b6c88886c092cc989433b2bab62488e0f0dafe616a1d5c9cb0efb1/scikit_learn-1.6.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "8a600c31592bd7dab31e1c61b9bbd6dea1b3433e67d264d17ce1017dbdce8002"}}, + {name = "scikit_learn-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/d2/37/b305b759cc65829fe1b8853ff3e308b12cdd9d8884aa27840835560f2b42/scikit_learn-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "6849dd3234e87f55dce1db34c89a810b489ead832aaf4d4550b7ea85628be6c1"}}, + {name = "scikit_learn-1.6.1-cp39-cp39-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/83/74/f64379a4ed5879d9db744fe37cfe1978c07c66684d2439c3060d19a536d8/scikit_learn-1.6.1-cp39-cp39-macosx_12_0_arm64.whl",hashes = {sha256 = "e7be3fa5d2eb9be7d77c3734ff1d599151bb523674be9b834e8da6abe132f44e"}}, + {name = "scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/fd/dc/d5457e03dc9c971ce2b0d750e33148dd060fefb8b7dc71acd6054e4bb51b/scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "44a17798172df1d3c1065e8fcf9019183f06c87609b49a124ebdf57ae6cb0107"}}, + {name = "scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/79/35/b1d2188967c3204c78fa79c9263668cf1b98060e8e58d1a730fe5b2317bb/scikit_learn-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b8b7a3b86e411e4bce21186e1c180d792f3d99223dcfa3b4f597ecc92fa1a422"}}, + {name = "scikit_learn-1.6.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fb/d8/8d603bdd26601f4b07e2363032b8565ab82eb857f93d86d0f7956fcf4523/scikit_learn-1.6.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "7a73d457070e3318e32bdb3aa79a8d990474f19035464dfd8bede2883ab5dc3b"}}, +] +marker = "python_version < \"3.12\" and python_version >= \"3.9\" and \"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "numpy>=1.19.5", + "scipy>=1.6.0", + "joblib>=1.2.0", + "threadpoolctl>=3.1.0", +] + +[[packages]] +name = "zipp" +version = "3.23.0" +requires-python = ">=3.9" +sdist = {name = "zipp-3.23.0.tar.gz", url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hashes = {sha256 = "a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}} +wheels = [ + {name = "zipp-3.23.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl",hashes = {sha256 = "071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}}, +] +marker = "python_full_version < \"3.10.2\" and python_version >= \"3.9\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "networkx" +version = "3.2.1" +requires-python = ">=3.9" +sdist = {name = "networkx-3.2.1.tar.gz", url = "https://files.pythonhosted.org/packages/c4/80/a84676339aaae2f1cfdf9f418701dd634aef9cc76f708ef55c36ff39c3ca/networkx-3.2.1.tar.gz", hashes = {sha256 = "9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}} +wheels = [ + {name = "networkx-3.2.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d5/f0/8fbc882ca80cf077f1b246c0e3c3465f7f415439bdea6b899f6b19f61f70/networkx-3.2.1-py3-none-any.whl",hashes = {sha256 = "f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}}, +] +marker = "python_version < \"3.12\" and python_version >= \"3.9\" and \"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] [tool.pdm] -hashes = {sha256 = "4909a619da3f004dcd3f4ece16e07e96f81709904464fc614e66b456c5f8c73e"} +hashes = {sha256 = 
"555b4bbcb733817760de0902c1c4437a46a7d37708dba2168675e50f454e6361"} strategy = ["inherit_metadata", "static_urls"] [[tool.pdm.targets]] -requires_python = "~=3.10" +requires_python = "~=3.12" [[tool.pdm.targets]] -requires_python = ">=3.9,<3.10" +requires_python = ">=3.9,<3.12" diff --git a/pyproject.toml b/pyproject.toml index fbe054ad..7237e66d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,13 @@ include = ["*"] [tool.pdm] distribution = true +[[tool.pdm.source]] +name = "torch" +type = "find_links" +#url = "https://download.pytorch.org/whl/cpu/torch_stable.html" +url = "https://download.pytorch.org/whl/cpu/torch/" +include_packages = ["torch"] + # ************************************************ # ********** Project Metadata ********** @@ -64,6 +71,8 @@ dependencies = [ "sanic", "transformers", "uvloop>=0.18", + "librosa>=0.11.0", + "torch>=2.8.0", ] [project.optional-dependencies] diff --git a/src/guidellm/data/deserializers/__init__.py b/src/guidellm/data/deserializers/__init__.py index fdee12ce..1062f2b7 100644 --- a/src/guidellm/data/deserializers/__init__.py +++ b/src/guidellm/data/deserializers/__init__.py @@ -25,6 +25,7 @@ SyntheticTextDatasetConfig, SyntheticTextDatasetDeserializer, SyntheticTextGenerator, + SyntheticTextPrefixBucketConfig, ) __all__ = [ @@ -46,6 +47,7 @@ "SyntheticTextDatasetConfig", "SyntheticTextDatasetDeserializer", "SyntheticTextGenerator", + "SyntheticTextPrefixBucketConfig", "TarFileDatasetDeserializer", "TextFileDatasetDeserializer", ] diff --git a/src/guidellm/data/deserializers/synthetic.py b/src/guidellm/data/deserializers/synthetic.py index 2335596d..a071eeea 100644 --- a/src/guidellm/data/deserializers/synthetic.py +++ b/src/guidellm/data/deserializers/synthetic.py @@ -1,13 +1,15 @@ from __future__ import annotations +import math from collections.abc import Iterator from pathlib import Path -from typing import Any, Callable +from random import Random +from typing import Any, Callable, Self import yaml from datasets import Features, IterableDataset, Value from faker import Faker -from pydantic import Field +from pydantic import ConfigDict, Field, model_validator from transformers import PreTrainedTokenizerBase from guidellm.data.deserializers.deserializer import ( @@ -21,10 +23,37 @@ "SyntheticTextDatasetConfig", "SyntheticTextDatasetDeserializer", "SyntheticTextGenerator", + "SyntheticTextPrefixBucketConfig", ] +class SyntheticTextPrefixBucketConfig(StandardBaseModel): + bucket_weight: int = Field( + description="Weight of this bucket in the overall distribution.", + gt=0, + default=100, + ) + prefix_count: int = Field( + description="The number of unique prefixes to generate for this bucket.", + ge=1, + default=1, + ) + prefix_tokens: int = Field( + description="The number of prefix tokens per-prompt for this bucket.", + ge=0, + default=0, + ) + + class SyntheticTextDatasetConfig(StandardBaseModel): + model_config = ConfigDict( + extra="allow", + ) + + prefix_buckets: list[SyntheticTextPrefixBucketConfig] | None = Field( + description="Buckets for the prefix tokens distribution.", + default=None, + ) prompt_tokens: int = Field( description="The average number of text tokens generated for prompts.", gt=0, @@ -68,6 +97,26 @@ class SyntheticTextDatasetConfig(StandardBaseModel): default="data:prideandprejudice.txt.gz", ) + @model_validator(mode="after") + def check_prefix_options(self) -> Self: + prefix_count = self.__pydantic_extra__.get("prefix_count", None) # type: ignore[attr-defined] + prefix_tokens = 
self.__pydantic_extra__.get("prefix_tokens", None) # type: ignore[attr-defined] + if prefix_count is not None or prefix_tokens is not None: + if self.prefix_buckets: + raise ValueError( + "prefix_buckets is mutually exclusive" + " with prefix_count and prefix_tokens" + ) + + self.prefix_buckets = [ + SyntheticTextPrefixBucketConfig( + prefix_count=prefix_count or 1, + prefix_tokens=prefix_tokens or 0, + ) + ] + + return self + class SyntheticTextGenerator: def __init__( @@ -104,20 +153,27 @@ def __iter__(self) -> Iterator[dict[str, Any]]: ) ) + # Create a shared prefix if specified + rand = Random(self.random_seed + 3) + prefix_iter = self._create_prefix_iter(faker, rand) + while True: prompt_tokens_count = next(prompt_tokens_sampler) output_tokens_count = next(output_tokens_sampler) yield { + "prefix": next(prefix_iter), "prompt": self._create_prompt( - prompt_tokens_count, samples_generated, faker + prompt_tokens_count, faker, f"{samples_generated} " ), "prompt_tokens_count": prompt_tokens_count, "output_tokens_count": output_tokens_count, } samples_generated += 1 - def _create_prompt(self, prompt_tokens_count: int, index: int, faker: Faker) -> str: + def _create_prompt( + self, prompt_tokens_count: int, faker: Faker, unique: str = "" + ) -> str: prompt_token_ids = [] avg_chars_per_token = 5 margin_of_safety = 1.5 @@ -128,13 +184,42 @@ def _create_prompt(self, prompt_tokens_count: int, index: int, faker: Faker) -> num_chars = ( prompt_tokens_count * avg_chars_per_token * margin_of_safety * attempts ) - text = f"{index} " + faker.text(max_nb_chars=num_chars) + text = unique + faker.text(max_nb_chars=num_chars) prompt_token_ids = self.processor.encode(text) return self.processor.decode( prompt_token_ids[:prompt_tokens_count], skip_special_tokens=True ) + def _create_prefix_iter(self, faker: Faker, rand: Random) -> Iterator[str]: + if not self.config.prefix_buckets: + while True: + yield "" + + # Increase weights to ensure an integer number of samples per prefix + least_common_prefix_count = math.lcm( + *(bucket.prefix_count for bucket in self.config.prefix_buckets) + ) + unnorm_weights = [ + least_common_prefix_count * bucket.bucket_weight // bucket.prefix_count + for bucket in self.config.prefix_buckets + ] + # Use GCD to reduce the weights to smallest integer ratio + common_divisor = math.gcd(*unnorm_weights) + + # Create prefix list maintaining the correct distribution + prefixes = [] + for bucket, weight in zip(self.config.prefix_buckets, unnorm_weights): + bucket_prefixes = [ + self._create_prompt(bucket.prefix_tokens, faker) + for _ in range(bucket.prefix_count) + ] + sample_count = weight // common_divisor + prefixes.extend(bucket_prefixes * sample_count) + + while True: + yield rand.choice(prefixes) + @DatasetDeserializerFactory.register("synthetic_text") class SyntheticTextDatasetDeserializer(DatasetDeserializer): @@ -166,6 +251,7 @@ def __call__( ), features=Features( { + "prefix": Value("string"), "prompt": Value("string"), "prompt_tokens_count": Value("int32"), "output_tokens_count": Value("int32"), diff --git a/src/guidellm/data/formatters/templates.py b/src/guidellm/data/formatters/templates.py index 2cf6e2f3..52db73b1 100644 --- a/src/guidellm/data/formatters/templates.py +++ b/src/guidellm/data/formatters/templates.py @@ -22,11 +22,7 @@ class JinjaTemplatesRegistry(RegistryMixin[Union[Template, str]]): textwrap.dedent(""" {% set obj = { "json_body": { - "prompt": ( - text_column[0] - if text_column and text_column|length == 1 - else text_column - ) + "prompt": 
prefix_column[0]|default("") + text_column[0] } } %} @@ -52,6 +48,10 @@ class JinjaTemplatesRegistry(RegistryMixin[Union[Template, str]]): {% set obj = { "json_body": { "messages": [ + { + "role": "system", + "content": prefix_column[0]|default("") + }, { "role": "user", "content": [] @@ -61,11 +61,11 @@ class JinjaTemplatesRegistry(RegistryMixin[Union[Template, str]]): } %} {%- for item in text_column or [] %} - {% do obj["json_body"].messages[0].content.append({"type": "text", "text": item}) %} + {% do obj["json_body"].messages[1].content.append({"type": "text", "text": item}) %} {%- endfor %} {%- for item in image_column or [] %} - {% do obj["json_body"].messages[0].content.append({ + {% do obj["json_body"].messages[1].content.append({ "type": "image_url", "image_url": encode_image( item, @@ -78,7 +78,7 @@ class JinjaTemplatesRegistry(RegistryMixin[Union[Template, str]]): {%- endfor %} {%- for item in video_column or [] %} - {% do obj["json_body"].messages[0].content.append({ + {% do obj["json_body"].messages[1].content.append({ "type": "video_url", "video_url": encode_video( item, diff --git a/src/guidellm/data/objects.py b/src/guidellm/data/objects.py index 04c5407d..b4a38719 100644 --- a/src/guidellm/data/objects.py +++ b/src/guidellm/data/objects.py @@ -31,6 +31,7 @@ GenerativeDatasetColumnType = Literal[ "prompt_tokens_count_column", "output_tokens_count_column", + "prefix_column", "text_column", "image_column", "video_column", @@ -195,6 +196,7 @@ class GenerativeDatasetArgs(StandardBaseDict): split: str | None = None prompt_tokens_count_column: str | None = None output_tokens_count_column: str | None = None + prefix_column: str | None = None text_column: str | list[str] | None = None image_column: str | list[str] | None = None video_column: str | list[str] | None = None diff --git a/src/guidellm/data/utils.py b/src/guidellm/data/utils.py index 7d53a054..d2fa1f9c 100644 --- a/src/guidellm/data/utils.py +++ b/src/guidellm/data/utils.py @@ -80,6 +80,11 @@ DEFAULT_COLUMN_NAMES: dict[str, list[str]] = { "prompt_tokens_count": ["prompt_tokens_count", "input_tokens_count"], "output_tokens_count": ["output_tokens_count", "completion_tokens_count"], + "prefix_column": [ + "system_prompt", + "system", + "prefix", + ], "text_column": [ "prompt", "instruction", diff --git a/tests/unit/dataset/__init__.py b/tests/unit/data/__init__.py similarity index 100% rename from tests/unit/dataset/__init__.py rename to tests/unit/data/__init__.py diff --git a/tests/unit/data/deserializers/__init__.py b/tests/unit/data/deserializers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/data/deserializers/test_synthetic.py b/tests/unit/data/deserializers/test_synthetic.py new file mode 100644 index 00000000..58b76aee --- /dev/null +++ b/tests/unit/data/deserializers/test_synthetic.py @@ -0,0 +1,587 @@ +""" +Unit tests for guidellm.data.deserializers.synthetic module. +""" + +import json +import tempfile +from pathlib import Path +from unittest.mock import Mock + +import pytest +import yaml +from datasets import IterableDataset + +from guidellm.data.deserializers.deserializer import DataNotSupportedError +from guidellm.data.deserializers.synthetic import ( + SyntheticTextDatasetConfig, + SyntheticTextDatasetDeserializer, + SyntheticTextGenerator, + SyntheticTextPrefixBucketConfig, +) + + +class TestPrefixBucketConfig: + """Test cases for PrefixBucketConfig class. 
+ + ### WRITTEN BY AI ### + """ + + @pytest.mark.smoke + def test_creation_with_valid_params(self): + """Test creating PrefixBucketConfig with valid parameters. + + ### WRITTEN BY AI ### + """ + config = SyntheticTextPrefixBucketConfig( + bucket_weight=100, prefix_count=1, prefix_tokens=5 + ) + + assert config.bucket_weight == 100 + assert config.prefix_count == 1 + assert config.prefix_tokens == 5 + + @pytest.mark.sanity + def test_creation_with_negative_values(self): + """Test creating PrefixBucketConfig with negative values raises ValueError. + + ### WRITTEN BY AI ### + """ + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig( + bucket_weight=-10, prefix_count=1, prefix_tokens=5 + ) + + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig( + bucket_weight=100, prefix_count=-1, prefix_tokens=5 + ) + + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig( + bucket_weight=100, prefix_count=1, prefix_tokens=-5 + ) + + @pytest.mark.regression + def test_prefix_bucket_zero_weight_error(self): + """Test that zero total weight raises an error. + + ### WRITTEN BY AI ### + """ + # Test validation error for creating PrefixBucketConfig with weight=0 + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig( + bucket_weight=0, prefix_count=1, prefix_tokens=2 + ) + + @pytest.mark.sanity + def test_prefix_bucket_config_validation(self): + """Test PrefixBucketConfig validation. + + ### WRITTEN BY AI ### + """ + # Test valid config + valid_config = SyntheticTextPrefixBucketConfig( + bucket_weight=50, prefix_count=2, prefix_tokens=3 + ) + assert valid_config.bucket_weight == 50 + assert valid_config.prefix_count == 2 + assert valid_config.prefix_tokens == 3 + + # Test invalid bucket_weight + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig( + bucket_weight=0, prefix_count=1, prefix_tokens=2 + ) + + # Test invalid prefix_count + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig( + bucket_weight=100, prefix_count=0, prefix_tokens=2 + ) + + # Test invalid prefix_tokens + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig( + bucket_weight=100, prefix_count=1, prefix_tokens=-1 + ) + + +class TestSyntheticDatasetConfig: + """Test cases for SyntheticDatasetConfig class. + + ### WRITTEN BY AI ### + """ + + @pytest.mark.smoke + def test_config_creation_with_all_params(self): + """Test creating config with all parameters specified. + + ### WRITTEN BY AI ### + """ + prefix_bucket = SyntheticTextPrefixBucketConfig( + bucket_weight=100, prefix_count=1, prefix_tokens=5 + ) + + config = SyntheticTextDatasetConfig( + prefix_buckets=[prefix_bucket], + prompt_tokens=100, + prompt_tokens_stdev=10, + prompt_tokens_min=50, + prompt_tokens_max=150, + output_tokens=30, + output_tokens_stdev=5, + output_tokens_min=20, + output_tokens_max=40, + source="custom_text.txt", + ) + + assert config.prefix_buckets[0].prefix_tokens == 5 # type: ignore [index] + assert config.prompt_tokens == 100 + assert config.prompt_tokens_stdev == 10 + assert config.prompt_tokens_min == 50 + assert config.prompt_tokens_max == 150 + assert config.output_tokens == 30 + assert config.output_tokens_stdev == 5 + assert config.output_tokens_min == 20 + assert config.output_tokens_max == 40 + assert config.source == "custom_text.txt" + + @pytest.mark.regression + def test_parse_json_string(self): + """Test parsing JSON string configuration. 
+ + ### WRITTEN BY AI ### + """ + json_str = json.dumps( + { + "prompt_tokens": 75, + "output_tokens": 25, + "source": "test.txt", + "prefix_buckets": [ + {"bucket_weight": 100, "prefix_count": 1, "prefix_tokens": 10} + ], + } + ) + + config = SyntheticTextDatasetConfig.model_validate_json(json_str) + + assert config.prompt_tokens == 75 + assert config.output_tokens == 25 + assert config.source == "test.txt" + assert config.prefix_buckets[0].prefix_tokens == 10 # type: ignore [index] + + @pytest.mark.sanity + def test_validation_positive_values(self): + """Test that negative or zero values are rejected. + + ### WRITTEN BY AI ### + """ + with pytest.raises(ValueError): + SyntheticTextDatasetConfig(prompt_tokens=0, output_tokens=20) + + with pytest.raises(ValueError): + SyntheticTextDatasetConfig(prompt_tokens=20, output_tokens=0) + + # Test negative prefix tokens via PrefixBucketConfig validation + with pytest.raises(ValueError): + SyntheticTextPrefixBucketConfig(prefix_tokens=-1) + + @pytest.mark.regression + def test_validation_optional_positive_values(self): + """Test that optional parameters reject negative values. + + ### WRITTEN BY AI ### + """ + with pytest.raises(ValueError): + SyntheticTextDatasetConfig( + prompt_tokens=20, output_tokens=10, prompt_tokens_stdev=-1 + ) + + with pytest.raises(ValueError): + SyntheticTextDatasetConfig( + prompt_tokens=20, output_tokens=10, prompt_tokens_min=-1 + ) + + with pytest.raises(ValueError): + SyntheticTextDatasetConfig( + prompt_tokens=20, output_tokens=10, output_tokens_max=0 + ) + + +class TestSyntheticTextGenerator: + """Test cases for SyntheticTextGenerator class. + + ### WRITTEN BY AI ### + """ + + @pytest.fixture + def mock_tokenizer(self): + """Fixture to provide a mocked tokenizer. + + ### WRITTEN BY AI ### + """ + tokenizer = Mock() + tokenizer.encode.side_effect = lambda text: list(range(len(text.split()))) + tokenizer.decode.side_effect = ( + lambda tokens, skip_special_tokens=False: " ".join( + f"token_{t}" for t in tokens[:5] + ) + ) + return tokenizer + + @pytest.fixture + def simple_config(self): + """Fixture for simple configuration. + + ### WRITTEN BY AI ### + """ + return SyntheticTextDatasetConfig( + prompt_tokens=15, + output_tokens=10, + source="The quick brown fox jumps over the lazy dog.", + ) + + @pytest.fixture + def config_with_prefix(self): + """Fixture for configuration with prefix tokens. + + ### WRITTEN BY AI ### + """ + prefix_bucket = SyntheticTextPrefixBucketConfig( + bucket_weight=100, prefix_count=1, prefix_tokens=3 + ) + + return SyntheticTextDatasetConfig( + prefix_buckets=[prefix_bucket], + prompt_tokens=15, + output_tokens=10, + source="The quick brown fox jumps over the lazy dog.", + ) + + @pytest.mark.smoke + def test_generator_initialization(self, simple_config, mock_tokenizer): + """Test generator initialization. + + ### WRITTEN BY AI ### + """ + generator = SyntheticTextGenerator( + simple_config, mock_tokenizer, random_seed=42 + ) + + assert generator.config == simple_config + assert generator.processor == mock_tokenizer + assert generator.random_seed == 42 + + @pytest.mark.smoke + def test_basic_iteration(self, simple_config, mock_tokenizer): + """Test basic iteration functionality. 
+ + ### WRITTEN BY AI ### + """ + generator = SyntheticTextGenerator( + simple_config, mock_tokenizer, random_seed=42 + ) + + items = [] + for i, item in enumerate(generator): + items.append(item) + if i >= 4: # Only get 5 items + break + + # Verify we get the expected number of items + assert len(items) == 5 + + # Verify each item has the required keys + for item in items: + assert "prefix" in item + assert "prompt" in item + assert "prompt_tokens_count" in item + assert "output_tokens_count" in item + assert isinstance(item["prefix"], str) + assert isinstance(item["prompt"], str) + assert isinstance(item["prompt_tokens_count"], int) + assert isinstance(item["output_tokens_count"], int) + + @pytest.mark.sanity + def test_create_prompt_method(self, simple_config, mock_tokenizer): + """Test _create_prompt method. + + ### WRITTEN BY AI ### + """ + from faker import Faker + + generator = SyntheticTextGenerator( + simple_config, mock_tokenizer, random_seed=42 + ) + faker = Faker() + faker.seed_instance(42) + + # Test normal case + result = generator._create_prompt(5, faker, "unique_prefix ") + assert isinstance(result, str) + # The result should be the decoded tokens (token_0 token_1 etc.) due to our mock + assert "token_" in result + + # Test zero tokens + result = generator._create_prompt(0, faker) + assert result == "" + + @pytest.mark.regression + def test_prefix_tokens_integration(self, config_with_prefix, mock_tokenizer): + """Test integration with prefix tokens. + + ### WRITTEN BY AI ### + """ + generator = SyntheticTextGenerator( + config_with_prefix, mock_tokenizer, random_seed=42 + ) + + items = [] + for i, item in enumerate(generator): + items.append(item) + if i >= 2: # Only get 3 items + break + + # Verify prefix is present in items + for item in items: + assert isinstance(item["prefix"], str) + + @pytest.mark.regression + def test_random_seeding_consistency(self, simple_config, mock_tokenizer): + """Test that same seed produces consistent results. + + ### WRITTEN BY AI ### + """ + # Create two generators with same seed + generator1 = SyntheticTextGenerator( + simple_config, mock_tokenizer, random_seed=42 + ) + generator2 = SyntheticTextGenerator( + simple_config, mock_tokenizer, random_seed=42 + ) + + items1 = [] + items2 = [] + for i, (item1, item2) in enumerate(zip(generator1, generator2)): + items1.append(item1) + items2.append(item2) + if i >= 2: # Only get 3 items + break + + # With same seed and deterministic mocks, results should be identical + assert len(items1) == len(items2) + for item1, item2 in zip(items1, items2): + assert item1["prompt_tokens_count"] == item2["prompt_tokens_count"] + assert item1["output_tokens_count"] == item2["output_tokens_count"] + + +class TestSyntheticDatasetDeserializer: + """Test cases for SyntheticDatasetDeserializer class. + + ### WRITTEN BY AI ### + """ + + @pytest.fixture + def mock_tokenizer(self): + """Fixture to provide a mocked tokenizer. + + ### WRITTEN BY AI ### + """ + tokenizer = Mock() + tokenizer.encode.side_effect = lambda text: list(range(len(text.split()))) + tokenizer.decode.side_effect = ( + lambda tokens, skip_special_tokens=False: " ".join( + f"token_{t}" for t in tokens[:5] + ) + ) + return tokenizer + + @pytest.mark.sanity + def test_load_config_file_yaml(self): + """Test loading YAML config file. 
+ + ### WRITTEN BY AI ### + """ + config_data = { + "prompt_tokens": 60, + "output_tokens": 15, + "source": "yaml_test.txt", + "prefix_buckets": [ + {"bucket_weight": 100, "prefix_count": 1, "prefix_tokens": 3} + ], + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config_data, f) + yaml_path = f.name + + try: + deserializer = SyntheticTextDatasetDeserializer() + config = deserializer._load_config_file(yaml_path) + + assert config.prompt_tokens == 60 + assert config.output_tokens == 15 + assert config.source == "yaml_test.txt" + assert config.prefix_buckets[0].prefix_tokens == 3 # type: ignore [index] + finally: + Path(yaml_path).unlink() + + @pytest.mark.sanity + def test_load_config_file_config_extension(self): + """Test loading .config file. + + ### WRITTEN BY AI ### + """ + config_data = { + "prompt_tokens": 90, + "output_tokens": 35, + "prefix_buckets": [ + {"bucket_weight": 100, "prefix_count": 1, "prefix_tokens": 2} + ], + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".config", delete=False) as f: + yaml.dump(config_data, f) + config_path = f.name + + try: + deserializer = SyntheticTextDatasetDeserializer() + config = deserializer._load_config_file(config_path) + + assert config.prompt_tokens == 90 + assert config.output_tokens == 35 + assert config.prefix_buckets[0].prefix_tokens == 2 # type: ignore [index] + finally: + Path(config_path).unlink() + + @pytest.mark.smoke + def test_load_config_str_json(self): + """Test loading JSON string config. + + ### WRITTEN BY AI ### + """ + json_str = '{"prompt_tokens": 50, "output_tokens": 25}' + deserializer = SyntheticTextDatasetDeserializer() + config = deserializer._load_config_str(json_str) + + assert config.prompt_tokens == 50 + assert config.output_tokens == 25 + + @pytest.mark.smoke + def test_load_config_str_key_value(self): + """Test loading key-value string config. + + ### WRITTEN BY AI ### + """ + kv_str = "prompt_tokens=50,output_tokens=25" + deserializer = SyntheticTextDatasetDeserializer() + config = deserializer._load_config_str(kv_str) + + assert config.prompt_tokens == 50 + assert config.output_tokens == 25 + + @pytest.mark.sanity + def test_load_config_str_invalid_format(self): + """Test loading invalid format raises DataNotSupportedError. + + ### WRITTEN BY AI ### + """ + deserializer = SyntheticTextDatasetDeserializer() + with pytest.raises(DataNotSupportedError, match="Unsupported string data"): + deserializer._load_config_str("invalid_format_string") + + @pytest.mark.regression + def test_load_config_file_non_existent(self): + """Test loading non-existent file returns None. + + ### WRITTEN BY AI ### + """ + deserializer = SyntheticTextDatasetDeserializer() + config = deserializer._load_config_file("/non/existent/path.config") + assert config is None + + @pytest.mark.regression + def test_load_config_str_non_string(self): + """Test loading non-string returns None. + + ### WRITTEN BY AI ### + """ + deserializer = SyntheticTextDatasetDeserializer() + config = deserializer._load_config_str(123) + assert config is None + + @pytest.mark.smoke + def test_call_with_config_object(self, mock_tokenizer): + """Test calling deserializer with SyntheticTextDatasetConfig. 
+ + ### WRITTEN BY AI ### + """ + config = SyntheticTextDatasetConfig(prompt_tokens=50, output_tokens=25) + deserializer = SyntheticTextDatasetDeserializer() + + result = deserializer( + data=config, + data_kwargs={}, + processor_factory=lambda: mock_tokenizer, + random_seed=42, + ) + + assert isinstance(result, IterableDataset) + + @pytest.mark.regression + def test_call_with_unsupported_data(self, mock_tokenizer): + """Test calling deserializer with unsupported data raises error. + + ### WRITTEN BY AI ### + """ + deserializer = SyntheticTextDatasetDeserializer() + + with pytest.raises(DataNotSupportedError, match="Unsupported data"): + deserializer( + data=123, + data_kwargs={}, + processor_factory=lambda: mock_tokenizer, + random_seed=42, + ) + + @pytest.mark.regression + def test_call_with_json_string(self, mock_tokenizer): + """Test calling deserializer with JSON string. + + ### WRITTEN BY AI ### + """ + json_str = '{"prompt_tokens": 50, "output_tokens": 25}' + deserializer = SyntheticTextDatasetDeserializer() + + result = deserializer( + data=json_str, + data_kwargs={}, + processor_factory=lambda: mock_tokenizer, + random_seed=42, + ) + + assert isinstance(result, IterableDataset) + + @pytest.mark.regression + def test_call_with_config_file(self, mock_tokenizer): + """Test calling deserializer with config file. + + ### WRITTEN BY AI ### + """ + config_data = {"prompt_tokens": 65, "output_tokens": 45} + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config_data, f) + config_path = f.name + + try: + deserializer = SyntheticTextDatasetDeserializer() + result = deserializer( + data=config_path, + data_kwargs={}, + processor_factory=lambda: mock_tokenizer, + random_seed=42, + ) + assert isinstance(result, IterableDataset) + finally: + Path(config_path).unlink() diff --git a/tests/unit/dataset/test_synthetic.py b/tests/unit/dataset/test_synthetic.py deleted file mode 100644 index e3110fa3..00000000 --- a/tests/unit/dataset/test_synthetic.py +++ /dev/null @@ -1,873 +0,0 @@ -""" -Unit tests for guidellm.dataset.synthetic module. -""" - -import json -import tempfile -from pathlib import Path -from unittest.mock import Mock, patch - -import pytest -import yaml - -from guidellm.dataset.synthetic import ( - SyntheticDatasetConfig, - SyntheticDatasetCreator, - SyntheticTextItemsGenerator, -) - - -class TestSyntheticDatasetConfig: - """Test cases for SyntheticDatasetConfig class. - - ### WRITTEN BY AI ### - """ - - @pytest.mark.smoke - def test_config_creation_with_all_params(self): - """Test creating config with all parameters specified. - - ### WRITTEN BY AI ### - """ - config = SyntheticDatasetConfig( - prefix_tokens=5, - prompt_tokens=100, - prompt_tokens_stdev=10, - prompt_tokens_min=50, - prompt_tokens_max=150, - output_tokens=30, - output_tokens_stdev=5, - output_tokens_min=20, - output_tokens_max=40, - samples=500, - source="custom_text.txt", - ) - - assert config.prefix_tokens == 5 - assert config.prompt_tokens == 100 - assert config.prompt_tokens_stdev == 10 - assert config.prompt_tokens_min == 50 - assert config.prompt_tokens_max == 150 - assert config.output_tokens == 30 - assert config.output_tokens_stdev == 5 - assert config.output_tokens_min == 20 - assert config.output_tokens_max == 40 - assert config.samples == 500 - assert config.source == "custom_text.txt" - - @pytest.mark.regression - def test_parse_json_string(self): - """Test parsing JSON string configuration. 
- - ### WRITTEN BY AI ### - """ - json_str = json.dumps( - { - "prompt_tokens": 75, - "output_tokens": 25, - "samples": 200, - "source": "test.txt", - "prefix_tokens": 10, - } - ) - - config = SyntheticDatasetConfig.parse_str(json_str) - - assert config.prompt_tokens == 75 - assert config.output_tokens == 25 - assert config.samples == 200 - assert config.source == "test.txt" - assert config.prefix_tokens == 10 - - @pytest.mark.regression - def test_parse_key_value_pairs(self): - """Test parsing key-value pairs configuration. - - ### WRITTEN BY AI ### - """ - kv_str = "prompt_tokens=80,output_tokens=30,samples=300,source=data.txt,prefix_tokens=5" # noqa: E501 - - config = SyntheticDatasetConfig.parse_str(kv_str) - - assert config.prompt_tokens == 80 - assert config.output_tokens == 30 - assert config.samples == 300 - assert config.source == "data.txt" - assert config.prefix_tokens == 5 - - @pytest.mark.sanity - def test_parse_yaml_file(self): - """Test parsing YAML file configuration. - - ### WRITTEN BY AI ### - """ - config_data = { - "prompt_tokens": 60, - "output_tokens": 15, - "samples": 100, - "source": "yaml_test.txt", - "prefix_tokens": 3, - } - - with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: - yaml.dump(config_data, f) - yaml_path = f.name - - try: - config = SyntheticDatasetConfig.parse_str(yaml_path) - - assert config.prompt_tokens == 60 - assert config.output_tokens == 15 - assert config.samples == 100 - assert config.source == "yaml_test.txt" - assert config.prefix_tokens == 3 - finally: - Path(yaml_path).unlink() - - @pytest.mark.sanity - def test_parse_config_file(self): - """Test parsing .config file. - - ### WRITTEN BY AI ### - """ - config_data = { - "prompt_tokens": 90, - "output_tokens": 35, - "samples": 150, - "prefix_tokens": 2, - } - - with tempfile.NamedTemporaryFile(mode="w", suffix=".config", delete=False) as f: - yaml.dump(config_data, f) - config_path = f.name - - try: - config = SyntheticDatasetConfig.parse_str(config_path) - - assert config.prompt_tokens == 90 - assert config.output_tokens == 35 - assert config.samples == 150 - assert config.prefix_tokens == 2 - finally: - Path(config_path).unlink() - - @pytest.mark.regression - def test_parse_path_object(self): - """Test parsing with Path object. - - ### WRITTEN BY AI ### - """ - config_data = {"prompt_tokens": 45, "output_tokens": 25} - - with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: - yaml.dump(config_data, f) - yaml_path = Path(f.name) - - try: - config = SyntheticDatasetConfig.parse_str(yaml_path) - assert config.prompt_tokens == 45 - assert config.output_tokens == 25 - finally: - yaml_path.unlink() - - @pytest.mark.sanity - def test_parse_invalid_format(self): - """Test parsing invalid format raises ValueError. - - ### WRITTEN BY AI ### - """ - with pytest.raises(ValueError, match="Unsupported data format"): - SyntheticDatasetConfig.parse_str("invalid_format_string") - - @pytest.mark.sanity - def test_validation_positive_values(self): - """Test that negative or zero values are rejected. 
- - ### WRITTEN BY AI ### - """ - with pytest.raises(ValueError): - SyntheticDatasetConfig(prompt_tokens=0, output_tokens=20) - - with pytest.raises(ValueError): - SyntheticDatasetConfig(prompt_tokens=20, output_tokens=0) - - with pytest.raises(ValueError): - SyntheticDatasetConfig(prompt_tokens=20, output_tokens=10, samples=0) - - with pytest.raises(ValueError): - SyntheticDatasetConfig(prompt_tokens=20, output_tokens=10, prefix_tokens=-1) - - @pytest.mark.regression - def test_validation_optional_positive_values(self): - """Test that optional parameters reject negative values. - - ### WRITTEN BY AI ### - """ - with pytest.raises(ValueError): - SyntheticDatasetConfig( - prompt_tokens=20, output_tokens=10, prompt_tokens_stdev=-1 - ) - - with pytest.raises(ValueError): - SyntheticDatasetConfig( - prompt_tokens=20, output_tokens=10, prompt_tokens_min=-1 - ) - - with pytest.raises(ValueError): - SyntheticDatasetConfig( - prompt_tokens=20, output_tokens=10, output_tokens_max=0 - ) - - @pytest.mark.regression - def test_parse_json_method_directly(self): - """Test parse_json static method directly. - - ### WRITTEN BY AI ### - """ - json_data = {"prompt_tokens": 100, "output_tokens": 50} - json_str = json.dumps(json_data) - - config = SyntheticDatasetConfig.parse_json(json_str) - - assert config.prompt_tokens == 100 - assert config.output_tokens == 50 - - @pytest.mark.regression - def test_parse_key_value_pairs_method_directly(self): - """Test parse_key_value_pairs static method directly. - - ### WRITTEN BY AI ### - """ - kv_str = "prompt_tokens=75,output_tokens=35" - - config = SyntheticDatasetConfig.parse_key_value_pairs(kv_str) - - assert config.prompt_tokens == 75 - assert config.output_tokens == 35 - - @pytest.mark.regression - def test_parse_config_file_method_directly(self): - """Test parse_config_file static method directly. - - ### WRITTEN BY AI ### - """ - config_data = {"prompt_tokens": 65, "output_tokens": 45} - - with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: - yaml.dump(config_data, f) - config_path = f.name - - try: - config = SyntheticDatasetConfig.parse_config_file(config_path) - assert config.prompt_tokens == 65 - assert config.output_tokens == 45 - finally: - Path(config_path).unlink() - - -class TestSyntheticTextItemsGenerator: - """Test cases for SyntheticTextItemsGenerator class. - - ### WRITTEN BY AI ### - """ - - @pytest.fixture - def mock_tokenizer(self): - """Fixture to provide a mocked tokenizer. - - ### WRITTEN BY AI ### - """ - tokenizer = Mock() - tokenizer.get_vocab.return_value = {f"token_{i}": i for i in range(1000)} - tokenizer.encode.side_effect = lambda text: [1, 2, 3] * (len(text) // 10 + 1) - tokenizer.decode.side_effect = ( - lambda tokens, skip_special_tokens=False: " ".join( - f"token_{t}" for t in tokens[:5] - ) - ) - return tokenizer - - @pytest.fixture - def simple_config(self): - """Fixture for simple configuration. - - ### WRITTEN BY AI ### - """ - return SyntheticDatasetConfig( - prompt_tokens=15, - output_tokens=10, - samples=5, - source="The quick brown fox jumps over the lazy dog.", - ) - - @pytest.fixture - def config_with_prefix(self): - """Fixture for configuration with prefix tokens. - - ### WRITTEN BY AI ### - """ - return SyntheticDatasetConfig( - prefix_tokens=3, - prompt_tokens=15, - output_tokens=10, - samples=5, - source="The quick brown fox jumps over the lazy dog.", - ) - - @pytest.fixture - def complex_config(self): - """Fixture for complex configuration with variance. 
- - ### WRITTEN BY AI ### - """ - return SyntheticDatasetConfig( - prompt_tokens=20, - prompt_tokens_stdev=5, - prompt_tokens_min=10, - prompt_tokens_max=30, - output_tokens=15, - output_tokens_stdev=3, - output_tokens_min=10, - output_tokens_max=20, - samples=10, - source="The quick brown fox jumps over the lazy dog.", - ) - - @pytest.mark.smoke - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - def test_generator_initialization( - self, mock_text_creator, simple_config, mock_tokenizer - ): - """Test generator initialization. - - ### WRITTEN BY AI ### - """ - generator = SyntheticTextItemsGenerator( - simple_config, mock_tokenizer, random_seed=42 - ) - - assert generator.config == simple_config - assert generator.processor == mock_tokenizer - assert generator.random_seed == 42 - mock_text_creator.assert_called_once_with(data=simple_config.source) - - @pytest.mark.smoke - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - @patch("guidellm.dataset.synthetic.IntegerRangeSampler") - def test_basic_iteration( - self, mock_sampler, mock_text_creator, simple_config, mock_tokenizer - ): - """Test basic iteration functionality. - - ### WRITTEN BY AI ### - """ - # Setup mocks - mock_text_creator_instance = Mock() - mock_text_creator_instance.words = ["word1", "word2", "word3"] * 100 - mock_text_creator_instance.create_text.return_value = "sample text" - mock_text_creator.return_value = mock_text_creator_instance - - # Mock IntegerRangeSampler to return iterators - def mock_sampler_side_effect(*args, **kwargs): - mock_instance = Mock() - mock_instance.__iter__ = Mock(return_value=iter([15, 15, 15, 15, 15])) - return mock_instance - - mock_sampler.side_effect = mock_sampler_side_effect - - generator = SyntheticTextItemsGenerator( - simple_config, mock_tokenizer, random_seed=42 - ) - - items = list(generator) - - # Verify we get the expected number of items - assert len(items) == simple_config.samples - - # Verify each item has the required keys - for item in items: - assert "prompt" in item - assert "prompt_tokens_count" in item - assert "output_tokens_count" in item - assert isinstance(item["prompt"], str) - assert isinstance(item["prompt_tokens_count"], int) - assert isinstance(item["output_tokens_count"], int) - - @pytest.mark.sanity - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - def test_create_prompt_method( - self, mock_text_creator, simple_config, mock_tokenizer - ): - """Test _create_prompt method. - - ### WRITTEN BY AI ### - """ - mock_text_creator_instance = Mock() - mock_text_creator_instance.words = ["word"] * 100 - mock_text_creator_instance.create_text.return_value = "test text" - mock_text_creator.return_value = mock_text_creator_instance - - mock_tokenizer.encode.return_value = [1, 2, 3] - - generator = SyntheticTextItemsGenerator( - simple_config, mock_tokenizer, random_seed=42 - ) - - # Test normal case - result = generator._create_prompt(5, 0, 42) - assert result == [42, 1, 2, 3] - - # Test zero tokens - result = generator._create_prompt(0, 0, 42) - assert result == [] - - # Test without unique prefix - result = generator._create_prompt(3, 0) - assert result == [1, 2, 3] - - @pytest.mark.regression - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - def test_create_prompt_binary_search( - self, mock_text_creator, simple_config, mock_tokenizer - ): - """Test binary search logic in _create_prompt. 
- - ### WRITTEN BY AI ### - """ - mock_text_creator_instance = Mock() - mock_text_creator_instance.words = ["word"] * 1000 - mock_text_creator_instance.create_text.side_effect = lambda start, length: ( - "text " * max(1, length // 4) - ).strip() - mock_text_creator.return_value = mock_text_creator_instance - - # Mock tokenizer to return different lengths based on input - def mock_encode(text): - return [1] * len(text.split()) - - mock_tokenizer.encode.side_effect = mock_encode - - generator = SyntheticTextItemsGenerator( - simple_config, mock_tokenizer, random_seed=42 - ) - - # Test that binary search finds appropriate length - result = generator._create_prompt(5, 0, 42) - assert len(result) >= 4 # Should include prefix + some tokens - - @pytest.mark.sanity - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - @patch("guidellm.dataset.synthetic.IntegerRangeSampler") - def test_prefix_tokens_integration( - self, mock_sampler, mock_text_creator, config_with_prefix, mock_tokenizer - ): - """Test integration with prefix tokens. - - ### WRITTEN BY AI ### - """ - # Setup mocks - mock_text_creator_instance = Mock() - mock_text_creator_instance.words = ["word"] * 100 - mock_text_creator_instance.create_text.return_value = "sample text" - mock_text_creator.return_value = mock_text_creator_instance - - mock_sampler_instance = Mock() - mock_sampler_instance.__iter__ = Mock(return_value=iter([15, 15, 15, 15, 15])) - mock_sampler.return_value = mock_sampler_instance - - generator = SyntheticTextItemsGenerator( - config_with_prefix, mock_tokenizer, random_seed=42 - ) - - items = list(generator) - - # Verify prompt_tokens_count includes prefix - for item in items: - assert item["prompt_tokens_count"] == config_with_prefix.prefix_tokens + 15 - - @pytest.mark.regression - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - @patch("guidellm.dataset.synthetic.IntegerRangeSampler") - def test_random_seeding_consistency( - self, mock_sampler, mock_text_creator, simple_config, mock_tokenizer - ): - """Test that same seed produces consistent results. 
- - ### WRITTEN BY AI ### - """ - # Setup mocks - mock_text_creator_instance = Mock() - mock_text_creator_instance.words = ["word"] * 100 - mock_text_creator_instance.create_text.return_value = "sample text" - mock_text_creator.return_value = mock_text_creator_instance - - # Create consistent mock sampler behavior - call_count = 0 - - def mock_sampler_side_effect(*args, **kwargs): - nonlocal call_count - mock_instance = Mock() - # Return same sequence for both prompt and output tokens - if call_count % 2 == 0: # prompt tokens - mock_instance.__iter__ = Mock(return_value=iter([15, 16, 17, 18, 19])) - else: # output tokens - mock_instance.__iter__ = Mock(return_value=iter([10, 11, 12, 13, 14])) - call_count += 1 - return mock_instance - - mock_sampler.side_effect = mock_sampler_side_effect - - # Create two generators with same seed - generator1 = SyntheticTextItemsGenerator( - simple_config, mock_tokenizer, random_seed=42 - ) - generator2 = SyntheticTextItemsGenerator( - simple_config, mock_tokenizer, random_seed=42 - ) - - items1 = list(generator1) - items2 = list(generator2) - - # Results should be identical with same seed - assert len(items1) == len(items2) - for item1, item2 in zip(items1, items2): - assert item1["prompt"] == item2["prompt"] - assert item1["prompt_tokens_count"] == item2["prompt_tokens_count"] - assert item1["output_tokens_count"] == item2["output_tokens_count"] - - @pytest.mark.regression - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - @patch("guidellm.dataset.synthetic.IntegerRangeSampler") - def test_variance_configuration( - self, mock_sampler, mock_text_creator, complex_config, mock_tokenizer - ): - """Test that variance configuration is properly used. - - ### WRITTEN BY AI ### - """ - # Setup mocks - mock_text_creator_instance = Mock() - mock_text_creator_instance.words = ["word"] * 100 - mock_text_creator_instance.create_text.return_value = "sample text" - mock_text_creator.return_value = mock_text_creator_instance - - # Fix tokenizer mock to handle the create_text return properly - mock_tokenizer.encode.side_effect = ( - lambda text: [1, 2, 3] if isinstance(text, str) else [1, 2, 3] - ) - - # Setup mock sampler to track calls - def mock_sampler_side_effect(*args, **kwargs): - mock_instance = Mock() - mock_instance.__iter__ = Mock(return_value=iter([20, 18, 22, 19, 21] * 2)) - return mock_instance - - mock_sampler.side_effect = mock_sampler_side_effect - - generator = SyntheticTextItemsGenerator( - complex_config, mock_tokenizer, random_seed=42 - ) - - # Initialize the generator to trigger sampler creation - generator_iter = iter(generator) - next(generator_iter) - - # Verify that IntegerRangeSampler is called with correct parameters - assert mock_sampler.call_count == 2 - - # Check prompt tokens sampler call - prompt_call = mock_sampler.call_args_list[0] - assert prompt_call[1]["average"] == complex_config.prompt_tokens - assert prompt_call[1]["variance"] == complex_config.prompt_tokens_stdev - assert prompt_call[1]["min_value"] == complex_config.prompt_tokens_min - assert prompt_call[1]["max_value"] == complex_config.prompt_tokens_max - assert prompt_call[1]["random_seed"] == 42 - - # Check output tokens sampler call - output_call = mock_sampler.call_args_list[1] - assert output_call[1]["average"] == complex_config.output_tokens - assert output_call[1]["variance"] == complex_config.output_tokens_stdev - assert output_call[1]["min_value"] == complex_config.output_tokens_min - assert output_call[1]["max_value"] == complex_config.output_tokens_max - 
assert output_call[1]["random_seed"] == 43 # 42 + 1 - - @pytest.mark.regression - @patch("guidellm.dataset.synthetic.EndlessTextCreator") - def test_unique_prefix_generation( - self, mock_text_creator, simple_config, mock_tokenizer - ): - """Test that unique prefixes are generated for each request. - - ### WRITTEN BY AI ### - """ - mock_text_creator_instance = Mock() - mock_text_creator_instance.words = ["word"] * 100 - mock_text_creator_instance.create_text.return_value = "sample text" - mock_text_creator.return_value = mock_text_creator_instance - - # Mock the cycle to return predictable values - with patch("guidellm.dataset.synthetic.cycle") as mock_cycle: - mock_cycle.return_value = iter([100, 101, 102, 103, 104]) - - generator = SyntheticTextItemsGenerator( - simple_config, mock_tokenizer, random_seed=42 - ) - - # Access the iterator to trigger the cycle creation - generator_iter = iter(generator) - next(generator_iter) - - # Verify cycle was called with vocab values - mock_cycle.assert_called_once() - - -class TestSyntheticDatasetCreator: - """Test cases for SyntheticDatasetCreator class. - - ### WRITTEN BY AI ### - """ - - @pytest.mark.sanity - def test_is_supported_path_config_file(self): - """Test is_supported with config file paths. - - ### WRITTEN BY AI ### - """ - with tempfile.NamedTemporaryFile(suffix=".config", delete=False) as f: - config_path = Path(f.name) - - try: - assert SyntheticDatasetCreator.is_supported(config_path, None) - finally: - config_path.unlink() - - @pytest.mark.sanity - def test_is_supported_path_yaml_file(self): - """Test is_supported with YAML file paths. - - ### WRITTEN BY AI ### - """ - with tempfile.NamedTemporaryFile(suffix=".yaml", delete=False) as f: - yaml_path = Path(f.name) - - try: - assert SyntheticDatasetCreator.is_supported(yaml_path, None) - finally: - yaml_path.unlink() - - @pytest.mark.smoke - def test_is_supported_json_string(self): - """Test is_supported with JSON string. - - ### WRITTEN BY AI ### - """ - json_str = '{"prompt_tokens": 50, "output_tokens": 25}' - assert SyntheticDatasetCreator.is_supported(json_str, None) - - @pytest.mark.smoke - def test_is_supported_key_value_string(self): - """Test is_supported with key-value string. - - ### WRITTEN BY AI ### - """ - kv_str = "prompt_tokens=50,output_tokens=25" - assert SyntheticDatasetCreator.is_supported(kv_str, None) - - @pytest.mark.sanity - def test_is_supported_config_filename_string(self): - """Test is_supported with config filename string. - - ### WRITTEN BY AI ### - """ - assert SyntheticDatasetCreator.is_supported("config.yaml", None) - assert SyntheticDatasetCreator.is_supported("settings.config", None) - - @pytest.mark.sanity - def test_is_not_supported_regular_string(self): - """Test is_supported returns False for regular strings. - - ### WRITTEN BY AI ### - """ - assert not SyntheticDatasetCreator.is_supported("regular string", None) - assert not SyntheticDatasetCreator.is_supported("single=pair", None) - - @pytest.mark.regression - def test_is_not_supported_non_existent_path(self): - """Test is_supported returns False for non-existent paths. - - ### WRITTEN BY AI ### - """ - non_existent_path = Path("/non/existent/path.config") - assert not SyntheticDatasetCreator.is_supported(non_existent_path, None) - - @pytest.mark.regression - def test_is_not_supported_other_types(self): - """Test is_supported returns False for other data types. 
- - ### WRITTEN BY AI ### - """ - assert not SyntheticDatasetCreator.is_supported(123, None) - assert not SyntheticDatasetCreator.is_supported(["list"], None) - assert not SyntheticDatasetCreator.is_supported({"dict": "value"}, None) - - @pytest.mark.smoke - @patch("guidellm.dataset.synthetic.check_load_processor") - @patch("guidellm.dataset.synthetic.SyntheticTextItemsGenerator") - @patch("guidellm.dataset.synthetic.Dataset") - def test_handle_create_basic( - self, mock_dataset, mock_generator, mock_check_processor - ): - """Test handle_create basic functionality. - - ### WRITTEN BY AI ### - """ - # Setup mocks - mock_processor = Mock() - mock_check_processor.return_value = mock_processor - - mock_generator_instance = Mock() - mock_generator_instance.__iter__ = Mock( - return_value=iter( - [ - { - "prompt": "test", - "prompt_tokens_count": 10, - "output_tokens_count": 5, - } - ] - ) - ) - mock_generator.return_value = mock_generator_instance - - mock_dataset_instance = Mock() - mock_dataset.from_list.return_value = mock_dataset_instance - - # Test - data = '{"prompt_tokens": 50, "output_tokens": 25}' - result = SyntheticDatasetCreator.handle_create( - data=data, - data_args=None, - processor="gpt2", - processor_args=None, - random_seed=42, - ) - - # Verify - mock_check_processor.assert_called_once_with( - "gpt2", - None, - error_msg="Processor/tokenizer required for synthetic dataset generation.", - ) - mock_generator.assert_called_once() - mock_dataset.from_list.assert_called_once() - assert result == mock_dataset_instance - - @pytest.mark.sanity - @patch("guidellm.dataset.synthetic.check_load_processor") - def test_handle_create_processor_required(self, mock_check_processor): - """Test handle_create requires processor. - - ### WRITTEN BY AI ### - """ - mock_check_processor.side_effect = ValueError("Processor required") - - data = '{"prompt_tokens": 50, "output_tokens": 25}' - - with pytest.raises(ValueError, match="Processor required"): - SyntheticDatasetCreator.handle_create( - data=data, - data_args=None, - processor=None, - processor_args=None, - random_seed=42, - ) - - @pytest.mark.regression - @patch("guidellm.dataset.synthetic.check_load_processor") - @patch("guidellm.dataset.synthetic.SyntheticTextItemsGenerator") - @patch("guidellm.dataset.synthetic.Dataset") - def test_handle_create_with_data_args( - self, mock_dataset, mock_generator, mock_check_processor - ): - """Test handle_create with data_args. - - ### WRITTEN BY AI ### - """ - # Setup mocks - mock_processor = Mock() - mock_check_processor.return_value = mock_processor - - mock_generator_instance = Mock() - mock_generator_instance.__iter__ = Mock(return_value=iter([])) - mock_generator.return_value = mock_generator_instance - - mock_dataset_instance = Mock() - mock_dataset.from_list.return_value = mock_dataset_instance - - # Test with data_args - data = '{"prompt_tokens": 50, "output_tokens": 25}' - data_args = {"features": "custom_features"} - - SyntheticDatasetCreator.handle_create( - data=data, - data_args=data_args, - processor="gpt2", - processor_args=None, - random_seed=42, - ) - - # Verify data_args are passed to Dataset.from_list - mock_dataset.from_list.assert_called_once_with([], **data_args) - - @pytest.mark.sanity - def test_extract_args_column_mappings_empty(self): - """Test extract_args_column_mappings with empty data_args. 
- - ### WRITTEN BY AI ### - """ - result = SyntheticDatasetCreator.extract_args_column_mappings(None) - - expected = { - "prompt_column": "prompt", - "prompt_tokens_count_column": "prompt_tokens_count", - "output_tokens_count_column": "output_tokens_count", - } - assert result == expected - - @pytest.mark.regression - def test_extract_args_column_mappings_with_parent_mappings(self): - """Test extract_args_column_mappings rejects column mappings. - - ### WRITTEN BY AI ### - """ - with ( - patch.object( - SyntheticDatasetCreator.__bases__[0], - "extract_args_column_mappings", - return_value={"prompt_column": "custom_prompt"}, - ), - pytest.raises(ValueError, match="Column mappings are not supported"), - ): - SyntheticDatasetCreator.extract_args_column_mappings({"some": "args"}) - - @pytest.mark.regression - def test_extract_args_column_mappings_no_parent_mappings(self): - """Test extract_args_column_mappings with no parent mappings. - - ### WRITTEN BY AI ### - """ - with patch.object( - SyntheticDatasetCreator.__bases__[0], - "extract_args_column_mappings", - return_value={}, - ): - result = SyntheticDatasetCreator.extract_args_column_mappings( - {"some": "args"} - ) - - expected = { - "prompt_column": "prompt", - "prompt_tokens_count_column": "prompt_tokens_count", - "output_tokens_count_column": "output_tokens_count", - } - assert result == expected From bbca65a81c60d0b7336d219d9546793103c4369b Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Tue, 7 Oct 2025 10:21:42 -0400 Subject: [PATCH 44/90] Simplifications for new data pathways and reenablement of completions and chat completions pathways --- pyproject.toml | 16 +- src/guidellm/__main__.py | 124 ++++--- src/guidellm/benchmark/aggregator.py | 12 +- src/guidellm/benchmark/entrypoints.py | 118 ++++--- src/guidellm/data/__init__.py | 32 +- src/guidellm/data/collators.py | 16 + src/guidellm/data/datasets.py | 88 ----- .../data/deserializers/deserializer.py | 60 ++-- src/guidellm/data/deserializers/file.py | 16 +- .../data/deserializers/huggingface.py | 2 +- src/guidellm/data/deserializers/memory.py | 12 +- src/guidellm/data/deserializers/synthetic.py | 19 +- src/guidellm/data/formatters/__init__.py | 47 --- src/guidellm/data/formatters/environment.py | 63 ---- src/guidellm/data/formatters/globals.py | 9 - src/guidellm/data/formatters/objects.py | 92 ------ src/guidellm/data/formatters/templates.py | 182 ----------- src/guidellm/data/loaders.py | 136 ++++---- src/guidellm/data/objects.py | 121 ++----- src/guidellm/data/preprocessors/__init__.py | 20 +- src/guidellm/data/preprocessors/formatters.py | 303 ++++++++++++++++++ src/guidellm/data/preprocessors/mappers.py | 257 +++++++++------ src/guidellm/data/preprocessors/objects.py | 20 -- .../data/preprocessors/preprocessor.py | 29 ++ src/guidellm/data/processor.py | 30 ++ src/guidellm/data/utils/__init__.py | 34 ++ .../data/{utils.py => utils/dataset.py} | 78 +---- .../filters.py => utils/functions.py} | 156 +++++---- src/guidellm/scheduler/worker.py | 12 +- src/guidellm/scheduler/worker_group.py | 1 + src/guidellm/settings.py | 2 +- src/guidellm/utils/messaging.py | 4 + 32 files changed, 1000 insertions(+), 1111 deletions(-) create mode 100644 src/guidellm/data/collators.py delete mode 100644 src/guidellm/data/datasets.py delete mode 100644 src/guidellm/data/formatters/__init__.py delete mode 100644 src/guidellm/data/formatters/environment.py delete mode 100644 src/guidellm/data/formatters/globals.py delete mode 100644 src/guidellm/data/formatters/objects.py delete mode 100644 
src/guidellm/data/formatters/templates.py create mode 100644 src/guidellm/data/preprocessors/formatters.py delete mode 100644 src/guidellm/data/preprocessors/objects.py create mode 100644 src/guidellm/data/preprocessors/preprocessor.py create mode 100644 src/guidellm/data/processor.py create mode 100644 src/guidellm/data/utils/__init__.py rename src/guidellm/data/{utils.py => utils/dataset.py} (54%) rename src/guidellm/data/{formatters/filters.py => utils/functions.py} (73%) diff --git a/pyproject.toml b/pyproject.toml index 7237e66d..3461530d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,25 +66,21 @@ dependencies = [ "protobuf", "pydantic>=2.11.7", "pydantic-settings>=2.0.0", + "pydub", "pyyaml>=6.0.0", "rich", "sanic", "transformers", "uvloop>=0.18", "librosa>=0.11.0", - "torch>=2.8.0", + "torch", ] [project.optional-dependencies] -perf = [ - "orjson", - "msgpack", - "msgspec", - "uvloop", -] +perf = ["orjson", "msgpack", "msgspec", "uvloop"] recommended = [ - "tiktoken>=0.11.0", # For OpenAI tokenizer - "blobfile>=3.1.0", # For OpenAI tokenizer + "tiktoken>=0.11.0", # For OpenAI tokenizer + "blobfile>=3.1.0", # For OpenAI tokenizer ] dev = [ # build @@ -127,7 +123,7 @@ dev = [ ] [dependency-groups] -dev = [ "guidellm[dev]" ] +dev = ["guidellm[dev]"] [project.urls] homepage = "https://github.com/vllm-project/guidellm" diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 82632bc8..43939fa7 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -56,11 +56,7 @@ from guidellm.benchmark.scenario import ( GenerativeTextScenario, ) -from guidellm.data import ( - GenerativeDatasetArgs, - GenerativeRequestFormatter, - GenerativeRequestType, -) +from guidellm.data import GenerativeRequestType from guidellm.mock_server import MockServer, MockServerConfig from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset from guidellm.scheduler import StrategyType @@ -177,12 +173,6 @@ def benchmark(): "For rate-type=synchronous,throughput, this must not be set." ), ) -@click.option( - "--random-seed", - default=GenerativeTextScenario.get_default("random_seed"), - type=int, - help="The random seed to use for benchmarking to ensure reproducibility.", -) # Backend configuration @click.option( "--backend", @@ -216,6 +206,24 @@ def benchmark(): ), ) # Data configuration +@click.option( + "--request-type", + default="chat_completions", + type=click.Choice(list(get_literal_vals(GenerativeRequestType))), + help=( + "The type of request to create for each data sample and send to the backend. " + f"Supported types: {list(get_literal_vals(GenerativeRequestType))}." + ), +) +@click.option( + "--request-formatter-kwargs", + default=None, + callback=cli_tools.parse_json, + help=( + "A JSON string containing any arguments to pass to the request formatter " + "as a dict with **kwargs." + ), +) @click.option( "--processor", default=None, @@ -238,16 +246,7 @@ def benchmark(): @click.option( "--data-args", default=None, - callback=( - lambda _ctx, _param, value: [ - GenerativeDatasetArgs.model_validate_json(val) - if val - else GenerativeDatasetArgs() - for val in value - ] - if value - else None - ), + callback=cli_tools.parse_json, help=( "A JSON string containing any arguments to pass to the dataset creation " "as a dict with **kwargs." @@ -259,43 +258,30 @@ def benchmark(): type=int, help=( "The number of samples to use from the dataset. If -1 (default), will use all " - "samples in the dataset." + "samples in the dataset and dynamically generate samples. 
" + "If >1, will precompile that number of items from the dataset configs." ), ) @click.option( - "--data-sampler", - default=None, - type=click.Choice(["shuffle"]), - help="The data sampler type to use.", -) -@click.option( - "--data-request-type", - default="text_completions", - type=str, - help=( - "The type of request to create for each data sample. " - f"For example, {list(get_literal_vals(GenerativeRequestType))}." - ), -) -@click.option( - "--data-request-template", + "--data-column-mappings", default=None, + callback=cli_tools.parse_json, help=( - "A Jinja2 template string or path to a Jinja2 template file to use for " - "creating requests from the data samples. If not provided, will use a " - "default template based on the request type." + "A JSON string of column mappings to apply to the dataset to map into request " + "column types." ), ) @click.option( - "--data-request-extras", + "--data-sampler", default=None, - callback=cli_tools.parse_json, - help=("A JSON string of extra data to include with each data request."), + type=click.Choice(["shuffle"]), + help="The data sampler type to use.", ) @click.option( - "--data-request-nonstreaming", - is_flag=True, - help="Set this flag to disable streaming for the data requests.", + "--data-num-workers", + default=1, + type=int, + help="The number of worker processes to use for data loading.", ) @click.option( "--dataloader_kwargs", @@ -306,6 +292,12 @@ def benchmark(): "as a dict with **kwargs." ), ) +@click.option( + "--random-seed", + default=GenerativeTextScenario.get_default("random_seed"), + type=int, + help="The random seed to use for benchmarking to ensure reproducibility.", +) # Output configuration @click.option( "--output-path", @@ -435,22 +427,22 @@ def run( data, profile, rate, - random_seed, # Backend Configuration backend, backend_kwargs, model, # Data configuration + request_type, + request_formatter_kwargs, processor, processor_args, data_args, data_samples, + data_column_mappings, data_sampler, - data_request_type, - data_request_template, - data_request_extras, - data_request_nonstreaming, + data_num_workers, dataloader_kwargs, + random_seed, # Output configuration output_path, output_formats, @@ -478,6 +470,12 @@ def run( Supports multiple backends, data sources, output formats, and constraint types for flexible benchmark configuration. 
""" + data_request_formatter = ( + request_type + if not request_formatter_kwargs + else {"request_type": request_type, **request_formatter_kwargs} + ) + if HAS_UVLOOP: asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) asyncio.run( @@ -487,7 +485,6 @@ def run( # Benchmark configuration profile=profile, rate=rate, - random_seed=random_seed, # Backend configuration backend=backend, backend_kwargs=backend_kwargs, @@ -497,21 +494,12 @@ def run( processor_args=processor_args, data_args=data_args, data_samples=data_samples, - data_column_mapper=None, # use default - data_request_formatter=GenerativeRequestFormatter( - request_type=data_request_type, - request_template=data_request_template, - request_extras=data_request_extras, - request_defaults=( - {} # disable defaults if non-streaming - if data_request_nonstreaming - else None - ), - ), - data_preprocessors=None, # no preprocessors through CLI for now - dataloader_sampler=data_sampler, - dataloader_collate_fn=None, # use default + data_column_mapper=data_column_mappings, + data_request_formatter=data_request_formatter, + data_sampler=data_sampler, + data_num_workers=data_num_workers, dataloader_kwargs=dataloader_kwargs, + random_seed=random_seed, # Output configuration output_path=output_path, output_formats=[ @@ -534,7 +522,7 @@ def run( add_aggregators={"extras": InjectExtrasAggregator(extras=output_extras)}, warmup=warmup, cooldown=cooldown, - request_samples=request_samples, + sample_requests=request_samples, # Constraints configuration max_seconds=max_seconds, max_requests=max_requests, diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index 3040ad36..562fc36c 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -678,20 +678,20 @@ class GenerativeRequestsAggregator( @classmethod def validated_kwargs( cls, - request_samples: int | None = 20, + sample_requests: int | None = 20, warmup: int | float | None = None, cooldown: int | float | None = None, **_kwargs, ) -> dict[str, Any]: return { - "request_samples": request_samples, + "sample_requests": sample_requests, "warmup": warmup, "cooldown": cooldown, } type_: Literal["generative_requests"] = Field(default="generative_requests") - request_samples: int | None = Field(default=20, description="") + sample_requests: int | None = Field(default=20, description="") warmup: int | float | None = Field( default=None, description="Number of warmup requests to ignore at benchmark start", @@ -828,9 +828,9 @@ def compile( list[GenerativeRequestStats], list[GenerativeRequestStats], ]( - successful=self._sample_request_stats(successful, self.request_samples), - incomplete=self._sample_request_stats(incomplete, self.request_samples), - errored=self._sample_request_stats(errored, self.request_samples), + successful=self._sample_request_stats(successful, self.sample_requests), + incomplete=self._sample_request_stats(incomplete, self.sample_requests), + errored=self._sample_request_stats(errored, self.sample_requests), ), "metrics": GenerativeMetrics( requests_per_second=self._calculate_requests_per_second( diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 23bc985a..e400907a 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -1,11 +1,10 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Literal +from typing import Any, Callable, Literal from torch.utils.data import Sampler from transformers 
import ( # type: ignore[import] - AutoTokenizer, PreTrainedTokenizerBase, ) @@ -35,13 +34,13 @@ BenchmarkerProgressGroup, ) from guidellm.data import ( + DataLoader, DatasetPreprocessor, - GenerativeColumnMapper, - GenerativeDataLoader, GenerativeRequestCollator, - GenerativeRequestFormatter, + PreprocessorRegistry, + ProcessorFactory, ) -from guidellm.data.objects import GenerativeDatasetArgs +from guidellm.data.preprocessors import GenerativeColumnMapper from guidellm.scheduler import ( ConstraintInitializer, NonDistributedEnvironment, @@ -59,14 +58,13 @@ # @validate_call(config={"arbitrary_types_allowed": True}) -async def benchmark_generative_text( # noqa: C901, PLR0915 +async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 # Required target: str, data: list[Any], # Benchmark configuration profile: StrategyType | ProfileType | Profile = "sweep", rate: float | list[float] | None = None, - random_seed: int = 42, # Backend configuration backend: BackendType | Backend = "openai_http", backend_kwargs: dict[str, Any] | None = None, @@ -74,14 +72,19 @@ async def benchmark_generative_text( # noqa: C901, PLR0915 # Data configuration processor: str | Path | PreTrainedTokenizerBase | None = None, processor_args: dict[str, Any] | None = None, - data_args: list[GenerativeDatasetArgs] | None = None, + data_args: list[dict[str, Any]] | None = None, data_samples: int = -1, - data_column_mapper: GenerativeColumnMapper | None = None, - data_preprocessors: list[DatasetPreprocessor] | None = None, - data_request_formatter: GenerativeRequestFormatter | None = None, - dataloader_sampler: Sampler[int] | Literal["shuffle"] | None = None, - dataloader_collate_fn: GenerativeRequestCollator | None = None, + data_column_mapper: ( + DatasetPreprocessor | dict[str, str] | Literal["generative_column_mapper"] + ) = "generative_column_mapper", + data_request_formatter: ( + DatasetPreprocessor | dict[str, str] | str + ) = "chat_completions", + data_collator: Callable | Literal["generative"] | None = "generative", + data_sampler: Sampler[int] | Literal["shuffle"] | None = None, + data_num_workers: int | None = 1, dataloader_kwargs: dict[str, Any] | None = None, + random_seed: int = 42, # Output configuration output_path: str | Path | None = _CURRENT_WORKING_DIR, output_formats: ( @@ -99,7 +102,7 @@ async def benchmark_generative_text( # noqa: C901, PLR0915 ) = None, warmup: float | None = None, cooldown: float | None = None, - request_samples: int | None = 20, + sample_requests: int | None = 10, # Constraints configuration max_seconds: int | float | None = None, max_requests: int | None = None, @@ -123,8 +126,17 @@ async def benchmark_generative_text( # noqa: C901, PLR0915 console_step.update(f"{backend.__class__.__name__} backend initialized") await backend.process_startup() await backend.validate() + if model is None: + console_step.update( + title="Resolving default model from backend.default_model", + status_level="info", + ) + model = await backend.default_model() + await backend.process_shutdown() console_step.finish( - title=f"{backend.__class__.__name__} backend initialized", + title=( + f"{backend.__class__.__name__} backend validated with model {model}" + ), details=backend.info, status_level="success", ) @@ -136,54 +148,56 @@ async def benchmark_generative_text( # noqa: C901, PLR0915 details=f"Using processor '{processor}'", status_level="success", ) - elif model is not None: - console_step.finish( - title="Processor resolved", - details=f"Using model '{model}' as processor", - 
status_level="success", - ) - processor = model else: - console_step.update( - title="Resolving processor from backend.default_model", - status_level="info", - ) - processor = await backend.default_model() + processor = model console_step.finish( title="Processor resolved", - details=( - f"Using model '{processor}' from backend " - f"{backend.__class__.__name__} as processor" - ), + details=f"Using model '{processor}' as processor", status_level="success", ) - await backend.process_shutdown() with console.print_update_step( title=f"Initializing request loader from {data}" ) as console_step: + if not isinstance(data_column_mapper, DatasetPreprocessor): + column_mappings = ( + data_column_mapper if isinstance(data_column_mapper, dict) else None + ) + data_column_mapper = GenerativeColumnMapper( + column_mappings=column_mappings, + ) + if not isinstance(data_request_formatter, DatasetPreprocessor): + request_type = ( + data_request_formatter + if isinstance(data_request_formatter, str) + else data_request_formatter.pop("request_type", "chat_completions") + ) + data_request_formatter = PreprocessorRegistry.get_registered_object( + request_type + )( + model=model, + **( + data_request_formatter + if isinstance(data_request_formatter, dict) + else {} + ), + ) - def processor_factory() -> PreTrainedTokenizerBase: - nonlocal processor - if isinstance(processor, PreTrainedTokenizerBase): - return processor - else: - processor = AutoTokenizer.from_pretrained( - processor, - **(processor_args or {}), - ) - return processor - - request_loader = GenerativeDataLoader( + request_loader = DataLoader( data=data, data_args=data_args, data_samples=data_samples, - processor_factory=processor_factory, - column_mapper=data_column_mapper or GenerativeColumnMapper(), - preprocessors=data_preprocessors or [], - request_formatter=data_request_formatter or GenerativeRequestFormatter(), - sampler=dataloader_sampler, - collate_fn=dataloader_collate_fn, + processor_factory=ProcessorFactory( + processor=processor, processor_args=processor_args + ), + preprocessors=[data_column_mapper, data_request_formatter], + collator=( + data_collator + if callable(data_collator) + else GenerativeRequestCollator() + ), + sampler=data_sampler, + num_workers=data_num_workers, random_seed=random_seed, **(dataloader_kwargs or {}), ) @@ -234,7 +248,7 @@ def processor_factory() -> PreTrainedTokenizerBase: "scheduler_stats": SchedulerStatsAggregator(), "requests_progress": GenerativeStatsProgressAggregator(), "requests": GenerativeRequestsAggregator( - request_samples=request_samples, + request_samples=sample_requests, warmup=warmup, cooldown=cooldown, ), diff --git a/src/guidellm/data/__init__.py b/src/guidellm/data/__init__.py index 282c5b59..d25c719a 100644 --- a/src/guidellm/data/__init__.py +++ b/src/guidellm/data/__init__.py @@ -1,31 +1,29 @@ -from .datasets import GenerativeRequestsDataset +from .collators import GenerativeRequestCollator from .deserializers import ( DataNotSupportedError, DatasetDeserializer, DatasetDeserializerFactory, ) -from .formatters import ( - GenerativeRequestFormatter, - JinjaEnvironmentMixin, - JinjaFiltersRegistry, - JinjaGlobalsRegistry, - JinjaTemplatesRegistry, -) -from .loaders import GenerativeDataLoader, GenerativeRequestCollator +from .loaders import DataLoader from .objects import ( GenerationRequest, GenerationRequestArguments, GenerationRequestTimings, - GenerativeDatasetArgs, GenerativeDatasetColumnType, GenerativeRequestType, ) from .preprocessors import ( + DataDependentPreprocessor, 
DatasetPreprocessor, - GenerativeColumnMapper, + PreprocessorRegistry, ) +from .processor import ProcessorFactory __all__ = [ + "ColumnMapper", + "ColumnMapperRegistry", + "DataDependentPreprocessor", + "DataLoader", "DataNotSupportedError", "DatasetDeserializer", "DatasetDeserializerFactory", @@ -33,16 +31,12 @@ "GenerationRequest", "GenerationRequestArguments", "GenerationRequestTimings", - "GenerativeColumnMapper", - "GenerativeDataLoader", "GenerativeDatasetArgs", "GenerativeDatasetColumnType", "GenerativeRequestCollator", - "GenerativeRequestFormatter", "GenerativeRequestType", - "GenerativeRequestsDataset", - "JinjaEnvironmentMixin", - "JinjaFiltersRegistry", - "JinjaGlobalsRegistry", - "JinjaTemplatesRegistry", + "PreprocessorRegistry", + "ProcessorFactory", + "RequestFormatter", + "RequestFormatterRegistry", ] diff --git a/src/guidellm/data/collators.py b/src/guidellm/data/collators.py new file mode 100644 index 00000000..4d12f0c0 --- /dev/null +++ b/src/guidellm/data/collators.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from guidellm.data.objects import GenerationRequest + +__all__ = ["GenerativeRequestCollator"] + + +class GenerativeRequestCollator: + def __call__(self, batch: list) -> GenerationRequest: + if len(batch) != 1: + raise NotImplementedError( + f"Batch size greater than 1 is not currently supported. " + f"Got batch size: {len(batch)}" + ) + + return batch[0] diff --git a/src/guidellm/data/datasets.py b/src/guidellm/data/datasets.py deleted file mode 100644 index 8c24683c..00000000 --- a/src/guidellm/data/datasets.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable -from typing import Any - -from datasets import Dataset, IterableDataset -from transformers import PreTrainedTokenizerBase - -from guidellm.data.deserializers import DatasetDeserializerFactory -from guidellm.data.formatters import GenerativeRequestFormatter -from guidellm.data.objects import GenerativeDatasetArgs -from guidellm.data.preprocessors import ( - DatasetPreprocessor, - GenerativeColumnMapper, -) -from guidellm.data.utils import datasets_item_iterator, resolve_dataset_split - -__all__ = ["GenerativeRequestsDataset"] - - -class GenerativeRequestsDataset: - @classmethod - def build( - cls, - data: list[Any], - data_args: list[GenerativeDatasetArgs] | None, - data_samples: int, - processor_factory: Callable[[], PreTrainedTokenizerBase], - column_mapper: GenerativeColumnMapper, - preprocessors: list[DatasetPreprocessor], - request_formatter: GenerativeRequestFormatter, - random_seed: int = 42, - ) -> Dataset | IterableDataset: - if not data or not isinstance(data, list): - raise ValueError(f"Data must be a non-empty list, got {data}.") - - if data_args is None: - data_args = [GenerativeDatasetArgs() for _ in data] - - if len(data) != len(data_args): - raise ValueError( - f"Length of data ({len(data)}) must match length of data_args " - f"({len(data_args)})." 
- ) - - datasets = [] - for datum, args in zip(data, data_args): - datasets.append( - resolve_dataset_split( - dataset=DatasetDeserializerFactory.deserialize( - data=datum, - data_kwargs=args.to_kwargs(), - processor_factory=processor_factory, - random_seed=random_seed, - type_=args.type_, - ), - split=args.split, - ) - ) - - column_mapper.init_data(datasets=datasets, data_args=data_args) - request_formatter.init_data(datasets=datasets, data_args=data_args) - for preprocessor in preprocessors: - preprocessor.init_data(datasets=datasets, data_args=data_args) - - if data_samples > 0: - dataset = Dataset.from_list( - list( - datasets_item_iterator( - datasets=datasets, - data_samples=data_samples, - ) - ) - ) - else: - dataset = IterableDataset.from_generator( - datasets_item_iterator, - gen_kwargs={ - "datasets": datasets, - "data_samples": data_samples, - }, - ) - - dataset = dataset.map(column_mapper) - for preprocessor in preprocessors: - dataset = dataset.map(preprocessor) - - return dataset.map(request_formatter) diff --git a/src/guidellm/data/deserializers/deserializer.py b/src/guidellm/data/deserializers/deserializer.py index ed9050a1..c7e2f1da 100644 --- a/src/guidellm/data/deserializers/deserializer.py +++ b/src/guidellm/data/deserializers/deserializer.py @@ -4,9 +4,10 @@ from collections.abc import Callable from typing import Any, Protocol, Union, runtime_checkable -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict +from datasets import Dataset, IterableDataset from transformers import PreTrainedTokenizerBase +from guidellm.data.utils import resolve_dataset_split from guidellm.utils import RegistryMixin __all__ = [ @@ -25,9 +26,9 @@ class DatasetDeserializer(Protocol): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: ... @@ -38,44 +39,43 @@ class DatasetDeserializerFactory( def deserialize( cls, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int = 42, type_: str | None = None, - ) -> Dataset | IterableDataset | DatasetDict | IterableDatasetDict: - if type_ is not None: - deserializer = cls.get_registered_object(type_) + resolve_split: bool = True, + **data_kwargs: dict[str, Any], + ) -> Dataset | IterableDataset: + dataset = None - if deserializer is None: - raise DataNotSupportedError( - f"Deserializer type '{type_}' is not registered. 
" - f"Available types: {cls.registry}" + if type_ is None: + for deserializer in cls.registered_objects(): + deserializer_fn: DatasetDeserializer = ( + deserializer() if isinstance(deserializer, type) else deserializer ) - elif isinstance(deserializer, type): - deserializer_fn = deserializer() - else: - deserializer_fn = deserializer - return deserializer_fn( + with contextlib.suppress(DataNotSupportedError): + dataset = deserializer_fn( + data=data, + processor_factory=processor_factory, + random_seed=random_seed, + **data_kwargs, + ) + elif deserializer := cls.get_registered_object(type_) is not None: + deserializer_fn: DatasetDeserializer = ( + deserializer() if isinstance(deserializer, type) else deserializer + ) + + dataset = deserializer_fn( data=data, - data_kwargs=data_kwargs, processor_factory=processor_factory, random_seed=random_seed, + **data_kwargs, ) - for deserializer in cls.registered_objects(): - deserializer_fn: DatasetDeserializer = ( - deserializer() if isinstance(deserializer, type) else deserializer + if dataset is None: + raise DataNotSupportedError( + f"No suitable deserializer found for data {data} " + f"with kwargs {data_kwargs} and type_ {type_}." ) - with contextlib.suppress(DataNotSupportedError): - return deserializer_fn( - data=data, - data_kwargs=data_kwargs, - processor_factory=processor_factory, - random_seed=random_seed, - ) - - raise DataNotSupportedError( - f"No suitable deserializer found for data {data} with kwargs {data_kwargs}." - ) + return resolve_dataset_split(dataset) if resolve_split else dataset diff --git a/src/guidellm/data/deserializers/file.py b/src/guidellm/data/deserializers/file.py index 53688cf0..54b18edb 100644 --- a/src/guidellm/data/deserializers/file.py +++ b/src/guidellm/data/deserializers/file.py @@ -30,9 +30,9 @@ class TextFileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) # Ignore unused args format errors @@ -58,9 +58,9 @@ class CSVFileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) if ( @@ -82,9 +82,9 @@ class JSONFileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) if ( @@ -106,9 +106,9 @@ class ParquetFileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) if ( @@ -130,9 +130,9 @@ class ArrowFileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) if ( @@ -154,9 +154,9 @@ class HDF5FileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], 
PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) if ( @@ -178,9 +178,9 @@ class DBFileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) if ( @@ -202,9 +202,9 @@ class TarFileDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) if ( diff --git a/src/guidellm/data/deserializers/huggingface.py b/src/guidellm/data/deserializers/huggingface.py index 275f7180..3e0cf090 100644 --- a/src/guidellm/data/deserializers/huggingface.py +++ b/src/guidellm/data/deserializers/huggingface.py @@ -27,9 +27,9 @@ class HuggingFaceDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) diff --git a/src/guidellm/data/deserializers/memory.py b/src/guidellm/data/deserializers/memory.py index b04ea6bc..ddca64a9 100644 --- a/src/guidellm/data/deserializers/memory.py +++ b/src/guidellm/data/deserializers/memory.py @@ -29,9 +29,9 @@ class InMemoryDictDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) # Ignore unused args format errors @@ -63,9 +63,9 @@ class InMemoryDictListDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) # Ignore unused args format errors @@ -104,9 +104,9 @@ class InMemoryItemListDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: _ = (processor_factory, random_seed) # Ignore unused args format errors @@ -131,9 +131,9 @@ class InMemoryJsonStrDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: if ( isinstance(data, str) @@ -167,9 +167,9 @@ class InMemoryCsvDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> dict[str, list]: if ( isinstance(data, str) @@ -182,7 +182,7 @@ def __call__( rows = list(reader) return InMemoryDictListDatasetDeserializer()( - rows, data_kwargs, processor_factory, random_seed + rows, processor_factory, random_seed, **data_kwargs ) raise DataNotSupportedError( diff --git a/src/guidellm/data/deserializers/synthetic.py b/src/guidellm/data/deserializers/synthetic.py index a071eeea..c2078f1a 
100644 --- a/src/guidellm/data/deserializers/synthetic.py +++ b/src/guidellm/data/deserializers/synthetic.py @@ -4,7 +4,7 @@ from collections.abc import Iterator from pathlib import Path from random import Random -from typing import Any, Callable, Self +from typing import Any, Callable import yaml from datasets import Features, IterableDataset, Value @@ -98,7 +98,7 @@ class SyntheticTextDatasetConfig(StandardBaseModel): ) @model_validator(mode="after") - def check_prefix_options(self) -> Self: + def check_prefix_options(self) -> SyntheticTextDatasetConfig: prefix_count = self.__pydantic_extra__.get("prefix_count", None) # type: ignore[attr-defined] prefix_tokens = self.__pydantic_extra__.get("prefix_count", None) # type: ignore[attr-defined] if prefix_count is not None or prefix_tokens is not None: @@ -226,17 +226,17 @@ class SyntheticTextDatasetDeserializer(DatasetDeserializer): def __call__( self, data: Any, - data_kwargs: dict[str, Any], processor_factory: Callable[[], PreTrainedTokenizerBase], random_seed: int, + **data_kwargs: dict[str, Any], ) -> IterableDataset: # Config file pathways, deserialize and call self again if (config := self._load_config_file(data)) is not None: - return self(config, data_kwargs, processor_factory, random_seed) + return self(config, processor_factory, random_seed, **data_kwargs) # Config str pathways, deserialize and call self again if (config := self._load_config_str(data)) is not None: - return self(config, data_kwargs, processor_factory, random_seed) + return self(config, processor_factory, random_seed, **data_kwargs) if not isinstance(data, SyntheticTextDatasetConfig): raise DataNotSupportedError( @@ -246,9 +246,12 @@ def __call__( ) return IterableDataset.from_generator( - lambda: SyntheticTextGenerator( - config=data, processor=processor_factory(), random_seed=random_seed - ), + SyntheticTextGenerator, + gen_kwargs={ + "config": data, + "processor": processor_factory(), + "random_seed": random_seed, + }, features=Features( { "prefix": Value("string"), diff --git a/src/guidellm/data/formatters/__init__.py b/src/guidellm/data/formatters/__init__.py deleted file mode 100644 index 0a5ccbc9..00000000 --- a/src/guidellm/data/formatters/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -from .environment import JinjaEnvironmentMixin -from .filters import ( - JinjaFiltersRegistry, - download_audio, - download_image, - download_video, - encode_audio, - encode_image, - encode_image_base64, - encode_video, - encode_video_base64, - get_file_format, - is_url, - resize_image, -) -from .globals import JinjaGlobalsRegistry -from .objects import GenerativeRequestFormatter -from .templates import ( - DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE, - DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE, - DEFAULT_CHAT_COMPLETIONS_TEMPLATE, - DEFAULT_TEXT_COMPLETIONS_TEMPLATE, - JinjaTemplatesRegistry, -) - -__all__ = [ - "DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE", - "DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE", - "DEFAULT_CHAT_COMPLETIONS_TEMPLATE", - "DEFAULT_TEXT_COMPLETIONS_TEMPLATE", - "GenerativeRequestFormatter", - "JinjaEnvironmentMixin", - "JinjaFiltersRegistry", - "JinjaGlobalsRegistry", - "JinjaTemplatesRegistry", - "download_audio", - "download_image", - "download_video", - "encode_audio", - "encode_image", - "encode_image_base64", - "encode_video", - "encode_video_base64", - "get_file_format", - "is_url", - "resize_image", -] diff --git a/src/guidellm/data/formatters/environment.py b/src/guidellm/data/formatters/environment.py deleted file mode 100644 index bd37e26b..00000000 --- 
a/src/guidellm/data/formatters/environment.py +++ /dev/null @@ -1,63 +0,0 @@ -from __future__ import annotations - -from typing import Any, ClassVar - -from jinja2 import Template -from jinja2.nativetypes import NativeEnvironment, NativeTemplate - -from guidellm.data.formatters.filters import JinjaFiltersRegistry -from guidellm.data.formatters.globals import JinjaGlobalsRegistry -from guidellm.data.formatters.templates import JinjaTemplatesRegistry - -__all__ = ["JinjaEnvironmentMixin"] - - -class JinjaEnvironmentMixin: - jinja_environment: ClassVar[NativeEnvironment | None] = None - - @classmethod - def create_environment(cls, **env_kwargs: Any) -> NativeEnvironment: - if "autoescape" not in env_kwargs: - env_kwargs["autoescape"] = False - - extensions = env_kwargs.pop("extensions", []) - extensions = set(extensions) | {"jinja2.ext.do"} - - env = NativeEnvironment(extensions=list(extensions), **env_kwargs) # noqa: S701 - - # Attach registered filters - filters_registry = JinjaFiltersRegistry.registry # type: ignore[misc] - if filters_registry: - for name, func in filters_registry.items(): - env.filters[name] = func - - # Attach registered globals - globals_registry = JinjaGlobalsRegistry.registry # type: ignore[misc] - if globals_registry: - for name, value in globals_registry.items(): - env.globals[name] = value - - cls.jinja_environment = env - return env - - @classmethod - def get_environment(cls) -> NativeEnvironment: - if cls.jinja_environment is None: - raise ValueError( - "Jinja environment is not initialized. Call create_environment first." - ) - return cls.jinja_environment - - @classmethod - def template_from_source(cls, source: str | Template) -> NativeTemplate: - if isinstance(source, Template): - return source - env = cls.get_environment() - return env.from_string(source) - - @classmethod - def template_from_registry(cls, name: str) -> NativeTemplate: - template = JinjaTemplatesRegistry.get_registered_object(name) - if template is None: - raise ValueError(f"Template '{name}' not found in registry.") - return cls.template_from_source(template) diff --git a/src/guidellm/data/formatters/globals.py b/src/guidellm/data/formatters/globals.py deleted file mode 100644 index 6c066191..00000000 --- a/src/guidellm/data/formatters/globals.py +++ /dev/null @@ -1,9 +0,0 @@ -from typing import Any - -from guidellm.utils import RegistryMixin - -__all__ = ["JinjaGlobalsRegistry"] - - -class JinjaGlobalsRegistry(RegistryMixin[Any]): - pass diff --git a/src/guidellm/data/formatters/objects.py b/src/guidellm/data/formatters/objects.py deleted file mode 100644 index 3e032089..00000000 --- a/src/guidellm/data/formatters/objects.py +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import annotations - -from typing import Any, Literal - -from datasets import Dataset, IterableDataset -from jinja2 import Template - -from guidellm.data.formatters import JinjaEnvironmentMixin -from guidellm.data.objects import ( - GenerationRequest, - GenerationRequestArguments, - GenerativeDatasetArgs, - GenerativeRequestType, -) -from guidellm.data.preprocessors.objects import DatasetPreprocessor - -__all__ = ["GenerativeRequestFormatter"] - - -class GenerativeRequestFormatter(DatasetPreprocessor, JinjaEnvironmentMixin): - def __init__( - self, - request_type: GenerativeRequestType | str = "text_completions", - request_template: str | Template | None = None, - request_extras: dict[str, Any] | GenerationRequestArguments | None = None, - request_defaults: dict[str, Any] | GenerationRequestArguments | None = None, - 
environment_extras: dict[str, Any] | None = None, - ): - self.datasets: list[Dataset | IterableDataset] | None = None - self.data_args: list[GenerativeDatasetArgs] | None = None - - self.request_type = request_type - self.request_template = request_template - self.request_extras = request_extras or {} - self.request_defaults = request_defaults or { - "stream": True, - "json_body": { - "stream": True, - "stream_options": { - "include_usage": True, - }, - }, - } - self.environment_extras = environment_extras or {} - self.jinja_template: Template | None = None - - def init_data( - self, - datasets: list[Dataset | IterableDataset], - data_args: list[GenerativeDatasetArgs], - ): - self.datasets = datasets - self.data_args = data_args - - self.create_environment(**self.environment_extras) - self.jinja_template = ( - self.template_from_source(self.request_template) - if self.request_template - else self.template_from_registry(self.request_type) - ) - - def __call__( - self, item: dict[str, Any] - ) -> dict[Literal["request"], GenerationRequest]: - if self.jinja_template is None: - raise ValueError("GenerativeRequestCreator not initialized with data.") - - stats = {} - if "prompt_tokens_count" in item: - count = item["prompt_tokens_count"][0] - stats["prompt_tokens"] = count - item["prompt_tokens_count"] = count - if "output_tokens_count" in item: - count = item["output_tokens_count"][0] - stats["output_tokens"] = count - item["output_tokens_count"] = count - - return { - "request": { - "request_type": self.request_type, - "arguments": GenerationRequestArguments.model_combine_dict( - self.request_defaults, - self.request_extras, - self.jinja_template.render( - **item, - request_defaults=self.request_defaults, - request_extras=self.request_extras, - ), - ), - "stats": stats, - } - } diff --git a/src/guidellm/data/formatters/templates.py b/src/guidellm/data/formatters/templates.py deleted file mode 100644 index 52db73b1..00000000 --- a/src/guidellm/data/formatters/templates.py +++ /dev/null @@ -1,182 +0,0 @@ -import textwrap -from typing import Union - -from jinja2 import Template - -from guidellm.utils import RegistryMixin - -__all__ = [ - "DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE", - "DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE", - "DEFAULT_CHAT_COMPLETIONS_TEMPLATE", - "DEFAULT_TEXT_COMPLETIONS_TEMPLATE", - "JinjaTemplatesRegistry", -] - - -class JinjaTemplatesRegistry(RegistryMixin[Union[Template, str]]): - pass - - -DEFAULT_TEXT_COMPLETIONS_TEMPLATE = JinjaTemplatesRegistry.register("text_completions")( - textwrap.dedent(""" - {% set obj = { - "json_body": { - "prompt": prefix_column[0]|default("") + text_column[0] - } - } %} - - {% if output_tokens_count is defined and output_tokens_count is not none %} - {% do obj["json_body"].update({ - "max_tokens": output_tokens_count, - "max_completion_tokens": output_tokens_count, - "stop": None, - "ignore_eos": True - }) %} - {% elif max_tokens is defined and max_tokens is not none %} - {% do obj["json_body"].update({"max_tokens": max_tokens}) %} - {% elif max_completion_tokens is defined and max_completion_tokens is not none %} - {% do obj["json_body"].update({"max_completion_tokens": max_completion_tokens}) %} - {% endif %} - - {{ obj }} - """).strip() # noqa: E501 -) - -DEFAULT_CHAT_COMPLETIONS_TEMPLATE = JinjaTemplatesRegistry.register("chat_completions")( - textwrap.dedent(""" - {% set obj = { - "json_body": { - "messages": [ - { - "role": "system", - "content": prefix_column[0]|default("") - }, - { - "role": "user", - "content": [] - } - ] - } - } %} - - {%- 
for item in text_column or [] %} - {% do obj["json_body"].messages[1].content.append({"type": "text", "text": item}) %} - {%- endfor %} - - {%- for item in image_column or [] %} - {% do obj["json_body"].messages[1].content.append({ - "type": "image_url", - "image_url": encode_image( - item, - max_size=max_size|default(None), - max_width=max_width|default(None), - max_height=max_height|default(None), - encode_type=image_encode_type|default(encode_type|default(None)) - ) - }) %} - {%- endfor %} - - {%- for item in video_column or [] %} - {% do obj["json_body"].messages[1].content.append({ - "type": "video_url", - "video_url": encode_video( - item, - encode_type=video_encode_type|default(encode_type|default(None)) - ) - }) %} - {%- endfor %} - - {%- for item in audio_column or [] %} - {%- set audio_type, audio_val = encode_audio( - item, - sample_rate=sample_rate|default(None), - max_duration=max_duration|default(None), - encode_type=audio_encode_type|default(encode_type|default(None)) - ) -%} - {% do content_list.append({"type": audio_type, audio_type: audio_val}) %} - {%- endfor %} - - {% if output_tokens_count is defined and output_tokens_count is not none %} - {% do obj["json_body"].update({ - "max_completion_tokens": output_tokens_count, - "stop": None, - "ignore_eos": True - }) %} - {% elif max_tokens is defined and max_tokens is not none %} - {% do obj["json_body"].update({"max_completion_tokens": max_tokens}) %} - {% elif max_completion_tokens is defined and max_completion_tokens is not none %} - {% do obj["json_body"].update({"max_completion_tokens": max_completion_tokens}) %} - {% endif %} - - {{ obj }} - """).strip() # noqa: E501 -) - -DEFAULT_AUDIO_TRANSCRIPTIONS_TEMPLATE = JinjaTemplatesRegistry.register( - "audio_transcriptions" -)( - textwrap.dedent(""" - { - {%- if output_tokens_count_column is defined and output_tokens_count_column is not none -%} - "max_tokens": {{ output_tokens_count_column }}, - "max_completion_tokens": {{ output_tokens_count_column }}, - "stop": None, - "ignore_eos": True, - {%- else -%} - {%- if max_tokens is defined and max_tokens is not none -%} - "max_tokens": {{ max_tokens }}, - {%- endif -%} - {%- if max_completion_tokens is defined and max_completion_tokens is not none -%} - "max_completion_tokens": {{ max_completion_tokens }}, - {%- endif -%} - {%- endif -%} - "files": { - "file": {{ encode_audio_file( - audio_column[0], - encode_type=audio_encode_type|default(encode_type|default(None)) - ) }} - } - {%- if text_column and text_column|length > 0 -%} - , - "json": { - "prompt": {{ text_column[0] }} - } - {%- endif -%} - } - """).strip() # noqa: E501 -) - -DEFAULT_AUDIO_TRANSLATIONS_TEMPLATE = JinjaTemplatesRegistry.register( - "audio_translations" -)( - textwrap.dedent(""" - { - {%- if output_tokens_count_column is defined and output_tokens_count_column is not none -%} - "max_tokens": {{ output_tokens_count_column }}, - "max_completion_tokens": {{ output_tokens_count_column }}, - "stop": None, - "ignore_eos": True, - {%- else -%} - {%- if max_tokens is defined and max_tokens is not none -%} - "max_tokens": {{ max_tokens }}, - {%- endif -%} - {%- if max_completion_tokens is defined and max_completion_tokens is not none -%} - "max_completion_tokens": {{ max_completion_tokens }}, - {%- endif -%} - {%- endif -%} - "files": { - "file": {{ encode_audio_file( - audio_column[0], - encode_type=audio_encode_type|default(encode_type|default(None)) - ) }} - } - {%- if text_column and text_column|length > 0 -%} - , - "json": { - "prompt": {{ text_column[0] }} - 
} - {%- endif -%} - } - """).strip() # noqa: E501 -) diff --git a/src/guidellm/data/loaders.py b/src/guidellm/data/loaders.py index ebecdb6f..303e5a8d 100644 --- a/src/guidellm/data/loaders.py +++ b/src/guidellm/data/loaders.py @@ -1,93 +1,111 @@ from __future__ import annotations -from collections.abc import Callable +import contextlib +import math +from collections.abc import Callable, Iterator from typing import Any, Literal from datasets import Dataset, IterableDataset -from torch.utils.data import DataLoader, Sampler +from torch.utils.data import Sampler +from torch.utils.data.dataloader import DataLoader as PyTorchDataLoader from transformers import PreTrainedTokenizerBase -from guidellm.data.datasets import GenerativeRequestsDataset -from guidellm.data.formatters import GenerativeRequestFormatter -from guidellm.data.objects import GenerationRequest, GenerativeDatasetArgs -from guidellm.data.preprocessors import ( - DatasetPreprocessor, - GenerativeColumnMapper, -) +from guidellm.data.deserializers import DatasetDeserializerFactory +from guidellm.data.objects import GenerationRequest +from guidellm.data.preprocessors import DataDependentPreprocessor, DatasetPreprocessor -__all__ = ["GenerativeDataLoader", "GenerativeRequestCollator"] +__all__ = ["DataLoader", "datasets_item_iterator"] -class GenerativeRequestCollator: - def __call__( - self, batch: list[dict[Literal["request"], dict[str, Any]]] - ) -> GenerationRequest: - if len(batch) != 1: - raise NotImplementedError( - f"Batch size greater than 1 is not currently supported. " - f"Got batch size: {len(batch)}" - ) +def datasets_item_iterator( + datasets: list[Dataset | IterableDataset], + data_samples: int, + preprocessors: tuple[DatasetPreprocessor | DataDependentPreprocessor], +) -> Iterator[Any]: + gen_count = 0 + dataset_iters = [iter(dataset) for dataset in datasets] + + with contextlib.suppress(StopIteration): + while gen_count < data_samples or data_samples == math.inf: + row = {"items": [next(dataset_iter) for dataset_iter in dataset_iters]} + for preprocessor in preprocessors: + row = preprocessor(row) + yield row + gen_count += 1 - return GenerationRequest.model_validate(batch[0]["request"]) + if data_samples != math.inf and gen_count < data_samples: + raise ValueError( + f"Requested {data_samples} samples, but only {gen_count} " + "available from the provided datasets." 
+ ) -class GenerativeDataLoader(DataLoader[GenerationRequest]): +class DataLoader(PyTorchDataLoader[GenerationRequest]): def __init__( self, data: list[Any], - data_args: list[GenerativeDatasetArgs] | None, + data_args: list[dict[str, Any]] | None, data_samples: int, processor_factory: Callable[[], PreTrainedTokenizerBase], - column_mapper: GenerativeColumnMapper, - preprocessors: list[DatasetPreprocessor], - request_formatter: GenerativeRequestFormatter, + preprocessors: list[DatasetPreprocessor | DataDependentPreprocessor], + collator: Callable, sampler: Sampler[int] | Literal["shuffle"] | None = None, - collate_fn: GenerativeRequestCollator | None = None, - num_workers: int | None = None, + num_workers: int | None = 1, random_seed: int = 42, **kwargs: Any, ): - dataset = GenerativeRequestsDataset.build( - data=data, - data_args=data_args, - data_samples=data_samples, - processor_factory=processor_factory, - column_mapper=column_mapper, - request_formatter=request_formatter, - preprocessors=preprocessors, - random_seed=random_seed, - ) + if not data or not isinstance(data, list): + raise ValueError(f"Data must be a non-empty list, got {data}.") - if collate_fn is None: - collate_fn = GenerativeRequestCollator() + if data_args is None: + data_args = [{} for _ in data] - # Handle sampler/shuffle logic based on dataset type - if sampler == "shuffle": - shuffle = True - sampler = None - elif isinstance(sampler, str) and sampler != "shuffle": + if len(data) != len(data_args): raise ValueError( - f"Invalid string sampler: {sampler}. " - f"Only 'shuffle' is supported as a string value." + f"Length of data ({len(data)}) must match length of data_args " + f"({len(data_args)})." ) - else: - shuffle = False - if isinstance(dataset, IterableDataset) and sampler is not None: - raise ValueError( - "Samplers are not supported with IterableDataset. " - "Use shuffle=True or apply shuffling to the dataset directly." 
+ datasets = [] + for datum, data_kwargs in zip(data, data_args): + type_ = data_kwargs.pop("type_") if "type_" in data_kwargs else None + datasets.append( + DatasetDeserializerFactory.deserialize( + data=datum, + data_kwargs=data_args, + processor_factory=processor_factory, + random_seed=random_seed, + type_=type_, + **data_kwargs, + ) + ) + for preprocessor in preprocessors: + if isinstance(preprocessor, DataDependentPreprocessor): + preprocessor.setup_data( + datasets=datasets, + data_args=data_args, + ) + if data_samples != math.inf and data_samples > 0: + cached_samples = list( + datasets_item_iterator(datasets, data_samples, tuple(preprocessors)) + ) + dataset = IterableDataset.from_generator(lambda: cached_samples) + else: + dataset = IterableDataset.from_generator( + datasets_item_iterator, + gen_kwargs={ + "datasets": datasets, + "data_samples": math.inf, + "preprocessors": tuple(preprocessors), + }, ) - elif isinstance(dataset, Dataset) and shuffle: - dataset = dataset.shuffle(seed=random_seed) - shuffle = False super().__init__( dataset=dataset, batch_size=1, - shuffle=shuffle, - sampler=sampler, - collate_fn=collate_fn, - num_workers=num_workers or 0, + shuffle=sampler == "shuffle", + sampler=sampler if sampler != "shuffle" else None, + collate_fn=collator, + num_workers=num_workers, **kwargs, ) diff --git a/src/guidellm/data/objects.py b/src/guidellm/data/objects.py index b4a38719..2a4b3857 100644 --- a/src/guidellm/data/objects.py +++ b/src/guidellm/data/objects.py @@ -1,7 +1,7 @@ from __future__ import annotations import uuid -from typing import Any, Literal, get_args +from typing import Any, Literal from pydantic import Field @@ -15,7 +15,6 @@ "GenerationRequest", "GenerationRequestArguments", "GenerationRequestTimings", - "GenerativeDatasetArgs", "GenerativeDatasetColumnType", "GenerativeRequestType", ] @@ -47,66 +46,23 @@ def model_combine_dict( # noqa: C901, PLR0912 combined = {} for args in arguments: - if ( - url := args.get("url") if isinstance(args, dict) else args.url - ) is not None: - combined["url"] = url - - if ( - path := args.get("path") if isinstance(args, dict) else args.path - ) is not None: - combined["path"] = path - - if ( - method := args.get("method") if isinstance(args, dict) else args.method - ) is not None: - combined["method"] = method - - if ( - stream := args.get("stream") if isinstance(args, dict) else args.stream - ) is not None: - combined["stream"] = stream - - if ( - content_body := ( - args.get("content_body") - if isinstance(args, dict) - else args.content_body - ) - ) is not None: - combined["content_body"] = content_body - - if ( - json_body := ( - args.get("json_body") if isinstance(args, dict) else args.json_body - ) - ) is not None: - if "json_body" not in combined: - combined["json_body"] = {} - combined["json_body"].update(json_body) - - if ( - files := args.get("files") if isinstance(args, dict) else args.files - ) is not None: - if "files" not in combined: - combined["files"] = {} - combined["files"].update(files) - - if ( - params := args.get("params") if isinstance(args, dict) else args.params - ) is not None: - if "params" not in combined: - combined["params"] = {} - combined["params"].update(params) - - if ( - headers := ( - args.get("headers") if isinstance(args, dict) else args.headers - ) - ) is not None: - if "headers" not in combined: - combined["headers"] = {} - combined["headers"].update(headers) + args_dict = args if isinstance(args, dict) else args.model_dump() + combined["url"] = args_dict.get("url", 
combined.get("url")) + combined["path"] = args_dict.get("path", combined.get("path")) + combined["method"] = args_dict.get("method", combined.get("method")) + combined["stream"] = args_dict.get("stream", combined.get("stream")) + combined["content_body"] = args_dict.get( + "content_body", combined.get("content_body") + ) + + if (json_body := args_dict.get("json_body")) is not None: + combined["json_body"] = combined.get("json_body", {}) + json_body + if (files := args_dict.get("files")) is not None: + combined["files"] = combined.get("files", {}) + files + if (params := args_dict.get("params")) is not None: + combined["params"] = combined.get("params", {}) + params + if (headers := args_dict.get("headers")) is not None: + combined["headers"] = combined.get("headers", {}) + headers return combined @@ -189,44 +145,3 @@ class GenerationRequestTimings(MeasuredRequestTimings): default=None, description="Unix timestamp when the last generation iteration completed.", ) - - -class GenerativeDatasetArgs(StandardBaseDict): - type_: str | None = None - split: str | None = None - prompt_tokens_count_column: str | None = None - output_tokens_count_column: str | None = None - prefix_column: str | None = None - text_column: str | list[str] | None = None - image_column: str | list[str] | None = None - video_column: str | list[str] | None = None - audio_column: str | list[str] | None = None - - def to_kwargs(self) -> dict[str, Any]: - return { - key: value - for key, value in self.model_extra.items() - if not key.endswith("_column") - } - - def get_mapped_columns( - self, - ) -> dict[GenerativeDatasetColumnType | str, str | list[str]]: - column_mapping: dict[GenerativeDatasetColumnType | str, list[str] | None] = {} - - # Add in any non None columns from the fields - for column in get_args(GenerativeDatasetColumnType): - value = getattr(self, column) - if value is not None: - column_mapping[column] = value - - # Enable flexibility for extra columns to be passed through and referenced later - for extra in self.model_extra: - if ( - extra.endswith("_column") - and extra not in column_mapping - and self.model_extra[extra] is not None - ): - column_mapping[extra] = self.model_extra[extra] - - return column_mapping diff --git a/src/guidellm/data/preprocessors/__init__.py b/src/guidellm/data/preprocessors/__init__.py index 039f74a5..664e196b 100644 --- a/src/guidellm/data/preprocessors/__init__.py +++ b/src/guidellm/data/preprocessors/__init__.py @@ -1,7 +1,25 @@ +from .formatters import ( + GenerativeAudioTranscriptionRequestFormatter, + GenerativeAudioTranslationRequestFormatter, + GenerativeChatCompletionsRequestFormatter, + GenerativeTextCompletionsRequestFormatter, +) from .mappers import GenerativeColumnMapper -from .objects import DatasetPreprocessor +from .preprocessor import ( + DataDependentPreprocessor, + DatasetPreprocessor, + PreprocessorRegistry, +) __all__ = [ + "ColumnMapper", + "ColumnMapperRegistry", + "DataDependentPreprocessor", "DatasetPreprocessor", + "GenerativeAudioTranscriptionRequestFormatter", + "GenerativeAudioTranslationRequestFormatter", + "GenerativeChatCompletionsRequestFormatter", "GenerativeColumnMapper", + "GenerativeTextCompletionsRequestFormatter", + "PreprocessorRegistry", ] diff --git a/src/guidellm/data/preprocessors/formatters.py b/src/guidellm/data/preprocessors/formatters.py new file mode 100644 index 00000000..c41ce936 --- /dev/null +++ b/src/guidellm/data/preprocessors/formatters.py @@ -0,0 +1,303 @@ +from __future__ import annotations + +from typing import Any, 
Literal + +from guidellm.data.objects import ( + GenerationRequest, + GenerationRequestArguments, + GenerativeDatasetColumnType, +) +from guidellm.data.preprocessors.preprocessor import ( + DatasetPreprocessor, + PreprocessorRegistry, +) +from guidellm.data.utils import ( + encode_audio_as_dict, + encode_audio_as_file, + encode_image, + encode_video, +) + +__all__ = [ + "GenerativeAudioTranscriptionRequestFormatter", + "GenerativeAudioTranslationRequestFormatter", + "GenerativeChatCompletionsRequestFormatter", + "GenerativeTextCompletionsRequestFormatter", +] + + +@PreprocessorRegistry.register("text_completions") +class GenerativeTextCompletionsRequestFormatter(DatasetPreprocessor): + def __init__( + self, + model: str, + extras: dict[str, Any] | GenerationRequestArguments | None = None, + stream: bool = True, + max_tokens: int | None = None, + max_completion_tokens: int | None = None, + ): + self.model: str | None = model + self.extras = ( + GenerationRequestArguments(**extras) + if extras and isinstance(extras, dict) + else extras + ) + self.stream: bool = stream + self.max_tokens: int | None = max_tokens or max_completion_tokens + + def __call__( + self, columns: dict[GenerativeDatasetColumnType, list[Any]] + ) -> GenerationRequest: + arguments = {"json_body": {}} + stats = {} + + # Add model + if self.model is not None: + arguments["json_body"]["model"] = self.model + + # Configure streaming + if self.stream: + arguments["json_body"].update( + {"stream": True, "stream_options": {"include_usage": True}} + ) + arguments["stream"] = True + + # Handle output tokens + if output_tokens := columns.get("output_tokens_count_column", []): + output_count = output_tokens[0] + stats["output_tokens"] = output_count + arguments["json_body"].update( + {"max_tokens": output_count, "stop": None, "ignore_eos": True} + ) + elif self.max_tokens is not None: + arguments["json_body"]["max_tokens"] = self.max_tokens + + # Handle prompt tokens + if prompt_tokens := columns.get("prompt_tokens_count_column", []): + stats["prompt_tokens"] = prompt_tokens[0] + + # Apply extra arguments + if self.extras: + arguments = GenerationRequestArguments.model_combine_dict( + arguments, self.extras + ) + + # Build prompt + arguments["json_body"]["prompt"] = "".join( + columns.get("prefix_column", []) + columns.get("text_column", []) + ) + + return GenerationRequest( + request_type="text_completions", + arguments=GenerationRequestArguments(**arguments), + stats=stats, + ) + + +@PreprocessorRegistry.register("chat_completions") +class GenerativeChatCompletionsRequestFormatter(DatasetPreprocessor): + def __init__( + self, + model: str, + extras: dict[str, Any] | GenerationRequestArguments | None = None, + stream: bool = True, + max_tokens: int | None = None, + max_completion_tokens: int | None = None, + encode_kwargs: dict[str, Any] | None = None, + ): + self.model = model + self.extras = ( + GenerationRequestArguments(**extras) + if extras and isinstance(extras, dict) + else extras + ) + self.stream = stream + self.max_completion_tokens = max_tokens or max_completion_tokens + self.encode_image_kwargs = ( + encode_kwargs.get("image", {}) if encode_kwargs else {} + ) + self.encode_video_kwargs = ( + encode_kwargs.get("video", {}) if encode_kwargs else {} + ) + self.encode_audio_kwargs = ( + encode_kwargs.get("audio", {}) if encode_kwargs else {} + ) + + def __call__( + self, columns: dict[GenerativeDatasetColumnType, list[Any]] + ) -> GenerationRequest: + arguments = {"json_body": {}} + stats = {} + + # Add model + if 
self.model is not None: + arguments["json_body"]["model"] = self.model + + # Configure streaming + if self.stream: + arguments["json_body"].update( + {"stream": True, "stream_options": {"include_usage": True}} + ) + arguments["stream"] = True + + # Handle output tokens + if output_tokens := columns.pop("output_tokens_count_column", []): + output_count = output_tokens[0] + stats["output_tokens"] = output_count + arguments["json_body"].update( + { + "max_completion_tokens": output_count, + "stop": None, + "ignore_eos": True, + } + ) + elif self.max_completion_tokens is not None: + arguments["json_body"]["max_completion_tokens"] = self.max_completion_tokens + + # Handle prompt tokens + if prompt_tokens := columns.pop("prompt_tokens_count_column", []): + stats["prompt_tokens"] = prompt_tokens[0] + + # Apply extra arguments + if self.extras: + arguments = GenerationRequestArguments.model_combine_dict( + arguments, self.extras + ) + + # Build messages + arguments["json_body"]["messages"] = ( + [ + {"role": "system", "content": prefix} + for prefix in columns.pop("prefix_column", []) + ] + + [ + {"role": "user", "content": [{"type": "text", "text": text}]} + for text in columns.pop("text_column", []) + ] + + [ + { + "role": "user", + "content": [ + { + "type": "image_url", + "image_url": encode_image( + image, **self.encode_image_kwargs + ), + } + ], + } + for image in columns.pop("image_column", []) + ] + + [ + { + "role": "user", + "content": [ + { + "type": "video_url", + "video_url": encode_video( + video, **self.encode_video_kwargs + ), + } + ], + } + for video in columns.pop("video_column", []) + ] + + [ + { + "role": "user", + "content": [ + { + "type": "input_audio", + "input_audio": encode_audio_as_dict( + audio, **self.encode_audio_kwargs + ), + } + ], + } + for audio in columns.pop("audio_column", []) + ] + ) + + return GenerationRequest( + request_type="chat_completions", + arguments=GenerationRequestArguments(**arguments), + stats=stats, + ) + + +@PreprocessorRegistry.register("audio_transcriptions") +class GenerativeAudioTranscriptionRequestFormatter(DatasetPreprocessor): + def __init__( + self, + model: str, + extra_args: dict[str, Any] | GenerationRequestArguments | None = None, + stream: bool = True, + encode_kwargs: dict[str, Any] | None = None, + ): + self.model = model + self.extra_args = extra_args + self.stream = stream + self.encode_audio_kwargs = encode_kwargs or {} + + def __call__( + self, columns: dict[GenerativeDatasetColumnType, list[Any]] + ) -> GenerationRequest: + arguments = {"json_body": {}} + stats = {} + + # Add model + if self.model is not None: + arguments["json_body"]["model"] = self.model + + # Configure streaming + if self.stream: + arguments["json_body"].update( + {"stream": True, "stream_options": {"include_usage": True}} + ) + + # Apply extra arguments + if self.extra_args: + arguments = GenerationRequestArguments.model_combine_dict( + arguments, self.extra_args + ) + + # Handle stats tokens + if output_tokens := columns.get("output_tokens_count_column", []): + output_count = output_tokens[0] + stats["output_tokens"] = output_count + if prompt_tokens := columns.get("prompt_tokens_count_column", []): + stats["prompt_tokens"] = prompt_tokens[0] + + # Build audio input + if audio := columns.get("audio_column", []): + arguments["files"] = { + "file": encode_audio_as_file(audio[0], **self.encode_audio_kwargs) + } + else: + raise ValueError("No audio column found for audio transcription request.") + + # Build prompt + if (prefix := columns.get("prefix_column", 
[])) or ( + text := columns.get("text_column", []) + ): + arguments["json_body"]["prompt"] = "".join(prefix) + "".join(text) + + return { + "request": { + "request_type": "audio_transcriptions", + "arguments": arguments, + "stats": stats, + } + } + + +@PreprocessorRegistry.register("audio_translations") +class GenerativeAudioTranslationRequestFormatter( + GenerativeAudioTranscriptionRequestFormatter +): + def __call__( + self, columns: dict[GenerativeDatasetColumnType, list[Any]] + ) -> dict[Literal["request"], dict[Literal["request_type"], Any]]: + result = super().__call__(columns) + result["request"]["request_type"] = "audio_translations" + return result diff --git a/src/guidellm/data/preprocessors/mappers.py b/src/guidellm/data/preprocessors/mappers.py index 1792cb7e..56ca0342 100644 --- a/src/guidellm/data/preprocessors/mappers.py +++ b/src/guidellm/data/preprocessors/mappers.py @@ -1,115 +1,182 @@ from __future__ import annotations -from dataclasses import dataclass -from typing import Any, Literal +from collections import defaultdict +from typing import Any, ClassVar from datasets import Dataset, IterableDataset -from guidellm.data.objects import ( - GenerativeDatasetArgs, - GenerativeDatasetColumnType, +from guidellm.data.objects import GenerativeDatasetColumnType +from guidellm.data.preprocessors.preprocessor import ( + DataDependentPreprocessor, + PreprocessorRegistry, ) -from guidellm.data.preprocessors.objects import DatasetPreprocessor -from guidellm.data.utils import DEFAULT_COLUMN_NAMES -__all__ = ["ColumnMapping", "GenerativeColumnMapper"] +__all__ = ["GenerativeColumnMapper"] + + +@PreprocessorRegistry.register("generative_column_mapper") +class GenerativeColumnMapper(DataDependentPreprocessor): + defaults: ClassVar[dict[str, list[str]]] = { + "prompt_tokens_count_column": ["prompt_tokens_count", "input_tokens_count"], + "output_tokens_count_column": [ + "output_tokens_count", + "completion_tokens_count", + ], + "prefix_column": [ + "system_prompt", + "system", + "prefix", + ], + "text_column": [ + "prompt", + "instruction", + "question", + "input", + "context", + "content", + "conversation", + "turn", + "text", + ], + "image_column": [ + "image", + "picture", + "photo", + "img", + ], + "video_column": [ + "video", + "clip", + "movie", + "footage", + "mp4", + "mov", + "avi", + ], + "audio_column": [ + "audio", + "sound", + "voice", + "speech", + "wav", + "mp3", + ], + } + + @classmethod + def datasets_default_mappings( + cls, datasets: list[Dataset | IterableDataset] + ) -> dict[str, list[tuple[int, str]]]: + mappings: dict[GenerativeDatasetColumnType, list[tuple[int, str]]] = ( + defaultdict(list) + ) + + for index, dataset in enumerate(datasets): + dataset_columns = dataset.column_names or list(next(iter(dataset)).keys()) + + for column_type in cls.defaults: + if column_type in mappings: + continue + + type_names = [ + variant + for name in cls.defaults.get(column_type, []) + for plural in [name, f"{name}s", f"{name}es"] + for variant in [ + plural, + plural.lower(), + plural.upper(), + plural.capitalize(), + ] + ] + + for name in type_names: + if name in dataset_columns: + mappings[column_type].append((index, name)) + break + return mappings -@dataclass -class ColumnMapping: - indices: list[int] - names: list[str] + @classmethod + def datasets_mappings( + cls, + datasets: list[Dataset | IterableDataset], + input_mappings: dict[GenerativeDatasetColumnType, str | list[str]], + ) -> dict[GenerativeDatasetColumnType, list[tuple[int, str]]]: + mappings: 
dict[GenerativeDatasetColumnType, list[tuple[int, str]]] = ( + defaultdict(list) + ) + datasets_named_indices = { + ( + dataset.info.dataset_name + if dataset.info and dataset.info.dataset_name + else index + ): index + for index, dataset in enumerate(datasets) + } + datasets_columns = { + index: dataset.column_names or list(next(iter(dataset)).keys()) + for index, dataset in enumerate(datasets) + } + + for column_type, names in input_mappings.items(): + mappings[column_type] = [] + + for name in names if isinstance(names, list) else [names]: + dataset, column_name = name.split(".", 1) + dataset_index = ( + int(dataset) + if dataset.isdigit() + else datasets_named_indices.get(dataset) + ) + if dataset_index is None or dataset_index >= len(datasets): + raise ValueError( + f"Dataset '{dataset}' not found in datasets: " + f"{datasets_named_indices}." + ) + if column_name not in datasets_columns[dataset_index]: + raise ValueError( + f"Column '{column_name}' not found in dataset '{dataset}' " + f"columns: {datasets_columns[dataset_index]}." + ) + mappings[column_type].append((dataset_index, column_name)) + return mappings -class GenerativeColumnMapper(DatasetPreprocessor): - def __init__(self): - self.datasets: list[Dataset | IterableDataset] | None = None - self.data_args: list[GenerativeDatasetArgs] | None = None - self.column_mappings: ( - dict[GenerativeDatasetColumnType, ColumnMapping | None] | None - ) = None + def __init__( + self, + column_mappings: dict[GenerativeDatasetColumnType, str | list[str]] + | None = None, + ): + self.input_mappings = column_mappings + self.datasets_column_mappings: ( + dict[GenerativeDatasetColumnType, list[tuple[int, str]]] | None + ) - def __call__( - self, row: dict[Literal["items"], tuple[dict[str, Any]]] - ) -> dict[str, Any]: - if ( - self.datasets is None - or self.data_args is None - or self.column_mapping is None - ): - raise ValueError("GenerativeColumnMapper not initialized with data.") + def __call__(self, row: dict[int, list[dict[str, Any]]]) -> dict[str, list[Any]]: + if self.datasets_column_mappings is None: + raise ValueError("DefaultGenerativeColumnMapper not setup with data.") - mapped: dict[GenerativeDatasetColumnType, list[Any]] = {} items = row.pop("items") + mapped: dict[GenerativeDatasetColumnType, list[Any]] = defaultdict(list) - for column_type, column_mapping in self.column_mapping.items(): - mapped[column_type] = [ - items[index].get(name) - for index, name in zip(column_mapping.indices, column_mapping.names) - ] + for column_type, column_mappings in self.datasets_column_mappings.items(): + for ( + dataset_index, + dataset_column, + ) in column_mappings: + mapped[column_type].append(items[dataset_index][dataset_column]) - return mapped + return dict(mapped) - def init_data( + def setup_data( self, datasets: list[Dataset | IterableDataset], - data_args: list[GenerativeDatasetArgs], + data_args: list[dict[str, Any]], ): - self.datasets = datasets - self.data_args = data_args - self.column_mapping = self.generate_column_mapping() - - def generate_column_mapping( - self, - ) -> dict[GenerativeDatasetColumnType, ColumnMapping]: - mappings: dict[GenerativeDatasetColumnType, ColumnMapping] = {} - # Map any columns specified in the GenerativeDatasetArgs first - self._fill_mappings_from_data_args(mappings) - # For standard column types not mapped, fill in first one found from defaults - self._fill_mappings_from_defaults(mappings) - - return mappings - - def _fill_mappings_from_data_args( - self, mappings: dict[GenerativeDatasetColumnType, 
ColumnMapping] - ): - for index, args in enumerate(self.data_args): - args_column_mappings = args.get_mapped_columns() - for column_type, column_name in args_column_mappings.items(): - if column_type not in mappings: - mappings[column_type] = ColumnMapping(indices=[], names=[]) - column_mapping = mappings[column_type] - - for name in ( - column_name if isinstance(column_name, list) else [column_name] - ): - if name not in self.datasets[index].column_names: - raise ValueError( - f"Column '{name}' not found in dataset columns: " - f"{self.datasets[index].column_names}" - ) - column_mapping.indices.append(index) - column_mapping.names.append(name) - - def _fill_mappings_from_defaults( - self, mappings: dict[GenerativeDatasetColumnType, ColumnMapping] - ): - for column_type, default_names in DEFAULT_COLUMN_NAMES.items(): - if column_type in mappings: - continue - - for index, dataset in enumerate(self.datasets): - for name in default_names: - if name in dataset.column_names: - mappings[column_type] = ColumnMapping( - indices=[index], names=[name] - ) - break - # Check for plural form of the name - if f"{name}s" in dataset.column_names: - mappings[column_type] = ColumnMapping( - indices=[index], names=[f"{name}s"] - ) - break - if column_type in mappings: - break + _ = data_args # Unused for this mapper + self.datasets_column_mappings = ( + self.datasets_default_mappings(datasets) + if self.input_mappings is None + else self.datasets_mappings(datasets, self.input_mappings) + ) diff --git a/src/guidellm/data/preprocessors/objects.py b/src/guidellm/data/preprocessors/objects.py deleted file mode 100644 index 831f944d..00000000 --- a/src/guidellm/data/preprocessors/objects.py +++ /dev/null @@ -1,20 +0,0 @@ -from __future__ import annotations - -from typing import Any, Protocol, runtime_checkable - -from datasets import Dataset, IterableDataset - -from guidellm.data.objects import GenerativeDatasetArgs - -__all__ = ["DatasetPreprocessor"] - - -@runtime_checkable -class DatasetPreprocessor(Protocol): - def init_data( - self, - datasets: list[Dataset | IterableDataset], - data_args: list[GenerativeDatasetArgs], - ): ... - - def __call__(self, item: dict[str, Any]) -> dict[str, Any]: ... diff --git a/src/guidellm/data/preprocessors/preprocessor.py b/src/guidellm/data/preprocessors/preprocessor.py new file mode 100644 index 00000000..eefb53d3 --- /dev/null +++ b/src/guidellm/data/preprocessors/preprocessor.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from typing import Any, Protocol, Union, runtime_checkable + +from datasets import Dataset, IterableDataset + +from guidellm.utils import RegistryMixin + +__all__ = ["DataDependentPreprocessor", "DatasetPreprocessor", "PreprocessorRegistry"] + + +@runtime_checkable +class DatasetPreprocessor(Protocol): + def __call__(self, item: dict[str, Any]) -> dict[str, Any]: ... + + +@runtime_checkable +class DataDependentPreprocessor(DatasetPreprocessor, Protocol): + def setup_data( + self, + datasets: list[Dataset | IterableDataset], + data_args: list[dict[str, Any]], + ): ... 
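+ # PreprocessorRegistry (below) backs string lookups such as "generative_column_mapper" and the request formatter names ("text_completions", "chat_completions", "audio_transcriptions", "audio_translations"); entrypoints resolve these identifiers via PreprocessorRegistry.get_registered_object.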
+ + +class PreprocessorRegistry( + RegistryMixin[Union[DataDependentPreprocessor, type[DataDependentPreprocessor]]] +): + pass diff --git a/src/guidellm/data/processor.py b/src/guidellm/data/processor.py new file mode 100644 index 00000000..645683c4 --- /dev/null +++ b/src/guidellm/data/processor.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from typing import Any + +from transformers import ( # type: ignore[import] + AutoTokenizer, + PreTrainedTokenizerBase, +) + +__all__ = ["ProcessorFactory"] + + +class ProcessorFactory: + def __init__( + self, + processor: str | PreTrainedTokenizerBase, + processor_args: dict[str, Any] | None = None, + ) -> None: + self.processor = processor + self.processor_args = processor_args or {} + + def __call__(self) -> PreTrainedTokenizerBase: + if isinstance(self.processor, PreTrainedTokenizerBase): + return self.processor + else: + self.processor = AutoTokenizer.from_pretrained( + self.processor, + **(self.processor_args or {}), + ) + return self.processor diff --git a/src/guidellm/data/utils/__init__.py b/src/guidellm/data/utils/__init__.py new file mode 100644 index 00000000..aac657f8 --- /dev/null +++ b/src/guidellm/data/utils/__init__.py @@ -0,0 +1,34 @@ +from .dataset import DEFAULT_SPLITS, resolve_dataset_split +from .functions import ( + download_audio, + download_image, + download_video, + encode_audio, + encode_audio_as_dict, + encode_audio_as_file, + encode_image, + encode_image_base64, + encode_video, + encode_video_base64, + get_file_format, + is_url, + resize_image, +) + +__all__ = [ + "DEFAULT_SPLITS", + "download_audio", + "download_image", + "download_video", + "encode_audio", + "encode_audio_as_dict", + "encode_audio_as_file", + "encode_image", + "encode_image_base64", + "encode_video", + "encode_video_base64", + "get_file_format", + "is_url", + "resize_image", + "resolve_dataset_split", +] diff --git a/src/guidellm/data/utils.py b/src/guidellm/data/utils/dataset.py similarity index 54% rename from src/guidellm/data/utils.py rename to src/guidellm/data/utils/dataset.py index d2fa1f9c..9656c1a7 100644 --- a/src/guidellm/data/utils.py +++ b/src/guidellm/data/utils/dataset.py @@ -1,18 +1,10 @@ from __future__ import annotations -import contextlib -import math -from collections.abc import Iterator -from typing import Any, Literal +from typing import Literal from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict -__all__ = [ - "DEFAULT_COLUMN_NAMES", - "DEFAULT_SPLITS", - "datasets_item_iterator", - "resolve_dataset_split", -] +__all__ = ["DEFAULT_SPLITS", "resolve_dataset_split"] DEFAULT_SPLITS: dict[Literal["train", "calib", "val", "test"], list[str]] = { @@ -77,54 +69,9 @@ } -DEFAULT_COLUMN_NAMES: dict[str, list[str]] = { - "prompt_tokens_count": ["prompt_tokens_count", "input_tokens_count"], - "output_tokens_count": ["output_tokens_count", "completion_tokens_count"], - "prefix_column": [ - "system_prompt", - "system", - "prefix", - ], - "text_column": [ - "prompt", - "instruction", - "question", - "input", - "context", - "content", - "conversation", - "turn", - "text", - ], - "image_column": [ - "image", - "picture", - "photo", - "img", - ], - "video_column": [ - "video", - "clip", - "movie", - "footage", - "mp4", - "mov", - "avi", - ], - "audio_column": [ - "audio", - "sound", - "voice", - "speech", - "wav", - "mp3", - ], -} - - def resolve_dataset_split( dataset: Dataset | IterableDataset | DatasetDict | IterableDatasetDict, - split: str | None, + split: str | None = None, ) -> Dataset | 
IterableDataset: if split is not None and isinstance(dataset, (DatasetDict, IterableDatasetDict)): if split in dataset: @@ -145,22 +92,3 @@ def resolve_dataset_split( return dataset[default_split] return dataset[list(dataset.keys())[0]] - - -def datasets_item_iterator( - datasets: list[Dataset | IterableDataset], - data_samples: int, -) -> Iterator[dict[Literal["items"], tuple[dict[str, Any]]]]: - dataset_iters = [iter(dataset) for dataset in datasets] - gen_count = 0 - - with contextlib.suppress(StopIteration): - while gen_count < data_samples or data_samples <= 0 or data_samples == math.inf: - yield {"items": tuple(next(dataset_iter) for dataset_iter in dataset_iters)} - gen_count += 1 - - if gen_count < data_samples and data_samples > 0 and data_samples != math.inf: - raise ValueError( - f"Requested {data_samples} samples, but only {gen_count} available " - "from the provided datasets." - ) diff --git a/src/guidellm/data/formatters/filters.py b/src/guidellm/data/utils/functions.py similarity index 73% rename from src/guidellm/data/formatters/filters.py rename to src/guidellm/data/utils/functions.py index 8dd4e445..c9ca20ed 100644 --- a/src/guidellm/data/formatters/filters.py +++ b/src/guidellm/data/utils/functions.py @@ -3,7 +3,7 @@ import base64 import io from pathlib import Path -from typing import Any, Callable, Literal +from typing import Any, Literal import datasets import httpx @@ -11,15 +11,15 @@ import numpy as np import soundfile from PIL import Image as PILImage - -from guidellm.utils import RegistryMixin +from pydub import AudioSegment __all__ = [ - "JinjaFiltersRegistry", "download_audio", "download_image", "download_video", "encode_audio", + "encode_audio_as_dict", + "encode_audio_as_file", "encode_image", "encode_image_base64", "encode_video", @@ -30,16 +30,10 @@ ] -class JinjaFiltersRegistry(RegistryMixin[Callable[..., Any]]): - pass - - -@JinjaFiltersRegistry.register("is_url") def is_url(text: Any) -> bool: return isinstance(text, str) and text.startswith(("http://", "https://")) -@JinjaFiltersRegistry.register("encode_image") def encode_image( image: bytes | str | Path | np.ndarray | PILImage.Image | datasets.Image, max_size: int | None = None, @@ -90,7 +84,6 @@ def encode_image( ) -@JinjaFiltersRegistry.register("encode_image_base64") def encode_image_base64( image: bytes | str | Path | np.ndarray | PILImage.Image, width: int | None = None, @@ -137,7 +130,6 @@ def encode_image_base64( return f"data:image/jpeg;base64,{image_base64}" -@JinjaFiltersRegistry.register("resize_image") def resize_image( image: PILImage.Image, width: int | None = None, @@ -183,14 +175,12 @@ def resize_image( return image -@JinjaFiltersRegistry.register("download_image") def download_image(url: str) -> bytes: response = httpx.get(url) response.raise_for_status() return response.content -@JinjaFiltersRegistry.register("encode_video") def encode_video( video: bytes | str | Path | datasets.Video, encode_type: Literal["base64", "url"] | None = None, @@ -221,7 +211,6 @@ def encode_video( return encode_video_base64(video=video) -@JinjaFiltersRegistry.register("encode_video_base64") def encode_video_base64(video: bytes | str | Path) -> str: if ( isinstance(video, str) @@ -246,78 +235,121 @@ def encode_video_base64(video: bytes | str | Path) -> str: return f"data:video/{video_format};base64,{video_base64}" -@JinjaFiltersRegistry.register("download_video") def download_video(url: str) -> tuple[bytes, str]: response = httpx.get(url) response.raise_for_status() return response.content, 
get_file_format(url) -@JinjaFiltersRegistry.register("encode_audio") -def encode_audio( +def encode_audio_as_dict( audio: bytes | str | Path | dict | np.ndarray, - sample_rate: int | None = None, + sample_rate: int | None = 16000, max_duration: float | None = None, -) -> dict[str, str]: - """ - Input audio types: - - bytes: raw audio bytes - - str: file path on disk or URL - - pathlib.Path: file path on disk - - dict: {"data": base64_string, "format": "wav"} format - - numpy.ndarray: audio array, assumed to be at sample_rate if provided - - sample_rate: sample rate of the input audio if input is np.ndarray - target_sample_rate: resample to this rate if provided - duration: limit audio to this duration in seconds if provided + mono: bool = True, + audio_format: str = "mp3", + bitrate: str = "64k", +) -> dict[Literal["data", "format"], Any]: + content, file_name, file_format = encode_audio( + audio=audio, + sample_rate=sample_rate or 16000, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) - Returns dict with format: - { - "data": base64_encoded_audio_bytes, - "format": "wav" + return { + "data": base64.b64encode(content).decode("utf-8"), + "format": file_format, } - """ - if is_url(audio): - audio, _ = download_audio(audio) - if isinstance(audio, dict): - if "data" not in audio: - raise ValueError("Audio dict must contain 'data' key") - audio = base64.b64decode(audio["data"]) - if isinstance(audio, bytes): - audio_data, sample_rate = librosa.load(io.BytesIO(audio), sr=sample_rate) +def encode_audio_as_file( + audio: bytes | str | Path | dict | np.ndarray, + sample_rate: int | None = 16000, + max_duration: float | None = None, + mono: bool = True, + audio_format: str = "mp3", + bitrate: str = "64k", +) -> tuple[str, bytes, str]: + content, file_name, file_format = encode_audio( + audio=audio, + sample_rate=sample_rate or 16000, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) + + return file_name, content, f"audio/{file_format}" + + +def encode_audio( + audio: bytes | str | Path | dict, + sample_rate: int = 16000, + max_duration: float | None = None, + mono: bool = True, + audio_format: str = "mp3", + bitrate: str = "64k", +) -> tuple[bytes, str, str]: + file_name = "audio.wav" + + if is_url(audio): + audio, file_name, _ = download_audio(audio) + elif isinstance(audio, dict): + file_name = audio.get("name", "audio") + audio = base64.b64decode(audio["data"]) elif isinstance(audio, (str, Path)): - audio_data, sample_rate = librosa.load(str(audio), sr=sample_rate) - elif isinstance(audio, np.ndarray): - if sample_rate is None: - raise ValueError("sample_rate must be provided for numpy arrays") - audio_data = audio - else: + path = Path(audio) + file_name = get_file_name(path) + audio = path.read_bytes() + elif not isinstance(audio, bytes): raise ValueError(f"Unsupported audio type: {type(audio)}") - if max_duration is not None: - max_samples = int(max_duration * sample_rate) - if len(audio_data) > max_samples: - audio_data = audio_data[:max_samples] + processed_audio, sample_rate = librosa.load( + io.BytesIO(audio), + sr=sample_rate, + mono=mono, + duration=max_duration, + ) + # Encode to target format buffer = io.BytesIO() - soundfile.write(buffer, audio_data, sample_rate, format="WAV", subtype="PCM_16") + if audio_format.lower() == "mp3": + temp_wav = io.BytesIO() + soundfile.write( + temp_wav, + processed_audio, + sample_rate, + format="WAV", + subtype="PCM_16", + ) + temp_wav.seek(0) + 
AudioSegment.from_wav(temp_wav).export(buffer, format="mp3", bitrate=bitrate) + else: + soundfile.write( + buffer, + processed_audio, + sample_rate, + format=audio_format.upper(), + ) - return {"data": buffer.getvalue(), "format": "wav"} + return buffer.getvalue(), file_name, audio_format.lower() -@JinjaFiltersRegistry.register("download_audio") -def download_audio(url: str) -> tuple[bytes, str]: - """Download audio from URL and return bytes with format.""" +def download_audio(url: str) -> tuple[bytes, str, str]: response = httpx.get(url) response.raise_for_status() content = response.content - audio_format = get_file_format(url) - return content, audio_format + + return content, get_file_name(url), get_file_format(url) + + +def get_file_name(path: Path | str) -> str: + """Get file name from path.""" + return Path(path).name -@JinjaFiltersRegistry.register("get_file_format") def get_file_format(path: Path | str) -> str: """Get file format from path extension.""" suffix = Path(path).suffix.lower() diff --git a/src/guidellm/scheduler/worker.py b/src/guidellm/scheduler/worker.py index 5f2fb74b..1832d25f 100644 --- a/src/guidellm/scheduler/worker.py +++ b/src/guidellm/scheduler/worker.py @@ -233,6 +233,12 @@ async def _processing_startup(self): self.backend_started = True await self.backend.validate() + # Wait for all processes to be ready + await wait_for_sync_barrier( + self.startup_barrier, + poll_interval=self.messaging.poll_interval, + ) + # Get messaging system ready await self.messaging.start( receive_stop_criteria=[self.requests_generated_event], @@ -240,12 +246,6 @@ async def _processing_startup(self): ) self.messaging_started = True - # Wait for all processes to be ready - await wait_for_sync_barrier( - self.startup_barrier, - poll_interval=self.messaging.poll_interval, - ) - self.startup_completed = True async def _processing_shutdown(self): diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index e64d64fc..9baccd1b 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -495,6 +495,7 @@ def _iter(): count = 0 request_info: ScheduledRequestInfo = None + for request in _iter(): count += 1 diff --git a/src/guidellm/settings.py b/src/guidellm/settings.py index 20d9ff96..5c360eff 100644 --- a/src/guidellm/settings.py +++ b/src/guidellm/settings.py @@ -145,7 +145,7 @@ class Settings(BaseSettings): mp_max_pending_buffer_percent: float = 0.5 mp_max_worker_buffer_percent: float = 0.2 max_concurrency: int = 512 - max_worker_processes: int = 10 + max_worker_processes: int = 2 scheduler_start_delay_non_distributed: float = 1.0 constraint_error_window_size: float = 30 constraint_error_min_processed: float = 30 diff --git a/src/guidellm/utils/messaging.py b/src/guidellm/utils/messaging.py index c56ec29a..2f631a87 100644 --- a/src/guidellm/utils/messaging.py +++ b/src/guidellm/utils/messaging.py @@ -610,6 +610,8 @@ def _send_messages_task_thread( # noqa: C901, PLR0912 except (culsans.QueueFull, queue.Full): pass + time.sleep(0) # Yield to other threads + def _receive_messages_task_thread( # noqa: C901 self, receive_callback: Callable[[Any], Any] | None, @@ -649,6 +651,8 @@ def _receive_messages_task_thread( # noqa: C901 except (culsans.QueueFull, queue.Full): pass + time.sleep(0) # Yield to other threads + class InterProcessMessagingManagerQueue( InterProcessMessagingQueue[SendMessageT, ReceiveMessageT] From 11d585c652ed28ae06be4906fa8928a7531b6e1e Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Wed, 1 Oct 
2025 18:03:19 -0400 Subject: [PATCH 45/90] Fixed type errors in utility classes registry and singleton Signed-off-by: Jared O'Connell --- src/guidellm/utils/registry.py | 12 +++++++----- src/guidellm/utils/singleton.py | 3 +++ 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/src/guidellm/utils/registry.py b/src/guidellm/utils/registry.py index b9e3faf5..909d3f45 100644 --- a/src/guidellm/utils/registry.py +++ b/src/guidellm/utils/registry.py @@ -10,7 +10,7 @@ from __future__ import annotations -from typing import Callable, ClassVar, Generic, TypeVar, cast +from typing import Any, Callable, ClassVar, Generic, TypeVar, cast from guidellm.utils.auto_importer import AutoImporterMixin @@ -19,7 +19,7 @@ RegistryObjT = TypeVar("RegistryObjT") """Generic type variable for objects managed by the registry system.""" -RegisterT = TypeVar("RegisterT") +RegisterT = TypeVar("RegisterT", bound=type) # Must be bound to type to ensure __name__ is available. """Generic type variable for the args and return values within the registry.""" @@ -62,7 +62,7 @@ class TokenProposal(RegistryMixin): :cvar registry_populated: Track whether auto-discovery has completed """ - registry: ClassVar[dict[str, RegistryObjT] | None] = None + registry: ClassVar[dict[str, Any] | None] = None registry_auto_discovery: ClassVar[bool] = False registry_populated: ClassVar[bool] = False @@ -209,6 +209,8 @@ def get_registered_object(cls, name: str) -> RegistryObjT | None: if name in cls.registry: return cls.registry[name] - lower_key_map = {key.lower(): key for key in cls.registry} + for k, v in cls.registry.items(): + if name.lower() == k.lower(): + return v - return cls.registry.get(lower_key_map.get(name.lower())) + return None # Not found diff --git a/src/guidellm/utils/singleton.py b/src/guidellm/utils/singleton.py index 3ec10f79..693bbf2e 100644 --- a/src/guidellm/utils/singleton.py +++ b/src/guidellm/utils/singleton.py @@ -36,6 +36,9 @@ def __init__(self, config_path: str): assert manager1 is manager2 """ + _singleton_initialized: bool + _init_lock: threading.Lock + def __new__(cls, *args, **kwargs): # noqa: ARG004 """ Create or return the singleton instance. 
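A note on the registry lookup change in the patch above: get_registered_object now returns the exact-key match when present and otherwise falls back to a case-insensitive scan of the registry, returning None if nothing matches. A minimal sketch of that behavior follows, with a plain module-level dict standing in for the class-level registry and illustrative contents only:

    # Minimal sketch (not the guidellm implementation) of the updated lookup:
    # exact match first, then a case-insensitive scan, then None.
    from typing import Any, Optional

    registry: dict[str, Any] = {"TokenProposal": object}

    def get_registered_object(name: str) -> Optional[Any]:
        if name in registry:
            return registry[name]
        for key, value in registry.items():
            if name.lower() == key.lower():
                return value
        return None  # Not found

    assert get_registered_object("tokenproposal") is registry["TokenProposal"]
    assert get_registered_object("missing") is None
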
From c84d1603055d87f7ba20cbd7abc706187f8ff682 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Wed, 8 Oct 2025 00:21:09 -0400 Subject: [PATCH 46/90] Fix type errors in utils package Signed-off-by: Jared O'Connell --- src/guidellm/utils/console.py | 2 +- src/guidellm/utils/encoding.py | 94 ++++++------ src/guidellm/utils/functions.py | 15 +- src/guidellm/utils/messaging.py | 192 +++++++++++++++--------- src/guidellm/utils/pydantic_utils.py | 6 +- src/guidellm/utils/synchronous.py | 3 +- tests/unit/utils/test_pydantic_utils.py | 2 +- 7 files changed, 181 insertions(+), 133 deletions(-) diff --git a/src/guidellm/utils/console.py b/src/guidellm/utils/console.py index c8cd6825..54e90cf7 100644 --- a/src/guidellm/utils/console.py +++ b/src/guidellm/utils/console.py @@ -155,7 +155,7 @@ def print_update_details(self, details: Any | None): block = Padding( Text.from_markup(str(details)), (0, 0, 0, 2), - style=StatusStyles.get("debug"), + style=StatusStyles.get("debug", "dim"), ) self.print(block) diff --git a/src/guidellm/utils/encoding.py b/src/guidellm/utils/encoding.py index ccd26982..78d4bbbb 100644 --- a/src/guidellm/utils/encoding.py +++ b/src/guidellm/utils/encoding.py @@ -12,10 +12,10 @@ import json from collections.abc import Mapping -from typing import Annotated, Any, ClassVar, Generic, Literal, Optional, TypeVar +from typing import Annotated, Any, cast, ClassVar, Generic, Literal, Optional, TypeVar try: - import msgpack + import msgpack # type: ignore[import-untyped] # Optional dependency from msgpack import Packer, Unpacker HAS_MSGPACK = True @@ -24,8 +24,8 @@ HAS_MSGPACK = False try: - from msgspec.msgpack import Decoder as MsgspecDecoder - from msgspec.msgpack import Encoder as MsgspecEncoder + from msgspec.msgpack import Decoder as MsgspecDecoder # type: ignore[import-not-found] # Optional dependency + from msgspec.msgpack import Encoder as MsgspecEncoder # type: ignore[import-not-found] # Optional dependency HAS_MSGSPEC = True except ImportError: @@ -33,7 +33,7 @@ HAS_MSGSPEC = False try: - import orjson + import orjson # type: ignore[import-not-found] # Optional dependency HAS_ORJSON = True except ImportError: @@ -116,7 +116,7 @@ def encode_message( """ serialized = serializer.serialize(obj) if serializer else obj - return encoder.encode(serialized) if encoder else serialized + return cast(MsgT, encoder.encode(serialized) if encoder else serialized) @classmethod def decode_message( @@ -137,7 +137,7 @@ def decode_message( """ serialized = encoder.decode(message) if encoder else message - return serializer.deserialize(serialized) if serializer else serialized + return cast(ObjT, serializer.deserialize(serialized) if serializer else serialized) def __init__( self, @@ -296,6 +296,8 @@ def _get_available_encoder_decoder( return None, None, None +PayloadType = Literal['pydantic', 'python', 'collection_tuple', 'collection_sequence', 'collection_mapping'] + class Serializer: """ Object serialization with specialized Pydantic model support. 
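For context on the pack_next_sequence/unpack_next_sequence hunks that follow: the Serializer frames each payload as a one-character type tag ("P" pydantic, "p" python, "T"/"S"/"M" for the collection variants), a "|" delimiter, the payload length, another "|", and then the payload, with frames concatenated back to back. The sketch below shows only the str flavor under simplified assumptions (the bytes flavor in the patch stores the length as big-endian bytes); the helper names are illustrative, not the methods from the patch:

    # Minimal sketch of the str framing: tag | decimal length | payload.
    from __future__ import annotations

    def pack(type_char: str, payload: str, current: str | None = None) -> str:
        frame = f"{type_char}|{len(payload)}|{payload}"
        return current + frame if current else frame

    def unpack(data: str) -> tuple[str, str, str | None]:
        len_end = data.index("|", 2)
        payload_len = int(data[2:len_end])
        payload = data[len_end + 1 : len_end + 1 + payload_len]
        remaining = data[len_end + 1 + payload_len :] or None
        return data[0], payload, remaining

    packed = pack("p", '{"a": 1}')
    packed = pack("p", '{"b": 2}', packed)  # frames concatenate
    tag, first, rest = unpack(packed)
    assert (tag, first, rest) == ("p", '{"a": 1}', 'p|8|{"b": 2}')
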
@@ -474,6 +476,7 @@ def to_sequence(self, obj: Any) -> str | Any: :param obj: Object to serialize to sequence format :return: Serialized sequence string or bytes """ + payload_type: PayloadType if isinstance(obj, BaseModel): payload_type = "pydantic" payload = self.to_sequence_pydantic(obj) @@ -515,7 +518,7 @@ def to_sequence(self, obj: Any) -> str | Any: payload_type = "python" payload = self.to_sequence_python(obj) - return self.pack_next_sequence(payload_type, payload, None) + return self.pack_next_sequence(payload_type, payload if payload is not None else "", None) def from_sequence(self, data: str | Any) -> Any: # noqa: C901, PLR0912 """ @@ -529,6 +532,7 @@ def from_sequence(self, data: str | Any) -> Any: # noqa: C901, PLR0912 :raises ValueError: If sequence format is invalid or contains multiple packed sequences """ + payload: str | bytes | None type_, payload, remaining = self.unpack_next_sequence(data) if remaining is not None: raise ValueError("Data contains multiple packed sequences; expected one.") @@ -540,16 +544,16 @@ def from_sequence(self, data: str | Any) -> Any: # noqa: C901, PLR0912 return self.from_sequence_python(payload) if type_ in {"collection_sequence", "collection_tuple"}: - items = [] + c_items = [] while payload: type_, item_payload, payload = self.unpack_next_sequence(payload) if type_ == "pydantic": - items.append(self.from_sequence_pydantic(item_payload)) + c_items.append(self.from_sequence_pydantic(item_payload)) elif type_ == "python": - items.append(self.from_sequence_python(item_payload)) + c_items.append(self.from_sequence_python(item_payload)) else: raise ValueError("Invalid type in collection sequence") - return items + return c_items if type_ != "collection_mapping": raise ValueError(f"Invalid type for mapping sequence: {type_}") @@ -604,6 +608,7 @@ def from_sequence_pydantic(self, data: str | bytes) -> BaseModel: :param data: Sequence data containing class metadata and JSON :return: Reconstructed Pydantic model instance """ + json_data: str | bytes | bytearray if isinstance(data, bytes): class_name_end = data.index(b"|") class_name = data[:class_name_end].decode() @@ -647,13 +652,7 @@ def from_sequence_python(self, data: str | bytes) -> Any: def pack_next_sequence( # noqa: C901, PLR0912 self, - type_: Literal[ - "pydantic", - "python", - "collection_tuple", - "collection_sequence", - "collection_mapping", - ], + type_: PayloadType, payload: str | bytes, current: str | bytes | None, ) -> str | bytes: @@ -672,9 +671,11 @@ def pack_next_sequence( # noqa: C901, PLR0912 raise ValueError("Payload and current must be of the same type") payload_len = len(payload) - + payload_len_output: str | bytes + payload_type: str | bytes + delimiter: str | bytes if isinstance(payload, bytes): - payload_len = payload_len.to_bytes( + payload_len_output = payload_len.to_bytes( length=(payload_len.bit_length() + 7) // 8 if payload_len > 0 else 1, byteorder="big", ) @@ -692,7 +693,7 @@ def pack_next_sequence( # noqa: C901, PLR0912 raise ValueError(f"Unknown type for packing: {type_}") delimiter = b"|" else: - payload_len = str(payload_len) + payload_len_output = str(payload_len) if type_ == "pydantic": payload_type = "P" elif type_ == "python": @@ -707,20 +708,14 @@ def pack_next_sequence( # noqa: C901, PLR0912 raise ValueError(f"Unknown type for packing: {type_}") delimiter = "|" - next_sequence = payload_type + delimiter + payload_len + delimiter + payload - - return current + next_sequence if current else next_sequence + # Type ignores because types are enforced at runtime 
+ next_sequence = payload_type + delimiter + payload_len_output + delimiter + payload # type: ignore[operator] + return current + next_sequence if current else next_sequence # type: ignore[operator] def unpack_next_sequence( # noqa: C901, PLR0912 self, data: str | bytes ) -> tuple[ - Literal[ - "pydantic", - "python", - "collection_tuple", - "collection_sequence", - "collection_mapping", - ], + PayloadType, str | bytes, str | bytes | None, ]: @@ -731,57 +726,58 @@ def unpack_next_sequence( # noqa: C901, PLR0912 :return: Tuple of (type, payload, remaining_data) :raises ValueError: If sequence format is invalid or unknown type character """ + type_: PayloadType if isinstance(data, bytes): if len(data) < len(b"T|N") or data[1:2] != b"|": raise ValueError("Invalid packed data format") - type_char = data[0:1] - if type_char == b"P": + type_char_b = data[0:1] + if type_char_b == b"P": type_ = "pydantic" - elif type_char == b"p": + elif type_char_b == b"p": type_ = "python" - elif type_char == b"T": + elif type_char_b == b"T": type_ = "collection_tuple" - elif type_char == b"S": + elif type_char_b == b"S": type_ = "collection_sequence" - elif type_char == b"M": + elif type_char_b == b"M": type_ = "collection_mapping" else: raise ValueError("Unknown type character in packed data") len_end = data.index(b"|", 2) payload_len = int.from_bytes(data[2:len_end], "big") - payload = data[len_end + 1 : len_end + 1 + payload_len] - remaining = ( + payload_b = data[len_end + 1 : len_end + 1 + payload_len] + remaining_b = ( data[len_end + 1 + payload_len :] if len_end + 1 + payload_len < len(data) else None ) - return type_, payload, remaining + return type_, payload_b, remaining_b if len(data) < len("T|N") or data[1] != "|": raise ValueError("Invalid packed data format") - type_char = data[0] - if type_char == "P": + type_char_s = data[0] + if type_char_s == "P": type_ = "pydantic" - elif type_char == "p": + elif type_char_s == "p": type_ = "python" - elif type_char == "S": + elif type_char_s == "S": type_ = "collection_sequence" - elif type_char == "M": + elif type_char_s == "M": type_ = "collection_mapping" else: raise ValueError("Unknown type character in packed data") len_end = data.index("|", 2) payload_len = int(data[2:len_end]) - payload = data[len_end + 1 : len_end + 1 + payload_len] - remaining = ( + payload_s = data[len_end + 1 : len_end + 1 + payload_len] + remaining_s = ( data[len_end + 1 + payload_len :] if len_end + 1 + payload_len < len(data) else None ) - return type_, payload, remaining + return type_, payload_s, remaining_s diff --git a/src/guidellm/utils/functions.py b/src/guidellm/utils/functions.py index 6343cbf2..ed4a2075 100644 --- a/src/guidellm/utils/functions.py +++ b/src/guidellm/utils/functions.py @@ -96,19 +96,20 @@ def safe_add( if not values: return default - values = list(values) + values_list = list(values) if signs is None: - signs = [1] * len(values) + signs = [1] * len(values_list) - if len(signs) != len(values): + if len(signs) != len(values_list): raise ValueError("Length of signs must match length of values") - result = values[0] if values[0] is not None else default + result = values_list[0] if values_list[0] is not None else default - for ind in range(1, len(values)): - val = values[ind] if values[ind] is not None else default - result += signs[ind] * val + for ind in range(1, len(values_list)): + value = values_list[ind] + checked_value = value if value is not None else default + result += signs[ind] * checked_value return result diff --git 
a/src/guidellm/utils/messaging.py b/src/guidellm/utils/messaging.py index c56ec29a..db926200 100644 --- a/src/guidellm/utils/messaging.py +++ b/src/guidellm/utils/messaging.py @@ -22,7 +22,7 @@ from multiprocessing.managers import SyncManager from multiprocessing.synchronize import Event as ProcessingEvent from threading import Event as ThreadingEvent -from typing import Any, Callable, Generic, Protocol, TypeVar +from typing import Any, Callable, cast, Generic, List, Protocol, TypeVar import culsans from pydantic import BaseModel @@ -48,19 +48,20 @@ ReceiveMessageT = TypeVar("ReceiveMessageT", bound=Any) """Generic type variable for messages received through the messaging system""" +CheckStopCallableT = Callable[[bool, int], bool] class MessagingStopCallback(Protocol): """Protocol for evaluating stop conditions in messaging operations.""" def __call__( - self, messaging: InterProcessMessaging, pending: bool, queue_empty: int + self, messaging: InterProcessMessaging, pending: bool, queue_empty_count: int ) -> bool: """ Evaluate whether messaging operations should stop. :param messaging: The messaging instance to evaluate :param pending: Whether there are pending operations - :param queue_empty: The number of times in a row the queue has been empty + :param queue_empty_count: The number of times in a row the queue has been empty :return: True if operations should stop, False otherwise """ ... @@ -90,7 +91,7 @@ class InterProcessMessaging(Generic[SendMessageT, ReceiveMessageT], ABC): await messaging.stop() """ - STOP_REQUIRED_QUEUE_EMPTY: int = 3 + STOP_REQUIRED_QUEUE_EMPTY_COUNT: int = 3 def __init__( self, @@ -126,13 +127,13 @@ def __init__( self.max_buffer_receive_size = max_buffer_receive_size self.poll_interval = poll_interval - self.send_stopped_event: ThreadingEvent | ProcessingEvent = None - self.receive_stopped_event: ThreadingEvent | ProcessingEvent = None - self.shutdown_event: ThreadingEvent = None - self.buffer_send_queue: culsans.Queue[SendMessageT] = None - self.buffer_receive_queue: culsans.Queue[ReceiveMessageT] = None - self.send_task: asyncio.Task = None - self.receive_task: asyncio.Task = None + self.send_stopped_event: ThreadingEvent | ProcessingEvent | None = None + self.receive_stopped_event: ThreadingEvent | ProcessingEvent | None = None + self.shutdown_event: ThreadingEvent | None = None + self.buffer_send_queue: culsans.Queue[SendMessageT] | None = None + self.buffer_receive_queue: culsans.Queue[ReceiveMessageT] | None = None + self.send_task: asyncio.Task | None = None + self.receive_task: asyncio.Task | None = None self.running = False @abstractmethod @@ -152,7 +153,7 @@ def create_send_messages_threads( self, send_items: Iterable[Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ) -> list[tuple[Callable, tuple[Any, ...]]]: """ Create send message processing threads for transport implementation. @@ -169,7 +170,7 @@ def create_receive_messages_threads( self, receive_callback: Callable[[Any], Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ) -> list[tuple[Callable, tuple[Any, ...]]]: """ Create receive message processing threads for transport implementation. 
@@ -216,9 +217,8 @@ async def start( self.buffer_receive_queue = culsans.Queue[ReceiveMessageT]( maxsize=self.max_buffer_receive_size or 0 ) - self.tasks_lock = threading.Lock() - message_encoding = MessageEncoding( + message_encoding: MessageEncoding = MessageEncoding( serialization=self.serialization, encoding=self.encoding, pydantic_models=pydantic_models, @@ -245,18 +245,26 @@ async def stop(self): """ Stop message processing tasks and clean up resources. """ - self.shutdown_event.set() - with contextlib.suppress(asyncio.CancelledError): - await asyncio.gather( - self.send_task, self.receive_task, return_exceptions=True - ) + if self.shutdown_event is not None: + self.shutdown_event.set() + else: + raise RuntimeError("shutdown_event is not set; was start() not called or is this a redundant stop() call?") + tasks = [self.send_task, self.receive_task] + tasks_to_run: List[asyncio.Task[Any]] = [task for task in tasks if task is not None] + if len(tasks_to_run) > 0: + with contextlib.suppress(asyncio.CancelledError): + await asyncio.gather( + *tasks_to_run, return_exceptions=True + ) self.send_task = None self.receive_task = None if self.worker_index is None: - self.buffer_send_queue.clear() - await self.buffer_send_queue.aclose() - self.buffer_receive_queue.clear() - await self.buffer_receive_queue.aclose() + if self.buffer_send_queue is not None: + self.buffer_send_queue.clear() + await self.buffer_send_queue.aclose() + if self.buffer_receive_queue is not None: + self.buffer_receive_queue.clear() + await self.buffer_receive_queue.aclose() self.buffer_send_queue = None self.buffer_receive_queue = None self.send_stopped_event = None @@ -298,7 +306,8 @@ async def send_messages_coroutine( canceled_event.set() raise finally: - self.send_stopped_event.set() + if self.send_stopped_event is not None: + self.send_stopped_event.set() async def receive_messages_coroutine( self, @@ -334,15 +343,18 @@ async def receive_messages_coroutine( canceled_event.set() raise finally: - self.receive_stopped_event.set() + if self.receive_stopped_event is not None: + self.receive_stopped_event.set() async def get(self, timeout: float | None = None) -> ReceiveMessageT: """ - Retrieve message from receive buffer with optional timeout. + Retrieve a message from receive buffer with optional timeout. 
:param timeout: Maximum time to wait for a message :return: Decoded message from the receive buffer """ + if self.buffer_receive_queue is None: + raise RuntimeError("buffer receive queue is None; check start()/stop() calls") return await asyncio.wait_for( self.buffer_receive_queue.async_get(), timeout=timeout ) @@ -354,6 +366,8 @@ def get_sync(self, timeout: float | None = None) -> ReceiveMessageT: :param timeout: Maximum time to wait for a message, if <=0 uses get_nowait :return: Decoded message from the receive buffer """ + if self.buffer_receive_queue is None: + raise RuntimeError("buffer receive queue is None; check start()/stop() calls") if timeout is not None and timeout <= 0: return self.buffer_receive_queue.get_nowait() else: @@ -366,6 +380,8 @@ async def put(self, item: SendMessageT, timeout: float | None = None): :param item: Message item to add to the send buffer :param timeout: Maximum time to wait for buffer space """ + if self.buffer_send_queue is None: + raise RuntimeError("buffer receive queue is None; check start()/stop() calls") await asyncio.wait_for(self.buffer_send_queue.async_put(item), timeout=timeout) def put_sync(self, item: SendMessageT, timeout: float | None = None): @@ -375,6 +391,8 @@ def put_sync(self, item: SendMessageT, timeout: float | None = None): :param item: Message item to add to the send buffer :param timeout: Maximum time to wait for buffer space, if <=0 uses put_nowait """ + if self.buffer_send_queue is None: + raise RuntimeError("buffer receive queue is None; check start()/stop() calls") if timeout is not None and timeout <= 0: self.buffer_send_queue.put_nowait(item) else: @@ -394,18 +412,21 @@ def _create_check_stop_callable( ) stop_callbacks = tuple(item for item in stop_criteria or [] if callable(item)) - def check_stop(pending: bool, queue_empty: int) -> bool: + def check_stop(pending: bool, queue_empty_count: int) -> bool: if canceled_event.is_set(): return True if stop_callbacks and any( - cb(self, pending, queue_empty) for cb in stop_callbacks + cb(self, pending, queue_empty_count) for cb in stop_callbacks ): return True + if self.shutdown_event is None: + return True + return ( not pending - and queue_empty >= self.STOP_REQUIRED_QUEUE_EMPTY + and queue_empty_count >= self.STOP_REQUIRED_QUEUE_EMPTY_COUNT and ( self.shutdown_event.is_set() or any(event.is_set() for event in stop_events) @@ -436,6 +457,8 @@ class InterProcessMessagingQueue(InterProcessMessaging[SendMessageT, ReceiveMess # Create worker copy for distributed processing worker_messaging = messaging.create_worker_copy(worker_index=0) """ + pending_queue: multiprocessing.Queue | queue.Queue[Any] | None + done_queue: multiprocessing.Queue | queue.Queue[Any] | None def __init__( self, @@ -448,8 +471,8 @@ def __init__( max_buffer_receive_size: int | None = None, poll_interval: float = 0.1, worker_index: int | None = None, - pending_queue: multiprocessing.Queue | None = None, - done_queue: multiprocessing.Queue | None = None, + pending_queue: multiprocessing.Queue | queue.Queue[Any] | None = None, + done_queue: multiprocessing.Queue | queue.Queue[Any] | None = None, ): """ Initialize queue-based messaging for inter-process communication. 
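The check_stop closure above combines three sources: an explicit cancel event, any caller-supplied MessagingStopCallback, and the shutdown/stop events gated on a run of consecutive empty polls. A sketch of a predicate conforming to that protocol is shown below; the function name and the threshold are assumptions, not part of the patch:

    # Sketch of a stop predicate matching the MessagingStopCallback protocol.
    def stop_when_drained(messaging, pending: bool, queue_empty_count: int) -> bool:
        # Stop once nothing is in flight and the transport has come back
        # empty several polls in a row.
        return not pending and queue_empty_count >= 5

    # Hypothetical usage, mirroring how stop criteria are passed to start():
    # await messaging.start(receive_stop_criteria=[stop_when_drained])
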
@@ -506,9 +529,9 @@ def create_worker_copy( "pending_queue": self.pending_queue, "done_queue": self.done_queue, } - copy_args.update(kwargs) + final_args = {**copy_args, **kwargs} - return InterProcessMessagingQueue[ReceiveMessageT, SendMessageT](**copy_args) + return InterProcessMessagingQueue[ReceiveMessageT, SendMessageT](**final_args) async def stop(self): """ @@ -517,15 +540,21 @@ async def stop(self): await super().stop() if self.worker_index is None: # only main process should close the queues + if self.pending_queue is None: + raise RuntimeError("pending_queue is None; was stop() already called?") with contextlib.suppress(queue.Empty): while True: self.pending_queue.get_nowait() - self.pending_queue.close() + if hasattr(self.pending_queue, 'close'): + self.pending_queue.close() + if self.done_queue is None: + raise RuntimeError("done_queue is None; was stop() already called?") with contextlib.suppress(queue.Empty): while True: self.done_queue.get_nowait() - self.done_queue.close() + if hasattr(self.done_queue, 'close'): + self.done_queue.close() self.pending_queue = None self.done_queue = None @@ -534,7 +563,7 @@ def create_send_messages_threads( self, send_items: Iterable[Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ) -> list[tuple[Callable, tuple[Any, ...]]]: """ Create send message processing threads for queue-based transport. @@ -555,7 +584,7 @@ def create_receive_messages_threads( self, receive_callback: Callable[[Any], Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ) -> list[tuple[Callable, tuple[Any, ...]]]: """ Create receive message processing threads for queue-based transport. 
@@ -576,35 +605,43 @@ def _send_messages_task_thread( # noqa: C901, PLR0912 self, send_items: Iterable[Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ): send_items_iter = iter(send_items) if send_items is not None else None pending_item = None - queue_empty = 0 + queue_empty_count = 0 - while not check_stop(pending_item is not None, queue_empty): + while not check_stop(pending_item is not None, queue_empty_count): if pending_item is None: try: if send_items_iter is not None: item = next(send_items_iter) else: + if self.buffer_send_queue is None: + raise RuntimeError("buffer_send_queue is None; was stop() already called?") item = self.buffer_send_queue.sync_get( timeout=self.poll_interval ) pending_item = message_encoding.encode(item) - queue_empty = 0 + queue_empty_count = 0 except (culsans.QueueEmpty, queue.Empty, StopIteration): - queue_empty += 1 + queue_empty_count += 1 if pending_item is not None: try: if self.worker_index is None: # Main publisher + if self.pending_queue is None: + raise RuntimeError("pending_queue is None; was stop() already called?") self.pending_queue.put(pending_item, timeout=self.poll_interval) else: # Worker + if self.done_queue is None: + raise RuntimeError("done_queue is None; was stop() already called?") self.done_queue.put(pending_item, timeout=self.poll_interval) if send_items_iter is None: + if self.buffer_send_queue is None: + raise RuntimeError("buffer_send_queue is None; was stop() already called?") self.buffer_send_queue.task_done() pending_item = None except (culsans.QueueFull, queue.Full): @@ -614,25 +651,29 @@ def _receive_messages_task_thread( # noqa: C901 self, receive_callback: Callable[[Any], Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ): pending_item = None received_item = None - queue_empty = 0 + queue_empty_count = 0 - while not check_stop(pending_item is not None, queue_empty): + while not check_stop(pending_item is not None, queue_empty_count): if pending_item is None: try: if self.worker_index is None: # Main publisher + if self.done_queue is None: + raise RuntimeError("done_queue is None; check start()/stop() calls") item = self.done_queue.get(timeout=self.poll_interval) else: # Worker + if self.pending_queue is None: + raise RuntimeError("pending_queue is None; check start()/stop() calls") item = self.pending_queue.get(timeout=self.poll_interval) pending_item = message_encoding.decode(item) - queue_empty = 0 + queue_empty_count = 0 except (culsans.QueueEmpty, queue.Empty): - queue_empty += 1 + queue_empty_count += 1 if pending_item is not None or received_item is not None: try: @@ -643,7 +684,9 @@ def _receive_messages_task_thread( # noqa: C901 else receive_callback(pending_item) ) - self.buffer_receive_queue.sync_put(received_item) + if self.buffer_receive_queue is None: + raise RuntimeError("buffer_receive_queue is None; check start()/stop() calls") + self.buffer_receive_queue.sync_put(cast(ReceiveMessageT, received_item)) pending_item = None received_item = None except (culsans.QueueFull, queue.Full): @@ -714,8 +757,8 @@ def __init__( max_buffer_receive_size=max_buffer_receive_size, poll_interval=poll_interval, worker_index=worker_index, - pending_queue=pending_queue or manager.Queue(maxsize=max_pending_size or 0), # type: ignore [assignment] - done_queue=done_queue or manager.Queue(maxsize=max_done_size or 0), # type: ignore [assignment] + pending_queue=pending_queue or 
manager.Queue(maxsize=max_pending_size or 0), + done_queue=done_queue or manager.Queue(maxsize=max_done_size or 0), ) def create_worker_copy( @@ -741,9 +784,9 @@ def create_worker_copy( "pending_queue": self.pending_queue, "done_queue": self.done_queue, } - copy_args.update(kwargs) + final_args = {**copy_args, **kwargs} - return InterProcessMessagingManagerQueue(**copy_args) + return InterProcessMessagingManagerQueue(**final_args) async def stop(self): """ @@ -818,12 +861,13 @@ def __init__( ) self.num_workers = num_workers + self.pipes: list[tuple[Connection, Connection]] if pipe is None: - self.pipes: list[tuple[Connection, Connection]] = [ + self.pipes = [ self.mp_context.Pipe(duplex=True) for _ in range(num_workers) ] else: - self.pipes: list[tuple[Connection, Connection]] = [pipe] + self.pipes = [pipe] def create_worker_copy( self, worker_index: int, **kwargs @@ -847,9 +891,10 @@ def create_worker_copy( "worker_index": worker_index, "pipe": self.pipes[worker_index], } - copy_args.update(kwargs) - return InterProcessMessagingPipe(**copy_args) + final_args = {**copy_args, **kwargs} + + return InterProcessMessagingPipe(**final_args) async def stop(self): """ @@ -866,7 +911,7 @@ def create_send_messages_threads( self, send_items: Iterable[Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ) -> list[tuple[Callable, tuple[Any, ...]]]: """ Create send message processing threads for pipe-based transport. @@ -897,7 +942,7 @@ def create_receive_messages_threads( self, receive_callback: Callable[[Any], Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ) -> list[tuple[Callable, tuple[Any, ...]]]: """ Create receive message processing threads for pipe-based transport. 
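The create_worker_copy hunks above now build final_args by merging the copied defaults with the caller's keyword arguments, so explicit kwargs take precedence over the parent's settings. A small illustration of that merge follows; the values are made up for the example:

    # Later mappings win in a dict merge, so caller overrides replace defaults.
    defaults = {"worker_index": None, "poll_interval": 0.1, "serialization": "dict"}
    overrides = {"worker_index": 2, "poll_interval": 0.05}
    final_args = {**defaults, **overrides}
    assert final_args == {
        "worker_index": 2,
        "poll_interval": 0.05,
        "serialization": "dict",
    }

    # Hypothetical call shape on a messaging instance:
    # worker_messaging = messaging.create_worker_copy(worker_index=2, poll_interval=0.05)
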
@@ -929,13 +974,13 @@ def _send_messages_task_thread( # noqa: C901, PLR0912 pipe: tuple[Connection, Connection], send_items: Iterable[Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ): local_stop = ThreadingEvent() send_connection: Connection = pipe[0] if self.worker_index is None else pipe[1] send_items_iter = iter(send_items) if send_items is not None else None pending_item = None - queue_empty = 0 + queue_empty_count = 0 pipe_item = None pipe_lock = threading.Lock() @@ -957,19 +1002,21 @@ def _background_pipe_recv(): threading.Thread(target=_background_pipe_recv, daemon=True).start() try: - while not check_stop(pending_item is not None, queue_empty): + while not check_stop(pending_item is not None, queue_empty_count): if pending_item is None: try: if send_items_iter is not None: item = next(send_items_iter) else: + if self.buffer_send_queue is None: + raise RuntimeError("buffer_send_queue is None; check start()/stop() calls") item = self.buffer_send_queue.sync_get( timeout=self.poll_interval ) pending_item = message_encoding.encode(item) - queue_empty = 0 + queue_empty_count = 0 except (culsans.QueueEmpty, queue.Empty, StopIteration): - queue_empty += 1 + queue_empty_count += 1 if pending_item is not None: try: @@ -980,6 +1027,8 @@ def _background_pipe_recv(): else: pipe_item = pending_item if send_items_iter is None: + if self.buffer_send_queue is None: + raise RuntimeError("buffer_send_queue is None; check start()/stop() calls") self.buffer_send_queue.task_done() pending_item = None except (culsans.QueueFull, queue.Full): @@ -992,16 +1041,16 @@ def _receive_messages_task_thread( # noqa: C901 pipe: tuple[Connection, Connection], receive_callback: Callable[[Any], Any] | None, message_encoding: MessageEncoding, - check_stop: Callable[[bool, bool], bool], + check_stop: CheckStopCallableT, ): receive_connection: Connection = ( pipe[0] if self.worker_index is not None else pipe[1] ) pending_item = None received_item = None - queue_empty = 0 + queue_empty_count = 0 - while not check_stop(pending_item is not None, queue_empty): + while not check_stop(pending_item is not None, queue_empty_count): if pending_item is None: try: if receive_connection.poll(self.poll_interval): @@ -1009,9 +1058,9 @@ def _receive_messages_task_thread( # noqa: C901 pending_item = message_encoding.decode(item) else: raise queue.Empty - queue_empty = 0 + queue_empty_count = 0 except (culsans.QueueEmpty, queue.Empty): - queue_empty += 1 + queue_empty_count += 1 if pending_item is not None or received_item is not None: try: @@ -1021,8 +1070,9 @@ def _receive_messages_task_thread( # noqa: C901 if not receive_callback else receive_callback(pending_item) ) - - self.buffer_receive_queue.sync_put(received_item) + if self.buffer_receive_queue is None: + raise RuntimeError("buffer receive queue is None; check start()/stop() calls") + self.buffer_receive_queue.sync_put(cast(ReceiveMessageT, received_item)) pending_item = None received_item = None except (culsans.QueueFull, queue.Full): diff --git a/src/guidellm/utils/pydantic_utils.py b/src/guidellm/utils/pydantic_utils.py index 27c2e1cf..7c46a1bf 100644 --- a/src/guidellm/utils/pydantic_utils.py +++ b/src/guidellm/utils/pydantic_utils.py @@ -11,7 +11,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, ClassVar, Generic, TypeVar +from typing import Any, cast, ClassVar, Generic, TypeVar from pydantic import BaseModel, ConfigDict, Field, 
GetCoreSchemaHandler from pydantic_core import CoreSchema, core_schema @@ -29,7 +29,7 @@ BaseModelT = TypeVar("BaseModelT", bound=BaseModel) -RegisterClassT = TypeVar("RegisterClassT") +RegisterClassT = TypeVar("RegisterClassT", bound=type) SuccessfulT = TypeVar("SuccessfulT") ErroredT = TypeVar("ErroredT") IncompleteT = TypeVar("IncompleteT") @@ -300,7 +300,7 @@ def register_decorator( super().register_decorator(clazz, name=name) cls.reload_schema() - return clazz + return cast(RegisterClassT, clazz) @classmethod def __get_pydantic_core_schema__( diff --git a/src/guidellm/utils/synchronous.py b/src/guidellm/utils/synchronous.py index 14f3d908..64c14e94 100644 --- a/src/guidellm/utils/synchronous.py +++ b/src/guidellm/utils/synchronous.py @@ -131,8 +131,9 @@ async def wait_for_sync_objects( :param poll_interval: Time in seconds between polling checks for each object :return: Index (for list/single) or key name (for dict) of the first completed object - :raises asyncio.CancelledError: If the async task is cancelled + :raises asyncio.CancelledError: If the async task is canceled """ + keys: list[int | str] if isinstance(objects, dict): keys = list(objects.keys()) objects = list(objects.values()) diff --git a/tests/unit/utils/test_pydantic_utils.py b/tests/unit/utils/test_pydantic_utils.py index 726b5ddf..dfd57e69 100644 --- a/tests/unit/utils/test_pydantic_utils.py +++ b/tests/unit/utils/test_pydantic_utils.py @@ -41,7 +41,7 @@ def test_register_class_t(): """Test that RegisterClassT is configured correctly as a TypeVar.""" assert isinstance(RegisterClassT, type(TypeVar("test"))) assert RegisterClassT.__name__ == "RegisterClassT" - assert RegisterClassT.__bound__ is None + assert RegisterClassT.__bound__ == type assert RegisterClassT.__constraints__ == () From 5101d70db88649ad1260343d2e18c4f1595d8873 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Wed, 8 Oct 2025 09:38:26 -0400 Subject: [PATCH 47/90] Fix linting errors Signed-off-by: Jared O'Connell --- src/guidellm/utils/__init__.py | 4 +- src/guidellm/utils/encoding.py | 39 ++++++++---- src/guidellm/utils/messaging.py | 91 +++++++++++++++++++--------- src/guidellm/utils/pydantic_utils.py | 4 +- src/guidellm/utils/registry.py | 4 +- 5 files changed, 98 insertions(+), 44 deletions(-) diff --git a/src/guidellm/utils/__init__.py b/src/guidellm/utils/__init__.py index bd6b5a90..702b2a9d 100644 --- a/src/guidellm/utils/__init__.py +++ b/src/guidellm/utils/__init__.py @@ -81,7 +81,6 @@ "EndlessTextCreator", "InfoMixin", "IntegerRangeSampler", - "camelize_str", "InterProcessMessaging", "InterProcessMessagingManagerQueue", "InterProcessMessagingPipe", @@ -107,14 +106,15 @@ "ThreadSafeSingletonMixin", "TimeRunningStats", "all_defined", + "camelize_str", "check_load_processor", "clean_text", "filter_text", "format_value_display", "get_literal_vals", "is_punctuation", - "recursive_key_update", "load_text", + "recursive_key_update", "safe_add", "safe_divide", "safe_format_timestamp", diff --git a/src/guidellm/utils/encoding.py b/src/guidellm/utils/encoding.py index 78d4bbbb..6823fb77 100644 --- a/src/guidellm/utils/encoding.py +++ b/src/guidellm/utils/encoding.py @@ -12,10 +12,10 @@ import json from collections.abc import Mapping -from typing import Annotated, Any, cast, ClassVar, Generic, Literal, Optional, TypeVar +from typing import Annotated, Any, ClassVar, Generic, Literal, Optional, TypeVar, cast try: - import msgpack # type: ignore[import-untyped] # Optional dependency + import msgpack # type: ignore[import-untyped] # Optional dependency 
from msgpack import Packer, Unpacker HAS_MSGPACK = True @@ -24,8 +24,12 @@ HAS_MSGPACK = False try: - from msgspec.msgpack import Decoder as MsgspecDecoder # type: ignore[import-not-found] # Optional dependency - from msgspec.msgpack import Encoder as MsgspecEncoder # type: ignore[import-not-found] # Optional dependency + from msgspec.msgpack import ( # type: ignore[import-not-found] # Optional dependency + Decoder as MsgspecDecoder, + ) + from msgspec.msgpack import ( # type: ignore[import-not-found] # Optional dependency + Encoder as MsgspecEncoder, + ) HAS_MSGSPEC = True except ImportError: @@ -33,7 +37,7 @@ HAS_MSGSPEC = False try: - import orjson # type: ignore[import-not-found] # Optional dependency + import orjson # type: ignore[import-not-found] # Optional dependency HAS_ORJSON = True except ImportError: @@ -116,7 +120,7 @@ def encode_message( """ serialized = serializer.serialize(obj) if serializer else obj - return cast(MsgT, encoder.encode(serialized) if encoder else serialized) + return cast("MsgT", encoder.encode(serialized) if encoder else serialized) @classmethod def decode_message( @@ -137,7 +141,9 @@ def decode_message( """ serialized = encoder.decode(message) if encoder else message - return cast(ObjT, serializer.deserialize(serialized) if serializer else serialized) + return cast( + "ObjT", serializer.deserialize(serialized) if serializer else serialized + ) def __init__( self, @@ -296,7 +302,14 @@ def _get_available_encoder_decoder( return None, None, None -PayloadType = Literal['pydantic', 'python', 'collection_tuple', 'collection_sequence', 'collection_mapping'] +PayloadType = Literal[ + "pydantic", + "python", + "collection_tuple", + "collection_sequence", + "collection_mapping", +] + class Serializer: """ @@ -518,7 +531,9 @@ def to_sequence(self, obj: Any) -> str | Any: payload_type = "python" payload = self.to_sequence_python(obj) - return self.pack_next_sequence(payload_type, payload if payload is not None else "", None) + return self.pack_next_sequence( + payload_type, payload if payload is not None else "", None + ) def from_sequence(self, data: str | Any) -> Any: # noqa: C901, PLR0912 """ @@ -709,8 +724,10 @@ def pack_next_sequence( # noqa: C901, PLR0912 delimiter = "|" # Type ignores because types are enforced at runtime - next_sequence = payload_type + delimiter + payload_len_output + delimiter + payload # type: ignore[operator] - return current + next_sequence if current else next_sequence # type: ignore[operator] + next_sequence = ( + payload_type + delimiter + payload_len_output + delimiter + payload # type: ignore[operator] + ) + return current + next_sequence if current else next_sequence # type: ignore[operator] def unpack_next_sequence( # noqa: C901, PLR0912 self, data: str | bytes diff --git a/src/guidellm/utils/messaging.py b/src/guidellm/utils/messaging.py index db926200..9311259d 100644 --- a/src/guidellm/utils/messaging.py +++ b/src/guidellm/utils/messaging.py @@ -22,7 +22,7 @@ from multiprocessing.managers import SyncManager from multiprocessing.synchronize import Event as ProcessingEvent from threading import Event as ThreadingEvent -from typing import Any, Callable, cast, Generic, List, Protocol, TypeVar +from typing import Any, Callable, Generic, Protocol, TypeVar, cast import culsans from pydantic import BaseModel @@ -50,6 +50,7 @@ CheckStopCallableT = Callable[[bool, int], bool] + class MessagingStopCallback(Protocol): """Protocol for evaluating stop conditions in messaging operations.""" @@ -248,14 +249,17 @@ async def stop(self): if 
self.shutdown_event is not None:
             self.shutdown_event.set()
         else:
-            raise RuntimeError("shutdown_event is not set; was start() not called or is this a redundant stop() call?")
+            raise RuntimeError(
+                "shutdown_event is not set; was start() not called or "
+                "is this a redundant stop() call?"
+            )
         tasks = [self.send_task, self.receive_task]
-        tasks_to_run: List[asyncio.Task[Any]] = [task for task in tasks if task is not None]
+        tasks_to_run: list[asyncio.Task[Any]] = [
+            task for task in tasks if task is not None
+        ]
         if len(tasks_to_run) > 0:
             with contextlib.suppress(asyncio.CancelledError):
-                await asyncio.gather(
-                    *tasks_to_run, return_exceptions=True
-                )
+                await asyncio.gather(*tasks_to_run, return_exceptions=True)
         self.send_task = None
         self.receive_task = None
         if self.worker_index is None:
@@ -354,7 +358,9 @@ async def get(self, timeout: float | None = None) -> ReceiveMessageT:
         :return: Decoded message from the receive buffer
         """
         if self.buffer_receive_queue is None:
-            raise RuntimeError("buffer receive queue is None; check start()/stop() calls")
+            raise RuntimeError(
+                "buffer receive queue is None; check start()/stop() calls"
+            )
         return await asyncio.wait_for(
             self.buffer_receive_queue.async_get(), timeout=timeout
         )
@@ -367,7 +373,9 @@ def get_sync(self, timeout: float | None = None) -> ReceiveMessageT:
         :return: Decoded message from the receive buffer
         """
         if self.buffer_receive_queue is None:
-            raise RuntimeError("buffer receive queue is None; check start()/stop() calls")
+            raise RuntimeError(
+                "buffer receive queue is None; check start()/stop() calls"
+            )
         if timeout is not None and timeout <= 0:
             return self.buffer_receive_queue.get_nowait()
         else:
@@ -381,7 +389,9 @@ async def put(self, item: SendMessageT, timeout: float | None = None):
         :param timeout: Maximum time to wait for buffer space
         """
         if self.buffer_send_queue is None:
-            raise RuntimeError("buffer receive queue is None; check start()/stop() calls")
+            raise RuntimeError(
+                "buffer receive queue is None; check start()/stop() calls"
+            )
         await asyncio.wait_for(self.buffer_send_queue.async_put(item), timeout=timeout)

     def put_sync(self, item: SendMessageT, timeout: float | None = None):
@@ -392,7 +402,9 @@ def put_sync(self, item: SendMessageT, timeout: float | None = None):
         :param timeout: Maximum time to wait for buffer space, if <=0 uses put_nowait
         """
         if self.buffer_send_queue is None:
-            raise RuntimeError("buffer receive queue is None; check start()/stop() calls")
+            raise RuntimeError(
+                "buffer receive queue is None; check start()/stop() calls"
+            )
         if timeout is not None and timeout <= 0:
             self.buffer_send_queue.put_nowait(item)
         else:
@@ -457,6 +469,7 @@ class InterProcessMessagingQueue(InterProcessMessaging[SendMessageT, ReceiveMess
         # Create worker copy for distributed processing
         worker_messaging = messaging.create_worker_copy(worker_index=0)
     """
+
     pending_queue: multiprocessing.Queue | queue.Queue[Any] | None
     done_queue: multiprocessing.Queue | queue.Queue[Any] | None
@@ -545,7 +558,7 @@ async def stop(self):
             with contextlib.suppress(queue.Empty):
                 while True:
                     self.pending_queue.get_nowait()
-            if hasattr(self.pending_queue, 'close'):
+            if hasattr(self.pending_queue, "close"):
                 self.pending_queue.close()
         if self.done_queue is None:
@@ -553,7 +566,7 @@ async def stop(self):
             with contextlib.suppress(queue.Empty):
                 while True:
                     self.done_queue.get_nowait()
-            if hasattr(self.done_queue, 'close'):
+            if hasattr(self.done_queue, "close"):
                 self.done_queue.close()
         self.pending_queue = None
@@ -618,7 +631,9 @@ def _send_messages_task_thread(  # noqa: C901, PLR0912
                     item = next(send_items_iter)
                 else:
                     if self.buffer_send_queue is None:
-                        raise RuntimeError("buffer_send_queue is None; was stop() already called?")
+                        raise RuntimeError(
+                            "buffer_send_queue is None; was stop() already called?"
+                        )
                     item = self.buffer_send_queue.sync_get(
                         timeout=self.poll_interval
                     )
@@ -632,16 +647,22 @@ def _send_messages_task_thread(  # noqa: C901, PLR0912
                 if self.worker_index is None:  # Main publisher
                     if self.pending_queue is None:
-                        raise RuntimeError("pending_queue is None; was stop() already called?")
+                        raise RuntimeError(
+                            "pending_queue is None; was stop() already called?"
+                        )
                     self.pending_queue.put(pending_item, timeout=self.poll_interval)
                 else:  # Worker
                     if self.done_queue is None:
-                        raise RuntimeError("done_queue is None; was stop() already called?")
+                        raise RuntimeError(
+                            "done_queue is None; was stop() already called?"
+                        )
                     self.done_queue.put(pending_item, timeout=self.poll_interval)
                 if send_items_iter is None:
                     if self.buffer_send_queue is None:
-                        raise RuntimeError("buffer_send_queue is None; was stop() already called?")
+                        raise RuntimeError(
+                            "buffer_send_queue is None; was stop() already called?"
+                        )
                     self.buffer_send_queue.task_done()
                 pending_item = None
             except (culsans.QueueFull, queue.Full):
@@ -663,12 +684,16 @@ def _receive_messages_task_thread(  # noqa: C901
                 if self.worker_index is None:  # Main publisher
                     if self.done_queue is None:
-                        raise RuntimeError("done_queue is None; check start()/stop() calls")
+                        raise RuntimeError(
+                            "done_queue is None; check start()/stop() calls"
+                        )
                     item = self.done_queue.get(timeout=self.poll_interval)
                 else:  # Worker
                     if self.pending_queue is None:
-                        raise RuntimeError("pending_queue is None; check start()/stop() calls")
+                        raise RuntimeError(
+                            "pending_queue is None; check start()/stop() calls"
+                        )
                     item = self.pending_queue.get(timeout=self.poll_interval)
                 pending_item = message_encoding.decode(item)
                 queue_empty_count = 0
@@ -685,8 +710,12 @@ def _receive_messages_task_thread(  # noqa: C901
                     )
                     if self.buffer_receive_queue is None:
-                        raise RuntimeError("buffer_receive_queue is None; check start()/stop() calls")
+                        raise RuntimeError(
+                            "buffer_receive_queue is None; check start()/stop() calls"
+                        )
-                    self.buffer_receive_queue.sync_put(cast(ReceiveMessageT, received_item))
+                    self.buffer_receive_queue.sync_put(
+                        cast("ReceiveMessageT", received_item)
+                    )
                     pending_item = None
                     received_item = None
             except (culsans.QueueFull, queue.Full):
@@ -863,9 +892,7 @@ def __init__(
         self.pipes: list[tuple[Connection, Connection]]
         if pipe is None:
-            self.pipes = [
-                self.mp_context.Pipe(duplex=True) for _ in range(num_workers)
-            ]
+            self.pipes = [self.mp_context.Pipe(duplex=True) for _ in range(num_workers)]
         else:
             self.pipes = [pipe]
@@ -969,7 +996,7 @@ def create_receive_messages_threads(
             )
         ]

-    def _send_messages_task_thread(  # noqa: C901, PLR0912
+    def _send_messages_task_thread(  # noqa: C901, PLR0912, PLR0915
         self,
         pipe: tuple[Connection, Connection],
         send_items: Iterable[Any] | None,
@@ -1009,7 +1036,9 @@ def _background_pipe_recv():
                     item = next(send_items_iter)
                 else:
                     if self.buffer_send_queue is None:
-                        raise RuntimeError("buffer_send_queue is None; check start()/stop() calls")
+                        raise RuntimeError(
+                            "buffer_send_queue is None; check start()/stop() calls"  # noqa: E501
+                        )
                     item = self.buffer_send_queue.sync_get(
                         timeout=self.poll_interval
                     )
@@ -1028,7 +1057,9 @@ def _background_pipe_recv():
                     pipe_item = pending_item
                     if send_items_iter is None:
                         if self.buffer_send_queue is None:
-                            raise RuntimeError("buffer_send_queue is None; check start()/stop() calls")
+                            raise RuntimeError(
+                                "buffer_send_queue is None; check start()/stop() calls"  # noqa: E501
+                            )
                         self.buffer_send_queue.task_done()
                     pending_item = None
             except (culsans.QueueFull, queue.Full):
@@ -1071,8 +1102,12 @@ def _receive_messages_task_thread(  # noqa: C901
                         else receive_callback(pending_item)
                     )
                     if self.buffer_receive_queue is None:
-                        raise RuntimeError("buffer receive queue is None; check start()/stop() calls")
+                        raise RuntimeError(
+                            "buffer receive queue is None; check start()/stop() calls"
+                        )
-                    self.buffer_receive_queue.sync_put(cast(ReceiveMessageT, received_item))
+                    self.buffer_receive_queue.sync_put(
+                        cast("ReceiveMessageT", received_item)
+                    )
                     pending_item = None
                     received_item = None
             except (culsans.QueueFull, queue.Full):
diff --git a/src/guidellm/utils/pydantic_utils.py b/src/guidellm/utils/pydantic_utils.py
index 7c46a1bf..515b445e 100644
--- a/src/guidellm/utils/pydantic_utils.py
+++ b/src/guidellm/utils/pydantic_utils.py
@@ -11,7 +11,7 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from typing import Any, cast, ClassVar, Generic, TypeVar
+from typing import Any, ClassVar, Generic, TypeVar, cast

 from pydantic import BaseModel, ConfigDict, Field, GetCoreSchemaHandler
 from pydantic_core import CoreSchema, core_schema
@@ -300,7 +300,7 @@ def register_decorator(
         super().register_decorator(clazz, name=name)
         cls.reload_schema()

-        return cast(RegisterClassT, clazz)
+        return cast("RegisterClassT", clazz)

     @classmethod
     def __get_pydantic_core_schema__(
diff --git a/src/guidellm/utils/registry.py b/src/guidellm/utils/registry.py
index 909d3f45..2fdfc318 100644
--- a/src/guidellm/utils/registry.py
+++ b/src/guidellm/utils/registry.py
@@ -19,7 +19,9 @@
 RegistryObjT = TypeVar("RegistryObjT")
 """Generic type variable for objects managed by the registry system."""

-RegisterT = TypeVar("RegisterT", bound=type)  # Must be bound to type to ensure __name__ is available.
+RegisterT = TypeVar(
+    "RegisterT", bound=type
+)  # Must be bound to type to ensure __name__ is available.
 """Generic type variable for the args and return values within the registry."""

From 61fa01f99dbbc7b2612ce3f6ddada388e3da67dd Mon Sep 17 00:00:00 2001
From: Jared O'Connell
Date: Wed, 8 Oct 2025 10:11:16 -0400
Subject: [PATCH 48/90] Fix test

Signed-off-by: Jared O'Connell
---
 tests/unit/utils/test_registry.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/unit/utils/test_registry.py b/tests/unit/utils/test_registry.py
index eed126d3..7bd0eaf8 100644
--- a/tests/unit/utils/test_registry.py
+++ b/tests/unit/utils/test_registry.py
@@ -26,7 +26,7 @@ def test_registered_type():
     """Test that RegisterT is configured correctly as a TypeVar."""
     assert isinstance(RegisterT, type(TypeVar("test")))
     assert RegisterT.__name__ == "RegisterT"
-    assert RegisterT.__bound__ is None
+    assert RegisterT.__bound__ is type
     assert RegisterT.__constraints__ == ()

From db43ccd77aa966c3e7d311526a3adac59d3c6bf9 Mon Sep 17 00:00:00 2001
From: Jared O'Connell
Date: Wed, 8 Oct 2025 10:22:24 -0400
Subject: [PATCH 49/90] Use preferred operator for type check

Signed-off-by: Jared O'Connell
---
 tests/unit/utils/test_pydantic_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/unit/utils/test_pydantic_utils.py b/tests/unit/utils/test_pydantic_utils.py
index dfd57e69..b1278f51 100644
--- a/tests/unit/utils/test_pydantic_utils.py
+++ b/tests/unit/utils/test_pydantic_utils.py
@@ -41,7 +41,7 @@ def test_register_class_t():
     """Test that RegisterClassT is configured correctly as a TypeVar."""
     assert isinstance(RegisterClassT, type(TypeVar("test")))
     assert RegisterClassT.__name__ == "RegisterClassT"
-    assert RegisterClassT.__bound__ == type
+    assert RegisterClassT.__bound__ is type
     assert RegisterClassT.__constraints__ == ()

From 80df98bab4cd6d4f880c0e883893c9025b652069 Mon Sep 17 00:00:00 2001
From: Jared O'Connell
Date: Wed, 8 Oct 2025 11:20:43 -0400
Subject: [PATCH 50/90] Fix and enable CSV output test

Signed-off-by: Jared O'Connell
---
 tests/unit/benchmark/test_output.py | 2 +-
 tests/unit/mock_benchmark.py | 6 +++++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/tests/unit/benchmark/test_output.py b/tests/unit/benchmark/test_output.py
index 85979c12..6763d978 100644
--- a/tests/unit/benchmark/test_output.py
+++ b/tests/unit/benchmark/test_output.py
@@ -80,7 +80,6 @@ def test_file_yaml():
     mock_path.unlink()

-@pytest.mark.skip(reason="CSV fix not merged yet")
 @pytest.mark.asyncio
 async def test_file_csv():
     mock_benchmark = mock_generative_benchmark()
@@ -96,6 +95,7 @@ async def test_file_csv():
     rows = list(reader)

     assert "Type" in headers
+    assert "Profile" in headers
     assert len(rows) == 1

     mock_path.unlink()
diff --git a/tests/unit/mock_benchmark.py b/tests/unit/mock_benchmark.py
index c0d6aa34..cdf4375a 100644
--- a/tests/unit/mock_benchmark.py
+++ b/tests/unit/mock_benchmark.py
@@ -76,7 +76,11 @@ def mock_generative_benchmark() -> GenerativeBenchmark:
         ),
         benchmarker=BenchmarkerDict(
             profile=SynchronousProfile.create("synchronous", rate=None),
-            requests={},
+            requests={
+                "attributes": {
+                    "data": "prompt_tokens=256,output_tokens=128",
+                },
+            },
             backend={},
             environment={},
             aggregators={},

From 616ef92e5cd456ca7db971deb822e88119f3c57b Mon Sep 17 00:00:00 2001
From: Mark Kurtz
Date: Wed, 8 Oct 2025 15:44:55 -0400
Subject: [PATCH 51/90] Fix audio pathways so requests work

---
 pyproject.toml | 2 +-
 src/guidellm/__init__.py | 4 +-
 src/guidellm/__main__.py | 1 +
 src/guidellm/backends/openai.py | 4 +-
 src/guidellm/benchmark/aggregator.py | 2 +-
src/guidellm/benchmark/objects.py | 6 +- .../data/deserializers/deserializer.py | 16 +- .../data/deserializers/huggingface.py | 6 + src/guidellm/data/loaders.py | 88 ++++---- src/guidellm/data/objects.py | 10 + src/guidellm/data/preprocessors/formatters.py | 22 +- src/guidellm/data/preprocessors/mappers.py | 35 ++-- src/guidellm/data/utils/functions.py | 190 +++++++++++++----- src/guidellm/scheduler/worker_group.py | 4 +- src/guidellm/settings.py | 2 +- src/guidellm/utils/cli.py | 4 +- 16 files changed, 266 insertions(+), 130 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3461530d..6ccbf06a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ dependencies = [ "httpx[http2]<1.0.0", "loguru", "msgpack", - "numpy", + "numpy<2.0.0", "pillow", "protobuf", "pydantic>=2.11.7", diff --git a/src/guidellm/__init__.py b/src/guidellm/__init__.py index dde6e937..f466073e 100644 --- a/src/guidellm/__init__.py +++ b/src/guidellm/__init__.py @@ -7,7 +7,7 @@ import logging import os -from datasets.utils.logging import disable_progress_bar +from datasets import config with ( open(os.devnull, "w") as devnull, # noqa: PTH123 @@ -21,7 +21,7 @@ os.environ["TOKENIZERS_PARALLELISM"] = "false" # Silence warnings for tokenizers hf_logging.set_verbosity_error() logging.getLogger("transformers").setLevel(logging.ERROR) - disable_progress_bar() + config.USE_AUDIO_DECODE = False from .logger import configure_logger, logger from .settings import ( diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 43939fa7..4bb43d0f 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -245,6 +245,7 @@ def benchmark(): ) @click.option( "--data-args", + multiple=True, default=None, callback=cli_tools.parse_json, help=( diff --git a/src/guidellm/backends/openai.py b/src/guidellm/backends/openai.py index 22394afe..f8ccaafb 100644 --- a/src/guidellm/backends/openai.py +++ b/src/guidellm/backends/openai.py @@ -248,7 +248,7 @@ async def resolve( # noqa: C901 request.arguments.method or "POST", request.arguments.url, content=request.arguments.content_body, - files=request.arguments.files, + files=request.arguments.request_files, json=request.arguments.json_body, params=request.arguments.params, headers=request.arguments.headers, @@ -281,7 +281,7 @@ async def resolve( # noqa: C901 request.arguments.method or "POST", request.arguments.url, content=request.arguments.content_body, - files=request.arguments.files, + files=request.arguments.request_files, json=request.arguments.json_body, params=request.arguments.params, headers=request.arguments.headers, diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index 562fc36c..2dc3c56f 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -937,7 +937,7 @@ def _create_generative_request_stats( return GenerativeRequestStats( request_id=request.request_id, request_type=request.request_type, - request_args=request.arguments, + request_args=str(request.arguments), output=response.text if response else None, iterations=response.iterations if response else 0, prompt_tokens=( diff --git a/src/guidellm/benchmark/objects.py b/src/guidellm/benchmark/objects.py index a9b5ff79..c3481303 100644 --- a/src/guidellm/benchmark/objects.py +++ b/src/guidellm/benchmark/objects.py @@ -35,7 +35,7 @@ Profile, ) from guidellm.data import ( - GenerationRequestArguments, + GenerativeRequestType, ) from guidellm.scheduler import ( ScheduledRequestInfo, @@ -214,10 +214,10 @@ class 
GenerativeRequestStats(BenchmarkRequestStats): type_: Literal["generative_request_stats"] = "generative_request_stats" request_id: str = Field(description="Unique identifier for the request") - request_type: Literal["text_completions", "chat_completions"] = Field( + request_type: GenerativeRequestType | str = Field( description="Type of generative request: text or chat completion" ) - request_args: GenerationRequestArguments | None = Field( + request_args: str | None = Field( default=None, description="Arguments passed to the backend for this request" ) output: str | None = Field( diff --git a/src/guidellm/data/deserializers/deserializer.py b/src/guidellm/data/deserializers/deserializer.py index c7e2f1da..cb362710 100644 --- a/src/guidellm/data/deserializers/deserializer.py +++ b/src/guidellm/data/deserializers/deserializer.py @@ -43,6 +43,8 @@ def deserialize( random_seed: int = 42, type_: str | None = None, resolve_split: bool = True, + select_columns: list[str] | None = None, + remove_columns: list[str] | None = None, **data_kwargs: dict[str, Any], ) -> Dataset | IterableDataset: dataset = None @@ -78,4 +80,16 @@ def deserialize( f"with kwargs {data_kwargs} and type_ {type_}." ) - return resolve_dataset_split(dataset) if resolve_split else dataset + if resolve_split: + dataset = resolve_dataset_split(dataset) + + if select_columns is not None or remove_columns is not None: + column_names = dataset.column_names or list(next(iter(dataset)).keys()) + if select_columns is not None: + remove_columns = [ + col for col in column_names if col not in select_columns + ] + + dataset = dataset.remove_columns(remove_columns) + + return dataset diff --git a/src/guidellm/data/deserializers/huggingface.py b/src/guidellm/data/deserializers/huggingface.py index 3e0cf090..69f7d506 100644 --- a/src/guidellm/data/deserializers/huggingface.py +++ b/src/guidellm/data/deserializers/huggingface.py @@ -62,6 +62,12 @@ def __call__( except Exception as err: # noqa: BLE001 load_error = err + try: + # Handle dataset identifier from the Hugging Face Hub + return load_dataset(str(data), **data_kwargs) + except Exception as err: # noqa: BLE001 + load_error = err + not_supported = DataNotSupportedError( "Unsupported data for HuggingFaceDatasetDeserializer, " "expected Dataset, IterableDataset, DatasetDict, IterableDatasetDict, " diff --git a/src/guidellm/data/loaders.py b/src/guidellm/data/loaders.py index 303e5a8d..89098964 100644 --- a/src/guidellm/data/loaders.py +++ b/src/guidellm/data/loaders.py @@ -14,31 +14,47 @@ from guidellm.data.objects import GenerationRequest from guidellm.data.preprocessors import DataDependentPreprocessor, DatasetPreprocessor -__all__ = ["DataLoader", "datasets_item_iterator"] - - -def datasets_item_iterator( - datasets: list[Dataset | IterableDataset], - data_samples: int, - preprocessors: tuple[DatasetPreprocessor | DataDependentPreprocessor], -) -> Iterator[Any]: - gen_count = 0 - dataset_iters = [iter(dataset) for dataset in datasets] - - with contextlib.suppress(StopIteration): - while gen_count < data_samples or data_samples == math.inf: - row = {"items": [next(dataset_iter) for dataset_iter in dataset_iters]} - for preprocessor in preprocessors: - row = preprocessor(row) - yield row - gen_count += 1 - - if data_samples != math.inf and gen_count < data_samples: - raise ValueError( - f"Requested {data_samples} samples, but only {gen_count} " - "available from the provided datasets." 
+__all__ = ["DataIterator", "DataLoader"] + + +class DataIterator: + def __init__( + self, + datasets: list[Dataset | IterableDataset], + preprocessors: list[DatasetPreprocessor | DataDependentPreprocessor], + precache_size: int | None = None, + ): + self.datasets = datasets + self.preprocessors = preprocessors + self.precache = ( + None if not precache_size else list(self.generator(precache_size)) ) + def __iter__(self): + if self.precache is not None: + yield from self.precache + else: + yield from self.generator() + + def generator(self, max_items: int | None = None) -> Iterator[Any]: + gen_count = 0 + + with contextlib.suppress(StopIteration): + dataset_iters = [iter(dataset) for dataset in self.datasets] + + while max_items is None or gen_count < max_items: + row = {"items": [next(dataset_iter) for dataset_iter in dataset_iters]} + for preprocessor in self.preprocessors: + row = preprocessor(row) + yield row + gen_count += 1 + + if max_items is not None and gen_count < max_items: + raise ValueError( + f"Requested {max_items} samples, but only {gen_count} " + "available from the provided datasets." + ) + class DataLoader(PyTorchDataLoader[GenerationRequest]): def __init__( @@ -68,14 +84,11 @@ def __init__( datasets = [] for datum, data_kwargs in zip(data, data_args): - type_ = data_kwargs.pop("type_") if "type_" in data_kwargs else None datasets.append( DatasetDeserializerFactory.deserialize( data=datum, - data_kwargs=data_args, processor_factory=processor_factory, random_seed=random_seed, - type_=type_, **data_kwargs, ) ) @@ -85,20 +98,15 @@ def __init__( datasets=datasets, data_args=data_args, ) - if data_samples != math.inf and data_samples > 0: - cached_samples = list( - datasets_item_iterator(datasets, data_samples, tuple(preprocessors)) - ) - dataset = IterableDataset.from_generator(lambda: cached_samples) - else: - dataset = IterableDataset.from_generator( - datasets_item_iterator, - gen_kwargs={ - "datasets": datasets, - "data_samples": math.inf, - "preprocessors": tuple(preprocessors), - }, - ) + + data_iterator = DataIterator( + datasets=datasets, + preprocessors=preprocessors, + precache_size=data_samples + if data_samples != math.inf and data_samples > 0 + else None, + ) + dataset = IterableDataset.from_generator(data_iterator.__iter__) super().__init__( dataset=dataset, diff --git a/src/guidellm/data/objects.py b/src/guidellm/data/objects.py index 2a4b3857..095014d3 100644 --- a/src/guidellm/data/objects.py +++ b/src/guidellm/data/objects.py @@ -103,6 +103,16 @@ def model_combine_dict( # noqa: C901, PLR0912 description="HTTP headers to include in the request, if applicable.", ) + @property + def request_files(self) -> dict[str, Any] | None: + if not self.files: + return None + + return { + key: value if not isinstance(value, list) else tuple(value) + for key, value in self.files.items() + } + @SchedulerMessagingPydanticRegistry.register() class GenerationRequest(StandardBaseModel): diff --git a/src/guidellm/data/preprocessors/formatters.py b/src/guidellm/data/preprocessors/formatters.py index c41ce936..02bb7398 100644 --- a/src/guidellm/data/preprocessors/formatters.py +++ b/src/guidellm/data/preprocessors/formatters.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Literal +from typing import Any from guidellm.data.objects import ( GenerationRequest, @@ -242,7 +242,7 @@ def __init__( def __call__( self, columns: dict[GenerativeDatasetColumnType, list[Any]] ) -> GenerationRequest: - arguments = {"json_body": {}} + arguments = {"json_body": {}, 
"files": {}} stats = {} # Add model @@ -251,6 +251,7 @@ def __call__( # Configure streaming if self.stream: + arguments["stream"] = True arguments["json_body"].update( {"stream": True, "stream_options": {"include_usage": True}} ) @@ -282,13 +283,11 @@ def __call__( ): arguments["json_body"]["prompt"] = "".join(prefix) + "".join(text) - return { - "request": { - "request_type": "audio_transcriptions", - "arguments": arguments, - "stats": stats, - } - } + return GenerationRequest( + request_type="audio_transcriptions", + arguments=GenerationRequestArguments(**arguments), + stats=stats, + ) @PreprocessorRegistry.register("audio_translations") @@ -297,7 +296,8 @@ class GenerativeAudioTranslationRequestFormatter( ): def __call__( self, columns: dict[GenerativeDatasetColumnType, list[Any]] - ) -> dict[Literal["request"], dict[Literal["request_type"], Any]]: + ) -> GenerationRequest: result = super().__call__(columns) - result["request"]["request_type"] = "audio_translations" + result.request_type = "audio_translations" + return result diff --git a/src/guidellm/data/preprocessors/mappers.py b/src/guidellm/data/preprocessors/mappers.py index 56ca0342..5e64b51c 100644 --- a/src/guidellm/data/preprocessors/mappers.py +++ b/src/guidellm/data/preprocessors/mappers.py @@ -1,7 +1,7 @@ from __future__ import annotations from collections import defaultdict -from typing import Any, ClassVar +from typing import Any, ClassVar, cast from datasets import Dataset, IterableDataset @@ -66,7 +66,7 @@ class GenerativeColumnMapper(DataDependentPreprocessor): @classmethod def datasets_default_mappings( cls, datasets: list[Dataset | IterableDataset] - ) -> dict[str, list[tuple[int, str]]]: + ) -> dict[GenerativeDatasetColumnType, list[tuple[int, str]]]: mappings: dict[GenerativeDatasetColumnType, list[tuple[int, str]]] = ( defaultdict(list) ) @@ -92,7 +92,8 @@ def datasets_default_mappings( for name in type_names: if name in dataset_columns: - mappings[column_type].append((index, name)) + key = cast("GenerativeDatasetColumnType", column_type) + mappings[key].append((index, name)) break return mappings @@ -123,20 +124,26 @@ def datasets_mappings( mappings[column_type] = [] for name in names if isinstance(names, list) else [names]: - dataset, column_name = name.split(".", 1) - dataset_index = ( - int(dataset) - if dataset.isdigit() - else datasets_named_indices.get(dataset) - ) + if "." in name: + dataset, column_name = name.split(".", 1) + dataset_index = ( + int(dataset) + if dataset.isdigit() + else datasets_named_indices.get(dataset) + ) + else: + dataset_index = 0 + column_name = name + if dataset_index is None or dataset_index >= len(datasets): raise ValueError( - f"Dataset '{dataset}' not found in datasets: " + f"Dataset '{name}' not found in datasets: " f"{datasets_named_indices}." ) if column_name not in datasets_columns[dataset_index]: raise ValueError( - f"Column '{column_name}' not found in dataset '{dataset}' " + f"Column '{column_name}' not found in dataset " + f"'{datasets[dataset_index]}' " f"columns: {datasets_columns[dataset_index]}." 
) mappings[column_type].append((dataset_index, column_name)) @@ -153,11 +160,13 @@ def __init__( dict[GenerativeDatasetColumnType, list[tuple[int, str]]] | None ) - def __call__(self, row: dict[int, list[dict[str, Any]]]) -> dict[str, list[Any]]: + def __call__( + self, row: dict[str, Any] + ) -> dict[GenerativeDatasetColumnType, list[Any]]: if self.datasets_column_mappings is None: raise ValueError("DefaultGenerativeColumnMapper not setup with data.") - items = row.pop("items") + items = cast("dict[int, dict[str, Any]]", row.pop("items")) mapped: dict[GenerativeDatasetColumnType, list[Any]] = defaultdict(list) for column_type, column_mappings in self.datasets_column_mappings.items(): diff --git a/src/guidellm/data/utils/functions.py b/src/guidellm/data/utils/functions.py index c9ca20ed..413b5a92 100644 --- a/src/guidellm/data/utils/functions.py +++ b/src/guidellm/data/utils/functions.py @@ -12,6 +12,7 @@ import soundfile from PIL import Image as PILImage from pydub import AudioSegment +from torch import Tensor __all__ = [ "download_audio", @@ -182,7 +183,7 @@ def download_image(url: str) -> bytes: def encode_video( - video: bytes | str | Path | datasets.Video, + video: bytes | str | Path, encode_type: Literal["base64", "url"] | None = None, ) -> str: """ @@ -201,11 +202,13 @@ def encode_video( - video url - "data:video/{type};base64, {data}" string """ - url = is_url(video) - - if url and (encode_type is None or encode_type == "url"): + if ( + isinstance(video, str) + and is_url(video) + and (encode_type is None or encode_type == "url") + ): return video - elif url and encode_type == "base64": + elif isinstance(video, str) and is_url(video) and encode_type == "base64": raise ValueError(f"Cannot encode URL video {video}") return encode_video_base64(video=video) @@ -221,7 +224,7 @@ def encode_video_base64(video: bytes | str | Path) -> str: video_format = "unknown" - if is_url(video): + if isinstance(video, str) and is_url(video): video, video_format = download_video(video) if isinstance(video, (str, Path)): @@ -242,16 +245,18 @@ def download_video(url: str) -> tuple[bytes, str]: def encode_audio_as_dict( - audio: bytes | str | Path | dict | np.ndarray, - sample_rate: int | None = 16000, + audio: Any, + sample_rate: int = 16000, + encode_sample_rate: int = 16000, max_duration: float | None = None, mono: bool = True, audio_format: str = "mp3", bitrate: str = "64k", ) -> dict[Literal["data", "format"], Any]: - content, file_name, file_format = encode_audio( + content, _, file_format = encode_audio( audio=audio, - sample_rate=sample_rate or 16000, + sample_rate=sample_rate, + encode_sample_rate=encode_sample_rate, max_duration=max_duration, mono=mono, audio_format=audio_format, @@ -265,8 +270,9 @@ def encode_audio_as_dict( def encode_audio_as_file( - audio: bytes | str | Path | dict | np.ndarray, - sample_rate: int | None = 16000, + audio: Any, + sample_rate: int = 16000, + encode_sample_rate: int = 16000, max_duration: float | None = None, mono: bool = True, audio_format: str = "mp3", @@ -274,7 +280,8 @@ def encode_audio_as_file( ) -> tuple[str, bytes, str]: content, file_name, file_format = encode_audio( audio=audio, - sample_rate=sample_rate or 16000, + sample_rate=sample_rate, + encode_sample_rate=encode_sample_rate, max_duration=max_duration, mono=mono, audio_format=audio_format, @@ -284,57 +291,136 @@ def encode_audio_as_file( return file_name, content, f"audio/{file_format}" -def encode_audio( - audio: bytes | str | Path | dict, +def encode_audio( # noqa: PLR0912, PLR0911, C901 + audio: 
Any, sample_rate: int = 16000, + file_name: str = "audio.wav", + encode_sample_rate: int = 16000, max_duration: float | None = None, mono: bool = True, audio_format: str = "mp3", bitrate: str = "64k", ) -> tuple[bytes, str, str]: - file_name = "audio.wav" - - if is_url(audio): - audio, file_name, _ = download_audio(audio) - elif isinstance(audio, dict): - file_name = audio.get("name", "audio") - audio = base64.b64decode(audio["data"]) - elif isinstance(audio, (str, Path)): - path = Path(audio) - file_name = get_file_name(path) - audio = path.read_bytes() - elif not isinstance(audio, bytes): + audio_buffer: io.BytesIO = io.BytesIO() + + if hasattr(audio, "get_samples_played_in_range"): + # HF datasets Audio object + audio_samples = audio.get_samples_played_in_range( + start_seconds=0.0, + stop_seconds=None + if max_duration is None + else min(max_duration, audio.metadata.duration_seconds_from_header), + ) + return encode_audio( + audio=audio_samples.data.numpy(), + sample_rate=audio_samples.sample_rate, + encode_sample_rate=encode_sample_rate, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) + + if isinstance(audio, Tensor): + return encode_audio( + audio=audio.numpy(), + sample_rate=sample_rate, + encode_sample_rate=encode_sample_rate, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) + + if isinstance(audio, dict): + sample_rate = audio.get("sample_rate", audio.get("sampling_rate", sample_rate)) + if "data" not in audio and "url" not in audio: + raise ValueError( + f"Audio dict must contain either 'data' or 'url' keys, got {audio}" + ) + return encode_audio( + audio=audio.get("data") or audio.get("url"), + sample_rate=sample_rate, + encode_sample_rate=encode_sample_rate, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) + + if isinstance(audio, str) and is_url(audio): + audio_bytes, file_name, _ = download_audio(audio) + return encode_audio( + audio=audio_bytes, + sample_rate=sample_rate, + encode_sample_rate=encode_sample_rate, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) + + if isinstance(audio, (str, Path)): + if not Path(audio).exists(): + raise ValueError(f"Audio file does not exist: {audio}") + file_name = get_file_name(audio) + data, sample_rate = soundfile.read(str(audio), dtype="float32") + + return encode_audio( + audio=data, + sample_rate=sample_rate, + encode_sample_rate=encode_sample_rate, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) + + if isinstance(audio, bytes): + data, sample_rate = soundfile.read(io.BytesIO(audio), dtype="float32") + + return encode_audio( + audio=data, + sample_rate=sample_rate, + encode_sample_rate=encode_sample_rate, + max_duration=max_duration, + mono=mono, + audio_format=audio_format, + bitrate=bitrate, + ) + + if not isinstance(audio, np.ndarray): raise ValueError(f"Unsupported audio type: {type(audio)}") - processed_audio, sample_rate = librosa.load( - io.BytesIO(audio), - sr=sample_rate, - mono=mono, - duration=max_duration, - ) + if sample_rate != encode_sample_rate: + audio = librosa.resample( + audio.astype(np.float32), orig_sr=sample_rate, target_sr=encode_sample_rate + ) + sample_rate = encode_sample_rate + + audio = librosa.to_mono(audio) + + if ( + max_duration is not None + and max_duration > 0 + and (max_samples := int(max_duration * sample_rate)) < len(audio) + ): + audio = audio[:max_samples] + + 
audio_buffer = io.BytesIO() - # Encode to target format - buffer = io.BytesIO() if audio_format.lower() == "mp3": - temp_wav = io.BytesIO() - soundfile.write( - temp_wav, - processed_audio, - sample_rate, - format="WAV", - subtype="PCM_16", - ) - temp_wav.seek(0) - AudioSegment.from_wav(temp_wav).export(buffer, format="mp3", bitrate=bitrate) + wav = io.BytesIO() + soundfile.write(wav, audio, sample_rate, format="WAV", subtype="PCM_16") + wav.seek(0) + + sound = AudioSegment.from_wav(wav) + sound.export(audio_buffer, format="mp3", bitrate=bitrate) else: - soundfile.write( - buffer, - processed_audio, - sample_rate, - format=audio_format.upper(), - ) + soundfile.write(audio_buffer, audio, sample_rate, format=audio_format.upper()) - return buffer.getvalue(), file_name, audio_format.lower() + audio_buffer.seek(0) + return audio_buffer.read(), file_name, audio_format.lower() def download_audio(url: str) -> tuple[bytes, str, str]: diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index 9baccd1b..278fb44d 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -557,7 +557,7 @@ def received_callback( # based on no more requests sent and all requests removed from queue if ( state_update.state.queued_requests == 0 - and self.send_requests_stopped_event.is_set() + and self.stop_send_requests_event.is_set() and not self.requests_generated_event.is_set() ): self.requests_generated_event.set() @@ -569,7 +569,7 @@ def received_callback( # Check if all requests have been processed and can shutdown if ( state_update.state.processed_requests == state_update.state.created_requests - and self.send_requests_stopped_event.is_set() + and self.stop_send_requests_event.is_set() and self.requests_generated_event.is_set() and self.constraint_reached_event.is_set() and not self.shutdown_event.is_set() diff --git a/src/guidellm/settings.py b/src/guidellm/settings.py index 5c360eff..222d85f9 100644 --- a/src/guidellm/settings.py +++ b/src/guidellm/settings.py @@ -46,7 +46,7 @@ class LoggingSettings(BaseModel): disabled: bool = False clear_loggers: bool = True - console_log_level: str = "WARNING" + console_log_level: str = "DEBUG" log_file: str | None = None log_file_level: str | None = None diff --git a/src/guidellm/utils/cli.py b/src/guidellm/utils/cli.py index 4d83526a..f049e94e 100644 --- a/src/guidellm/utils/cli.py +++ b/src/guidellm/utils/cli.py @@ -5,8 +5,10 @@ def parse_json(ctx, param, value): # noqa: ARG001 - if value is None: + if value is None or value == [None]: return None + if isinstance(value, (list, tuple)): + return [parse_json(ctx, param, val) for val in value] try: return json.loads(value) except json.JSONDecodeError as err: From a10e61ac053187b2f42ec577deaefcb127cdccef Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Thu, 9 Oct 2025 10:52:56 -0400 Subject: [PATCH 52/90] Add a script to generate pylock from scratch Signed-off-by: Samuel Monson --- pylock.toml | 1203 ++++++++++++++++++++++-------------- scripts/generate_pylock.sh | 18 + 2 files changed, 768 insertions(+), 453 deletions(-) create mode 100755 scripts/generate_pylock.sh diff --git a/pylock.toml b/pylock.toml index 2fa1b28e..e3f14678 100644 --- a/pylock.toml +++ b/pylock.toml @@ -4,7 +4,7 @@ lock-version = "1.0" requires-python = "<4.0,>=3.9.0" environments = [ "python_version ~= \"3.10\"", - "python_version < \"3.10\" and python_version >= \"3.9\"", + "python_full_version ~= \"3.9.0\"", ] extras = ["dev", "recommended"] dependency-groups = 
["default"] @@ -44,6 +44,24 @@ dependencies = [ "tomli>=2.0.1; python_version < \"3.11\"", ] +[[packages]] +name = "blobfile" +version = "3.1.0" +requires-python = ">=3.8.0" +sdist = {name = "blobfile-3.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/f0/6d/2e7567da75ddbb24fe979f52284b708da349d67a41042635af36071a5a6b/blobfile-3.1.0.tar.gz", hashes = {sha256 = "d45b6b1fa3b0920732314c23ddbdb4f494ca12f787c2b6eb6bba6faa51382671"}} +wheels = [ + {name = "blobfile-3.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/77/a7/51af11120d75af2828f8eede0b13a4caff650d708ac50e62d000aefe1ffb/blobfile-3.1.0-py3-none-any.whl",hashes = {sha256 = "2b4c5e766ebb7dfa20e4990cf6ec3d2106bdc91d632fb9377f170a234c5a5c6a"}}, +] +marker = "\"recommended\" in extras" + +[packages.tool.pdm] +dependencies = [ + "pycryptodomex>=3.8", + "urllib3<3,>=1.25.3", + "lxml>=4.9", + "filelock>=3.0", +] + [[packages]] name = "build" version = "1.2.2.post1" @@ -63,6 +81,21 @@ dependencies = [ "tomli>=1.1.0; python_version < \"3.11\"", ] +[[packages]] +name = "culsans" +version = "0.9.0" +requires-python = ">=3.8" +sdist = {name = "culsans-0.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/90/5d/12e7e16b0caafaa8cca0728dd817204afd1274ddb35531b029b1c5cf7b2a/culsans-0.9.0.tar.gz", hashes = {sha256 = "942dd3c3c77f20e9ac3383d9a5ef8b7b24c0dac1a593bdb20d46c8a38720a5f3"}} +wheels = [ + {name = "culsans-0.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6f/b4/1e3cccb48f09e89e0cfc06925182cbcd36abf80b8eda2489430b41c7eaff/culsans-0.9.0-py3-none-any.whl",hashes = {sha256 = "d3537b65bbb341c2ac72e7d152deb8ab893b2a00452d2a68702a1a1a41619d6f"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "aiologic>=0.13.0", +] + [[packages]] name = "ftfy" version = "6.3.1" @@ -204,12 +237,6 @@ wheels = [ {name = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}}, {name = "mypy-1.15.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}}, {name = "mypy-1.15.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl",hashes = {sha256 = "5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}}, - {name = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/5a/fa/79cf41a55b682794abe71372151dbbf856e3008f6767057229e6649d294a/mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}}, - {name = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d3/33/dd8feb2597d648de29e3da0a8bf4e1afbda472964d2a4a0052203a6f3594/mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}}, - {name = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/e4/b5/74508959c1b06b96674b364ffeb7ae5802646b32929b7701fc6b18447592/mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}}, - {name = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/6c/53/da61b9d9973efcd6507183fdad96606996191657fe79701b2c818714d573/mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}}, - {name = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c1/72/965bd9ee89540c79a25778cc080c7e6ef40aa1eeac4d52cec7eae6eb5228/mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}}, - {name = "mypy-1.15.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/46/d0/f41645c2eb263e6c77ada7d76f894c580c9ddb20d77f0c24d34273a4dab2/mypy-1.15.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}}, ] marker = "\"dev\" in extras" @@ -281,15 +308,6 @@ wheels = [ {name = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}}, {name = "PyYAML-6.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}}, {name = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}}, - {name = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}}, - {name = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}}, - {name = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}}, - {name = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = 
"f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}}, - {name = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}}, - {name = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl",hashes = {sha256 = "0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}}, - {name = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}}, - {name = "PyYAML-6.0.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl",hashes = {sha256 = "6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}}, - {name = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -353,17 +371,19 @@ dependencies = [ [[packages]] name = "pytest-asyncio" -version = "0.23.8" -requires-python = ">=3.8" -sdist = {name = "pytest_asyncio-0.23.8.tar.gz", url = "https://files.pythonhosted.org/packages/de/b4/0b378b7bf26a8ae161c3890c0b48a91a04106c5713ce81b4b080ea2f4f18/pytest_asyncio-0.23.8.tar.gz", hashes = {sha256 = "759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}} +version = "1.1.1" +requires-python = ">=3.9" +sdist = {name = "pytest_asyncio-1.1.1.tar.gz", url = "https://files.pythonhosted.org/packages/8d/1e/2aa43805d4a320a9489d2b99f7877b69f9094c79aa0732159a1415dd6cd4/pytest_asyncio-1.1.1.tar.gz", hashes = {sha256 = "b72d215c38e2c91dbb32f275e0b5be69602d7869910e109360e375129960a649"}} wheels = [ - {name = "pytest_asyncio-0.23.8-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl",hashes = {sha256 = "50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}}, + {name = "pytest_asyncio-1.1.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/28/de/aba79e9ccdb51b5d0d65c67dd857bd78b00c64723df16b9fc800d8b94ce6/pytest_asyncio-1.1.1-py3-none-any.whl",hashes = {sha256 = "726339d30fcfde24691f589445b9b67d058b311ac632b1d704e97f20f1d878da"}}, ] marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [ - "pytest<9,>=7.0.0", + "backports-asyncio-runner<2,>=1.1; python_version < \"3.11\"", + "pytest<9,>=8.2", + "typing-extensions>=4.12; python_version < \"3.10\"", ] [[packages]] @@ -509,7 +529,7 @@ wheels = [ {name = "scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c"}}, {name = "scipy-1.15.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d1/84/55bc4881973d3f79b479a5a2e2df61c8c9a04fcb986a213ac9c02cfb659b/scipy-1.15.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13"}}, ] -marker = "python_version ~= \"3.10\" and \"dev\" in extras" +marker = "python_version ~= \"3.10\"" [packages.tool.pdm] dependencies = [ @@ -577,7 +597,7 @@ wheels = [ {name = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"}}, {name = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"}}, ] -marker = "\"default\" in dependency_groups and python_version ~= \"3.10\" or \"dev\" in extras and python_version ~= \"3.10\"" +marker = "python_version ~= \"3.10\"" [packages.tool.pdm] dependencies = [] @@ -590,7 +610,7 @@ sdist = {name = "setuptools-80.9.0.tar.gz", url = "https://files.pythonhosted.or wheels = [ {name = "setuptools-80.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl",hashes = {sha256 = "062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}}, ] -marker = "\"dev\" in extras" +marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -656,12 +676,6 @@ wheels = [ {name = "tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882"}}, {name = "tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c"}}, {name = "tiktoken-0.11.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1"}}, - {name = "tiktoken-0.11.0-cp39-cp39-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/aa/b6/81c5799ab77a9580c6d840cf77d4717e929193a42190fd623a080c647aa6/tiktoken-0.11.0-cp39-cp39-macosx_10_12_x86_64.whl",hashes = {sha256 = "13220f12c9e82e399377e768640ddfe28bea962739cc3a869cad98f42c419a89"}}, - {name = "tiktoken-0.11.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/50/89/faa668066b2a4640534ef5797c09ecd0a48b43367502129b217339dfaa97/tiktoken-0.11.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = 
"7f2db627f5c74477c0404b4089fd8a28ae22fa982a6f7d9c7d4c305c375218f3"}}, - {name = "tiktoken-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/aa/7f/5f950528b54cb3025af4bc3522c23dbfb691afe8ffb292aa1e8dc2e6bddf/tiktoken-0.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2302772f035dceb2bcf8e55a735e4604a0b51a6dd50f38218ff664d46ec43807"}}, - {name = "tiktoken-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/27/a4/e82ddf0773835ba24536ac8c0dce561e697698ec020a93212a1e041d39b4/tiktoken-0.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "20b977989afe44c94bcc50db1f76971bb26dca44218bd203ba95925ef56f8e7a"}}, - {name = "tiktoken-0.11.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1b/c2/06361e41d176e62797ae65fa678111cdd30553321cf4d83e7b84107ea95f/tiktoken-0.11.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "669a1aa1ad6ebf1b3c26b45deb346f345da7680f845b5ea700bba45c20dea24c"}}, - {name = "tiktoken-0.11.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bb/ad/ca37e15c46741ebb3904d562d03194e845539a08f7751a6df0f391757312/tiktoken-0.11.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "e363f33c720a055586f730c00e330df4c7ea0024bf1c83a8a9a9dbc054c4f304"}}, ] marker = "\"recommended\" in extras" @@ -724,22 +738,40 @@ dependencies = [ ] [[packages]] -name = "blobfile" -version = "3.1.0" +name = "uvloop" +version = "0.21.0" requires-python = ">=3.8.0" -sdist = {name = "blobfile-3.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/f0/6d/2e7567da75ddbb24fe979f52284b708da349d67a41042635af36071a5a6b/blobfile-3.1.0.tar.gz", hashes = {sha256 = "d45b6b1fa3b0920732314c23ddbdb4f494ca12f787c2b6eb6bba6faa51382671"}} -wheels = [ - {name = "blobfile-3.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/77/a7/51af11120d75af2828f8eede0b13a4caff650d708ac50e62d000aefe1ffb/blobfile-3.1.0-py3-none-any.whl",hashes = {sha256 = "2b4c5e766ebb7dfa20e4990cf6ec3d2106bdc91d632fb9377f170a234c5a5c6a"}}, +sdist = {name = "uvloop-0.21.0.tar.gz", url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hashes = {sha256 = "3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}} +wheels = [ + {name = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}}, + {name = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}}, + {name = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}}, + {name = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}}, + {name = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}}, + {name = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}}, + {name = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}}, + {name = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}}, + {name = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}}, + {name = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}}, + {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}}, + {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}}, + {name = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}}, + {name = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}}, + 
{name = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}}, + {name = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}}, + {name = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}}, + {name = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}}, + {name = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}}, + {name = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/5a/62d5800358a78cc25c8a6c72ef8b10851bdb8cca22e14d9c74167b7f86da/uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}}, + {name = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/96/63695e0ebd7da6c741ccd4489b5947394435e198a1382349c17b1146bb97/uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}}, + {name = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/61/e0/f0f8ec84979068ffae132c58c79af1de9cceeb664076beea86d941af1a30/uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}}, + {name = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/bf/fe/5e94a977d058a54a19df95f12f7161ab6e323ad49f4dabc28822eb2df7ea/uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}}, + {name = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/dd/c7179618e46092a77e036650c1f056041a028a35c4d76945089fcfc38af8/uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}}, ] -marker = "\"recommended\" in extras" +marker = "\"default\" in dependency_groups" [packages.tool.pdm] -dependencies = [ - "pycryptodomex>=3.8", - "urllib3<3,>=1.25.3", - "lxml>=4.9", - 
"filelock>=3.0", -] +dependencies = [] [[packages]] name = "datasets" @@ -768,6 +800,34 @@ dependencies = [ "pyyaml>=5.1", ] +[[packages]] +name = "eval-type-backport" +version = "0.2.2" +requires-python = ">=3.8" +sdist = {name = "eval_type_backport-0.2.2.tar.gz", url = "https://files.pythonhosted.org/packages/30/ea/8b0ac4469d4c347c6a385ff09dc3c048c2d021696664e26c7ee6791631b5/eval_type_backport-0.2.2.tar.gz", hashes = {sha256 = "f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}} +wheels = [ + {name = "eval_type_backport-0.2.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ce/31/55cd413eaccd39125368be33c46de24a1f639f2e12349b0361b4678f3915/eval_type_backport-0.2.2-py3-none-any.whl",hashes = {sha256 = "cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "faker" +version = "37.11.0" +requires-python = ">=3.9" +sdist = {name = "faker-37.11.0.tar.gz", url = "https://files.pythonhosted.org/packages/c9/4b/ca43f6bbcef63deb8ac01201af306388670a172587169aab3b192f7490f0/faker-37.11.0.tar.gz", hashes = {sha256 = "22969803849ba0618be8eee2dd01d0d9e2cd3b75e6ff1a291fa9abcdb34da5e6"}} +wheels = [ + {name = "faker-37.11.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a3/46/8f4097b55e43af39e8e71e1f7aec59ff7398bca54d975c30889bc844719d/faker-37.11.0-py3-none-any.whl",hashes = {sha256 = "1508d2da94dfd1e0087b36f386126d84f8583b3de19ac18e392a2831a6676c57"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "tzdata", +] + [[packages]] name = "loguru" version = "0.7.3" @@ -785,6 +845,71 @@ dependencies = [ "win32-setctime>=1.0.0; sys_platform == \"win32\"", ] +[[packages]] +name = "msgpack" +version = "1.1.2" +requires-python = ">=3.9" +sdist = {name = "msgpack-1.1.2.tar.gz", url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hashes = {sha256 = "3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e"}} +wheels = [ + {name = "msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00"}}, + {name = "msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939"}}, + {name = "msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e"}}, + {name = "msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = 
"180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931"}}, + {name = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014"}}, + {name = "msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2"}}, + {name = "msgpack-1.1.2-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl",hashes = {sha256 = "80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717"}}, + {name = "msgpack-1.1.2-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl",hashes = {sha256 = "9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b"}}, + {name = "msgpack-1.1.2-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl",hashes = {sha256 = "59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af"}}, + {name = "msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a"}}, + {name = "msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b"}}, + {name = "msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245"}}, + {name = "msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90"}}, + {name = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20"}}, + {name = "msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27"}}, + {name = "msgpack-1.1.2-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl",hashes = {sha256 = "1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b"}}, + {name = "msgpack-1.1.2-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff"}}, + {name = "msgpack-1.1.2-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46"}}, + {name = "msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf"}}, + {name = "msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7"}}, + {name = "msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999"}}, + {name = "msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e"}}, + {name = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162"}}, + {name = "msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794"}}, + {name = "msgpack-1.1.2-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl",hashes = {sha256 = "a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c"}}, + {name = 
"msgpack-1.1.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9"}}, + {name = "msgpack-1.1.2-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl",hashes = {sha256 = "e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84"}}, + {name = "msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa"}}, + {name = "msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb"}}, + {name = "msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f"}}, + {name = "msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42"}}, + {name = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9"}}, + {name = "msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620"}}, + {name = "msgpack-1.1.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl",hashes = {sha256 = "1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029"}}, + {name = "msgpack-1.1.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b"}}, + {name = "msgpack-1.1.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69"}}, + {name 
= "msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c"}}, + {name = "msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0"}}, + {name = "msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296"}}, + {name = "msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef"}}, + {name = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c"}}, + {name = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e"}}, + {name = "msgpack-1.1.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl",hashes = {sha256 = "602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e"}}, + {name = "msgpack-1.1.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68"}}, + {name = "msgpack-1.1.2-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl",hashes = {sha256 = "86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406"}}, + {name = "msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2"}}, + {name = "msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = 
"a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87"}}, + {name = "msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/e5/c2241de64bfceac456b140737812a2ab310b10538a7b34a1d393b748e095/msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251"}}, + {name = "msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a"}}, + {name = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0e/74/2957703f0e1ef20637d6aead4fbb314330c26f39aa046b348c7edcf6ca6b/msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f"}}, + {name = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a5/09/3bfc12aa90f77b37322fc33e7a8a7c29ba7c8edeadfa27664451801b9860/msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f"}}, + {name = "msgpack-1.1.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/4b/4f/05fcebd3b4977cb3d840f7ef6b77c51f8582086de5e642f3fefee35c86fc/msgpack-1.1.2-cp310-cp310-win32.whl",hashes = {sha256 = "e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9"}}, + {name = "msgpack-1.1.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d0/3e/b4547e3a34210956382eed1c85935fff7e0f9b98be3106b3745d7dec9c5e/msgpack-1.1.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "pillow" version = "11.3.0" @@ -885,17 +1010,6 @@ wheels = [ {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}}, {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}}, {name = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}}, - {name = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/9e/8e/9c089f01677d1264ab8648352dcb7773f37da6ad002542760c80107da816/pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl",hashes = {sha256 = "48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}}, - {name = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b5/a9/5749930caf674695867eb56a581e78eb5f524b7583ff10b01b6e5048acb3/pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/43/46/0b85b763eb292b691030795f9f6bb6fcaf8948c39413c81696a01c3577f7/pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/c6/1a230ec0067243cbd60bc2dad5dc3ab46a8a41e21c15f5c9b52b26873069/pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/63/dd/f296c27ffba447bfad76c6a0c44c1ea97a90cb9472b9304c94a732e8dbfb/pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}}, - {name = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a5/a0/98a3630f0b57f77bae67716562513d3032ae70414fcaf02750279c389a9e/pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}}, - {name = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/de/e6/83dfba5646a290edd9a21964da07674409e410579c341fc5b8f7abd81620/pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}}, - {name = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bc/41/15ab268fe6ee9a2bc7391e2bbb20a98d3974304ab1a406a992dcb297a370/pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"}}, - {name = "pillow-11.3.0-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/64/79/6d4f638b288300bed727ff29f2a3cb63db054b33518a95f27724915e3fbc/pillow-11.3.0-cp39-cp39-win32.whl",hashes = {sha256 = "ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"}}, - {name = "pillow-11.3.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/46/05/4106422f45a05716fd34ed21763f8ec182e8ea00af6e9cb05b93a247361a/pillow-11.3.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"}}, - {name = "pillow-11.3.0-cp39-cp39-win_arm64.whl",url = "https://files.pythonhosted.org/packages/63/c6/287fd55c2c12761d0591549d48885187579b7c257bef0c6660755b0b59ae/pillow-11.3.0-cp39-cp39-win_arm64.whl",hashes = {sha256 = "6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}}, ] marker = "\"default\" in dependency_groups" @@ -914,8 
+1028,6 @@ wheels = [ {name = "protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl",hashes = {sha256 = "a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39"}}, {name = "protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl",hashes = {sha256 = "4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6"}}, {name = "protobuf-6.31.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl",hashes = {sha256 = "720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e"}}, - {name = "protobuf-6.31.1-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/b1/f0/4160dbd205eee8fdf8647d154e7ceaa9d25b3a877b6311274eb6dc896b75/protobuf-6.31.1-cp39-cp39-win32.whl",hashes = {sha256 = "0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16"}}, - {name = "protobuf-6.31.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/09/34/13989eb9f482409ed821bfa3e34e6a3878b42607c38e7f7572b4cc825091/protobuf-6.31.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9"}}, ] marker = "\"default\" in dependency_groups" @@ -939,6 +1051,31 @@ dependencies = [ "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", ] +[[packages]] +name = "sanic" +version = "25.3.0" +requires-python = ">=3.8" +sdist = {name = "sanic-25.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/df/8b/08dc376390fe854ef32984973883b646ee68c6727da72ffcc65340d8f192/sanic-25.3.0.tar.gz", hashes = {sha256 = "775d522001ec81f034ec8e4d7599e2175bfc097b8d57884f5e4c9322f5e369bb"}} +wheels = [ + {name = "sanic-25.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a6/e1/b36ddc16862d63d22986ae21b04a79c8fb7ec48d5d664acdfd1c2acf78ac/sanic-25.3.0-py3-none-any.whl",hashes = {sha256 = "fb519b38b4c220569b0e2e868583ffeaffaab96a78b2e42ae78bc56a644a4cd7"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "sanic-routing>=23.12.0", + "httptools>=0.0.10", + "uvloop>=0.15.0; sys_platform != \"win32\" and implementation_name == \"cpython\"", + "ujson>=1.35; sys_platform != \"win32\" and implementation_name == \"cpython\"", + "aiofiles>=0.6.0", + "websockets>=10.0", + "multidict<7.0,>=5.0", + "html5tagger>=1.2.1", + "tracerite>=1.0.0", + "typing-extensions>=4.4.0", + "setuptools>=70.1.0", +] + [[packages]] name = "transformers" version = "4.53.1" @@ -1092,28 +1229,6 @@ wheels = [ {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}}, {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}}, 
{name = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/53/ea/bbe9095cdd771987d13c82d104a9c8559ae9aec1e29f139e286fd2e9256e/pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl",hashes = {sha256 = "a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/49/1d/4ac5ed228078737d457a609013e8f7edc64adc37b91d619ea965758369e5/pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl",hashes = {sha256 = "44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}}, - {name = 
"pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl",hashes = {sha256 = "eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/bb/93/3cc157026bca8f5006250e74515119fcaa6d6858aceee8f67ab6dc548c16/pydantic_core-2.33.2-cp39-cp39-win32.whl",hashes = {sha256 = "83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}}, - {name = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5b/90/7edc3b2a0d9f0dda8806c04e511a67b0b7a41d2187e2003673a996fb4310/pydantic_core-2.33.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/98/dbf3fdfabaf81cda5622154fda78ea9965ac467e3239078e0dcd6df159e7/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8d/99/7810aa9256e7f2ccd492590f86b79d370df1e9292f1f80b000b6a75bd2fb/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}}, - {name = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl",hashes = {sha256 = "2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}}, ] marker = "\"default\" in dependency_groups" @@ -1122,6 +1237,19 @@ dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] +[[packages]] +name = "typing-extensions" +version = "4.14.1" +requires-python = ">=3.9" +sdist = {name = "typing_extensions-4.14.1.tar.gz", url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hashes = {sha256 = "38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}} +wheels = [ + {name = "typing_extensions-4.14.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl",hashes = {sha256 = "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}}, +] +marker = "\"default\" in dependency_groups or \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "tomli" version = "2.2.1" @@ -1160,20 +1288,7 @@ wheels = [ {name = "tomli-2.2.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}}, {name = "tomli-2.2.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl",hashes = {sha256 = "cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}}, ] -marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [] - -[[packages]] -name = "typing-extensions" -version = "4.14.1" -requires-python = ">=3.9" -sdist = {name = "typing_extensions-4.14.1.tar.gz", url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hashes = {sha256 = "38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}} -wheels = [ - {name = "typing_extensions-4.14.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl",hashes = {sha256 = "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}}, -] -marker = "\"default\" in dependency_groups or \"dev\" in extras" +marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" [packages.tool.pdm] 
dependencies = [] @@ -1326,6 +1441,19 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "backports-asyncio-runner" +version = "1.2.0" +requires-python = "<3.11,>=3.8" +sdist = {name = "backports_asyncio_runner-1.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hashes = {sha256 = "a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"}} +wheels = [ + {name = "backports_asyncio_runner-1.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl",hashes = {sha256 = "0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"}}, +] +marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "charset-normalizer" version = "3.4.2" @@ -1385,19 +1513,6 @@ wheels = [ {name = "charset_normalizer-3.4.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl",hashes = {sha256 = "e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}}, {name = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}}, {name = "charset_normalizer-3.4.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl",hashes = {sha256 = "7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}}, - {name = 
"charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl",hashes = {sha256 = "43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}}, - {name = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" @@ -1530,23 +1645,6 @@ wheels = [ {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e3/b3/751124b8ceb0831c17960d06ee31a4732cb4a6a006fdbfa1153d07c52226/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3"}}, {name = "aiohttp-3.12.14-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/81/3c/72477a1d34edb8ab8ce8013086a41526d48b64f77e381c8908d24e1c18f5/aiohttp-3.12.14-cp310-cp310-win32.whl",hashes = {sha256 = "ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c"}}, {name = "aiohttp-3.12.14-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a2/c4/8aec4ccf1b822ec78e7982bd5cf971113ecce5f773f04039c76a083116fc/aiohttp-3.12.14-cp310-cp310-win_amd64.whl",hashes = {sha256 = "cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db"}}, - {name = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/cf/54/8a65095784f5c8b2a60a8baa2baabb15b8d507efb0911d59f94af04ba908/aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae"}}, - {name = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/23/65a82d33841c790178aed8aa6b5e720e37f08bdf7256936fa3bc86f03257/aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b"}}, - {name = "aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/10/66/9d51ec40613aca2f38d6ac527b592686a302197109aa1c0fe045040835ec/aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/48/9e/2f14e4780a461351325d7821fb64e9107189315dd8f6e8a67e7afdbf875c/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/b8/26/26ef03e6cc4b7fb275eaa76b33c128f72729e8833e512b6770f877560b6e/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/68/cf/fffc2a9edacbd475cfb508075bad052426ce0b9100f1045536ee1b683872/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/0b/c5/bb8b29ef079d3ecb5960ec1b547b56bc52ee5ffc43c8a30ef21f9afeb67b/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2"}}, - {name = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/09/0d/d18e2d2754497bf91b9559425e8c4286af61bdbe42d49c43d955c7269680/aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922"}}, - {name = 
"aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/33/c8/2c32cd25deb9f590cb8d50ff33fb3bb2cc8d1761958989f6f64cf00ef1cb/aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0f/36/1b36ae47b9d6afdd39072373bb7157b464996376d562d3c50950ddf6d10e/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/2b/e8/6864b7812351821168e80ca102d7fa244a78fefe9690995a40e8b5c19f4b/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/9b/55/f90e3eb25330f8a564a6e6b4d3cc15d3630bd28b0795a025e397e3279411/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1b/f7/39c3570434bb7e81601155ba71327735b26548473cca2d5c7f5badabb140/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/46/0d/caee8733fbe511c34a54e93ee26c4b8d505e12785444d31f772a610df7ab/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84"}}, - {name = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/24/f3/5d21196abf74dee66c5809e764cc27a2275e54c9355019c21be3bf77dd77/aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf"}}, - {name = "aiohttp-3.12.14-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/54/bb/b4226f4fd0597d5245f284d10be48bf1ef610ab4f57d4239686fb03d1814/aiohttp-3.12.14-cp39-cp39-win32.whl",hashes = {sha256 = "a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0"}}, - {name = "aiohttp-3.12.14-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a0/c0/2f1cefb7b077bf5c19f01bdf0d82b89de0bf2801b441eda23ada0b8966ac/aiohttp-3.12.14-cp39-cp39-win_amd64.whl",hashes = {sha256 = "196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1562,108 +1660,6 @@ dependencies = [ "yarl<2.0,>=1.17.0", ] -[[packages]] -name = "async-timeout" -version = "5.0.1" -requires-python = ">=3.8" -sdist = {name = "async_timeout-5.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hashes = {sha256 = "d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}} -wheels = [ - {name = "async_timeout-5.0.1-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl",hashes = {sha256 = "39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}}, -] -marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [] - -[[packages]] -name = "h2" -version = "4.2.0" -requires-python = ">=3.9" -sdist = {name = "h2-4.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hashes = {sha256 = "c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"}} -wheels = [ - {name = "h2-4.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl",hashes = {sha256 = "479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"}}, -] -marker = "\"default\" in dependency_groups" - -[packages.tool.pdm] -dependencies = [ - "hyperframe<7,>=6.1", - "hpack<5,>=4.1", -] - -[[packages]] -name = "hf-xet" -version = "1.1.5" -requires-python = ">=3.8" -sdist = {name = "hf_xet-1.1.5.tar.gz", url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hashes = {sha256 = "69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694"}} -wheels = [ - {name = "hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23"}}, - {name = "hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/de/5f/2c78e28f309396e71ec8e4e9304a6483dcbc36172b5cea8f291994163425/hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8"}}, - {name = "hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1"}}, - {name = "hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/d0/54/0fcf2b619720a26fbb6cc941e89f2472a522cd963a776c089b189559447f/hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl",hashes = {sha256 = "dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18"}}, - {name = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/92/1d351ac6cef7c4ba8c85744d37ffbfac2d53d0a6c04d2cabeba614640a78/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14"}}, - {name = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/65/4b2ddb0e3e983f2508528eb4501288ae2f84963586fbdfae596836d5e57a/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a"}}, - {name = "hf_xet-1.1.5-cp37-abi3-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl",hashes = {sha256 = "73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245"}}, -] -marker = "(platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\") and \"default\" in dependency_groups" - -[packages.tool.pdm] -dependencies = [] - -[[packages]] -name = "hpack" -version = "4.1.0" -requires-python = ">=3.9" -sdist = {name = "hpack-4.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hashes = {sha256 = "ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"}} -wheels = [ - {name = "hpack-4.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl",hashes = {sha256 = "157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"}}, -] -marker = "\"default\" in dependency_groups" - -[packages.tool.pdm] -dependencies = [] - -[[packages]] -name = "hyperframe" -version = "6.1.0" -requires-python = ">=3.9" -sdist = {name = "hyperframe-6.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hashes = {sha256 = "f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"}} -wheels = [ - {name = "hyperframe-6.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl",hashes = {sha256 = "b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"}}, -] -marker = "\"default\" in dependency_groups" - -[packages.tool.pdm] -dependencies = [] - -[[packages]] -name = "idna" -version = "3.10" -requires-python = ">=3.6" -sdist = {name = "idna-3.10.tar.gz", url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hashes = {sha256 = "12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}} -wheels = [ - {name = "idna-3.10-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl",hashes = {sha256 = "946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}}, -] -marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" - -[packages.tool.pdm] -dependencies = [] - -[[packages]] -name = "mdit-py-plugins" -version = "0.4.2" -requires-python = ">=3.8" -sdist = {name = "mdit_py_plugins-0.4.2.tar.gz", url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hashes = {sha256 = "5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}} -wheels = [ - {name = "mdit_py_plugins-0.4.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl",hashes = {sha256 = "0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}}, -] -marker = "\"dev\" in extras" - -[packages.tool.pdm] -dependencies = [ - "markdown-it-py<4.0.0,>=1.0.0", -] - [[packages]] name = "multidict" version = "6.6.3" @@ -1761,24 +1757,6 @@ wheels = [ {name 
= "multidict-6.6.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f3/d2/17897a8f3f2c5363d969b4c635aa40375fe1f09168dc09a7826780bfb2a4/multidict-6.6.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485"}}, {name = "multidict-6.6.3-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/2d/5f/d4a717c1e457fe44072e33fa400d2b93eb0f2819c4d669381f925b7cba1f/multidict-6.6.3-cp310-cp310-win_arm64.whl",hashes = {sha256 = "769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5"}}, {name = "multidict-6.6.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl",hashes = {sha256 = "8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a"}}, - {name = "multidict-6.6.3-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/d2/64/ba29bd6dfc895e592b2f20f92378e692ac306cf25dd0be2f8e0a0f898edb/multidict-6.6.3-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "c8161b5a7778d3137ea2ee7ae8a08cce0010de3b00ac671c5ebddeaa17cefd22"}}, - {name = "multidict-6.6.3-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/ca/cd/872ae4c134257dacebff59834983c1615d6ec863b6e3d360f3203aad8400/multidict-6.6.3-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "1328201ee930f069961ae707d59c6627ac92e351ed5b92397cf534d1336ce557"}}, - {name = "multidict-6.6.3-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/15/35/d417d8f62f2886784b76df60522d608aba39dfc83dd53b230ca71f2d4c53/multidict-6.6.3-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "b1db4d2093d6b235de76932febf9d50766cf49a5692277b2c28a501c9637f616"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/85/59/25cddf781f12cddb2386baa29744a3fdd160eb705539b48065f0cffd86d5/multidict-6.6.3-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "53becb01dd8ebd19d1724bebe369cfa87e4e7f29abbbe5c14c98ce4c383e16cd"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/21/4055b6a527954c572498a8068c26bd3b75f2b959080e17e12104b592273c/multidict-6.6.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "41bb9d1d4c303886e2d85bade86e59885112a7f4277af5ad47ab919a2251f306"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/58/98/17f1f80bdba0b2fef49cf4ba59cebf8a81797f745f547abb5c9a4039df62/multidict-6.6.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "775b464d31dac90f23192af9c291dc9f423101857e33e9ebf0020a10bfcf4144"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f8/0e/a5e595fdd0820069f0c29911d5dc9dc3a75ec755ae733ce59a4e6962ae42/multidict-6.6.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "d04d01f0a913202205a598246cf77826fe3baa5a63e9f6ccf1ab0601cf56eca0"}}, - {name = 
"multidict-6.6.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/66/9e/0f51e4cffea2daf24c137feabc9ec848ce50f8379c9badcbac00b41ab55e/multidict-6.6.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "d25594d3b38a2e6cabfdcafef339f754ca6e81fbbdb6650ad773ea9775af35ab"}}, - {name = "multidict-6.6.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/49/a0/a7cfc13c9a71ceb8c1c55457820733af9ce01e121139271f7b13e30c29d2/multidict-6.6.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "35712f1748d409e0707b165bf49f9f17f9e28ae85470c41615778f8d4f7d9609"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c7/50/7ae0d1149ac71cab6e20bb7faf2a1868435974994595dadfdb7377f7140f/multidict-6.6.3-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1c8082e5814b662de8589d6a06c17e77940d5539080cbab9fe6794b5241b76d9"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/b4/ac/2d0bf836c9c63a57360d57b773359043b371115e1c78ff648993bf19abd0/multidict-6.6.3-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "61af8a4b771f1d4d000b3168c12c3120ccf7284502a94aa58c68a81f5afac090"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/85/e1/68a65f069df298615591e70e48bfd379c27d4ecb252117c18bf52eebc237/multidict-6.6.3-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "448e4a9afccbf297577f2eaa586f07067441e7b63c8362a3540ba5a38dc0f14a"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ae/ab/702f1baca649f88ea1dc6259fc2aa4509f4ad160ba48c8e61fbdb4a5a365/multidict-6.6.3-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "233ad16999afc2bbd3e534ad8dbe685ef8ee49a37dbc2cdc9514e57b6d589ced"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/5e/0b/726e690bfbf887985a8710ef2f25f1d6dd184a35bd3b36429814f810a2fc/multidict-6.6.3-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "bb933c891cd4da6bdcc9733d048e994e22e1883287ff7540c2a0f3b117605092"}}, - {name = "multidict-6.6.3-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/bb/839486b27bcbcc2e0d875fb9d4012b4b6aa99639137343106aa7210e047a/multidict-6.6.3-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "37b09ca60998e87734699e88c2363abfd457ed18cfbf88e4009a4e83788e63ed"}}, - {name = "multidict-6.6.3-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/e3/46/574d75ab7b9ae8690fe27e89f5fcd0121633112b438edfb9ed2be8be096b/multidict-6.6.3-cp39-cp39-win32.whl",hashes = {sha256 = "f54cb79d26d0cd420637d184af38f0668558f3c4bbe22ab7ad830e67249f2e0b"}}, - {name = "multidict-6.6.3-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/78/c3/8b3bc755508b777868349f4bfa844d3d31832f075ee800a3d6f1807338c5/multidict-6.6.3-cp39-cp39-win_amd64.whl",hashes = {sha256 = "295adc9c0551e5d5214b45cf29ca23dbc28c2d197a9c30d51aed9e037cb7c578"}}, - {name = "multidict-6.6.3-cp39-cp39-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b2/30/5a66e7e4550e80975faee5b5dd9e9bd09194d2fd8f62363119b9e46e204b/multidict-6.6.3-cp39-cp39-win_arm64.whl",hashes = {sha256 = 
"15332783596f227db50fb261c2c251a58ac3873c457f3a550a95d5c0aa3c770d"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1787,6 +1765,108 @@ dependencies = [ "typing-extensions>=4.1.0; python_version < \"3.11\"", ] +[[packages]] +name = "async-timeout" +version = "5.0.1" +requires-python = ">=3.8" +sdist = {name = "async_timeout-5.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hashes = {sha256 = "d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}} +wheels = [ + {name = "async_timeout-5.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl",hashes = {sha256 = "39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}}, +] +marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"default\" in dependency_groups or python_full_version ~= \"3.9.0\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "h2" +version = "4.2.0" +requires-python = ">=3.9" +sdist = {name = "h2-4.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hashes = {sha256 = "c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"}} +wheels = [ + {name = "h2-4.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl",hashes = {sha256 = "479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "hyperframe<7,>=6.1", + "hpack<5,>=4.1", +] + +[[packages]] +name = "hf-xet" +version = "1.1.5" +requires-python = ">=3.8" +sdist = {name = "hf_xet-1.1.5.tar.gz", url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hashes = {sha256 = "69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694"}} +wheels = [ + {name = "hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23"}}, + {name = "hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/de/5f/2c78e28f309396e71ec8e4e9304a6483dcbc36172b5cea8f291994163425/hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8"}}, + {name = "hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1"}}, + {name = "hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/d0/54/0fcf2b619720a26fbb6cc941e89f2472a522cd963a776c089b189559447f/hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl",hashes = {sha256 = "dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18"}}, + {name = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/92/1d351ac6cef7c4ba8c85744d37ffbfac2d53d0a6c04d2cabeba614640a78/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14"}}, + {name = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/65/4b2ddb0e3e983f2508528eb4501288ae2f84963586fbdfae596836d5e57a/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a"}}, + {name = "hf_xet-1.1.5-cp37-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl",hashes = {sha256 = "73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245"}}, +] +marker = "(platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\") and \"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "hpack" +version = "4.1.0" +requires-python = ">=3.9" +sdist = {name = "hpack-4.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hashes = {sha256 = "ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"}} +wheels = [ + {name = "hpack-4.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl",hashes = {sha256 = "157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "hyperframe" +version = "6.1.0" +requires-python = ">=3.9" +sdist = {name = "hyperframe-6.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hashes = {sha256 = "f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"}} +wheels = [ + {name = "hyperframe-6.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl",hashes = {sha256 = "b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "idna" +version = "3.10" +requires-python = ">=3.6" +sdist = {name = "idna-3.10.tar.gz", url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hashes = {sha256 = "12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}} +wheels = [ + {name = "idna-3.10-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl",hashes = {sha256 = "946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}}, +] +marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in 
extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "mdit-py-plugins" +version = "0.4.2" +requires-python = ">=3.8" +sdist = {name = "mdit_py_plugins-0.4.2.tar.gz", url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hashes = {sha256 = "5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}} +wheels = [ + {name = "mdit_py_plugins-0.4.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl",hashes = {sha256 = "0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}}, +] +marker = "\"dev\" in extras" + +[packages.tool.pdm] +dependencies = [ + "markdown-it-py<4.0.0,>=1.0.0", +] + [[packages]] name = "regex" version = "2024.11.6" @@ -1854,22 +1934,6 @@ wheels = [ {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/f2/48b393b51900456155de3ad001900f94298965e1cad1c772b87f9cfea011/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}}, {name = "regex-2024.11.6-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/45/3f/ef9589aba93e084cd3f8471fded352826dcae8489b650d0b9b27bc5bba8a/regex-2024.11.6-cp310-cp310-win32.whl",hashes = {sha256 = "b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}}, {name = "regex-2024.11.6-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/42/7e/5f1b92c8468290c465fd50c5318da64319133231415a8aa6ea5ab995a815/regex-2024.11.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}}, - {name = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/89/23/c4a86df398e57e26f93b13ae63acce58771e04bdde86092502496fa57f9c/regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}}, - {name = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/8b/45c24ab7a51a1658441b961b86209c43e6bb9d39caf1e63f46ce6ea03bc7/regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}}, - {name = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7a/d1/598de10b17fdafc452d11f7dada11c3be4e379a8671393e4e3da3c4070df/regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/70/c7eaa219efa67a215846766fde18d92d54cb590b6a04ffe43cef30057622/regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/89/e5/ef52c7eb117dd20ff1697968219971d052138965a4d3d9b95e92e549f505/regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}}, - {name = 
"regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/5f/3f/9f5da81aff1d4167ac52711acf789df13e789fe6ac9545552e49138e3282/regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/44/2101cc0890c3621b90365c9ee8d7291a597c0722ad66eccd6ffa7f1bcc09/regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ce/2e/3e0668d8d1c7c3c0d397bf54d92fc182575b3a26939aed5000d3cc78760f/regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}}, - {name = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",url = "https://files.pythonhosted.org/packages/a6/49/1bc4584254355e3dba930a3a2fd7ad26ccba3ebbab7d9100db0aff2eedb0/regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",hashes = {sha256 = "764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c8/dd/42879c1fc8a37a887cd08e358af3d3ba9e23038cd77c7fe044a86d9450ba/regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/89/96/c05a0fe173cd2acd29d5e13c1adad8b706bcaa71b169e1ee57dcf2e74584/regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b5/f3/a757748066255f97f14506483436c5f6aded7af9e37bca04ec30c90ca683/regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/5c/93/c6d2092fd479dcaeea40fc8fa673822829181ded77d294a7f950f1dda6e2/regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}}, - {name = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/9c/daa99532c72f25051a90ef90e1413a8d54413a9e64614d9095b0c1c154d0/regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}}, - {name = "regex-2024.11.6-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/13/5d/61a533ccb8c231b474ac8e3a7d70155b00dfc61af6cafdccd1947df6d735/regex-2024.11.6-cp39-cp39-win32.whl",hashes = {sha256 = "41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}}, - {name = "regex-2024.11.6-cp39-cp39-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/dc/7b/e59b7f7c91ae110d154370c24133f947262525b5d6406df65f23422acc17/regex-2024.11.6-cp39-cp39-win_amd64.whl",hashes = {sha256 = "b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}}, ] marker = "\"default\" in dependency_groups or \"recommended\" in extras" @@ -2054,23 +2118,6 @@ wheels = [ {name = "yarl-1.20.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl",hashes = {sha256 = "6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"}}, {name = "yarl-1.20.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"}}, {name = "yarl-1.20.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl",hashes = {sha256 = "83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}}, - {name = "yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/01/75/0d37402d208d025afa6b5b8eb80e466d267d3fd1927db8e317d29a94a4cb/yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3"}}, - {name = "yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/84/1fb6c85ae0cf9901046f07d0ac9eb162f7ce6d95db541130aa542ed377e6/yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b"}}, - {name = "yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f3/9c/eae746b24c4ea29a5accba9a06c197a70fa38a49c7df244e0d3951108861/yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/fb/30/693e71003ec4bc1daf2e4cf7c478c417d0985e0a8e8f00b2230d517876fc/yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/0f/a2/5264dbebf90763139aeb0b0b3154763239398400f754ae19a0518b654117/yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e7/17/77c7a89b3c05856489777e922f41db79ab4faf58621886df40d812c7facd/yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/6d/55/28409330b8ef5f2f681f5b478150496ec9cf3309b149dab7ec8ab5cfa3f0/yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = 
"749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/85/58/cb0257cbd4002828ff735f44d3c5b6966c4fd1fc8cc1cd3cd8a143fbc513/yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000"}}, - {name = "yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/53/f6/c77960370cfa46f6fb3d6a5a79a49d3abfdb9ef92556badc2dcd2748bc2a/yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/64/ab/be0b10b8e029553c10905b6b00c64ecad3ebc8ace44b02293a62579343f6/yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/c5/c3/3f327bd3905a4916029bf5feb7f86dcf864c7704f099715f62155fb386b2/yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/d1/42/040bdd5d3b3bb02b4a6ace4ed4075e02f85df964d6e6cb321795d2a6496a/yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/0d/1c/911867b8e8c7463b84dfdc275e0d99b04b66ad5132b503f184fe76be8ea4/yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/e2/31/8c389f6c6ca0379b57b2da87f1f126c834777b4931c5ee8427dd65d0ff6b/yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8"}}, - {name = "yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7f/09/ae4a649fb3964324c70a3e2b61f45e566d9ffc0affd2b974cbf628957673/yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d"}}, - {name = "yarl-1.20.1-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/8d/43/bbb4ed4c34d5bb62b48bf957f68cd43f736f79059d4f85225ab1ef80f4b9/yarl-1.20.1-cp39-cp39-win32.whl",hashes = {sha256 = "b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06"}}, - {name = "yarl-1.20.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d7/cd/ce185848a7dba68ea69e932674b5c1a42a1852123584bccc5443120f857c/yarl-1.20.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2168,28 +2215,25 @@ wheels = [ {name = "propcache-0.3.2-cp310-cp310-win32.whl",url = 
"https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl",hashes = {sha256 = "404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"}}, {name = "propcache-0.3.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"}}, {name = "propcache-0.3.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl",hashes = {sha256 = "98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"}}, - {name = "propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/6c/39/8ea9bcfaaff16fd0b0fc901ee522e24c9ec44b4ca0229cfffb8066a06959/propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5"}}, - {name = "propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d3/85/cab84c86966e1d354cf90cdc4ba52f32f99a5bca92a1529d666d957d7686/propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4"}}, - {name = "propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/23/f7/9cb719749152d8b26d63801b3220ce2d3931312b2744d2b3a088b0ee9947/propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a2/a2/0b2b5a210ff311260002a315f6f9531b65a36064dfb804655432b2f7d3e3/propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3f/e0/7aff5de0c535f783b0c8be5bdb750c305c1961d69fbb136939926e155d98/propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/92/1d/65fa889eb3b2a7d6e4ed3c2b568a9cb8817547a1450b572de7bf24872800/propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/e2/eecf6989870988dfd731de408a6fa366e853d361a06c2133b5878ce821ad/propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef"}}, - {name = "propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/12/06/c32be4950967f18f77489268488c7cdc78cbfc65a8ba8101b15e526b83dc/propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/46/6c/17b521a6b3b7cbe277a4064ff0aa9129dd8c89f425a5a9b6b4dd51cc3ff4/propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/62/cb/3bdba2b736b3e45bc0e40f4370f745b3e711d439ffbffe3ae416393eece9/propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/29/bd/760c5c6a60a4a2c55a421bc34a25ba3919d49dee411ddb9d1493bb51d46e/propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/76/58/ced2757a46f55b8c84358d6ab8de4faf57cba831c51e823654da7144b13a/propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bb/ec/d98ea8d5a4d8fe0e372033f5254eddf3254344c0c5dc6c49ab84349e4733/propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb"}}, - {name = "propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/56/84/b6d8a7ecf3f62d7dd09d9d10bbf89fad6837970ef868b35b5ffa0d24d9de/propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe"}}, - {name = "propcache-0.3.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/bf/32/889f4903ddfe4a9dc61da71ee58b763758cf2d608fe1decede06e6467f8d/propcache-0.3.2-cp39-cp39-win32.whl",hashes = {sha256 = "4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1"}}, - {name = "propcache-0.3.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/67/74/d666795fb9ba1dc139d30de64f3b6fd1ff9c9d3d96ccfdb992cd715ce5d2/propcache-0.3.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "aiofiles" +version = "24.1.0" +requires-python = ">=3.8" +sdist = {name = "aiofiles-24.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hashes = {sha256 = "22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}} +wheels = [ + {name = "aiofiles-24.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl",hashes = {sha256 = "b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}}, +] +marker = 
"\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "aiohappyeyeballs" version = "2.6.1" @@ -2203,6 +2247,21 @@ marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "aiologic" +version = "0.14.0" +requires-python = ">=3.8" +sdist = {name = "aiologic-0.14.0.tar.gz", url = "https://files.pythonhosted.org/packages/7e/2d/e893dcfa041dab1d045abfc8898239747cde19881796640861609138d360/aiologic-0.14.0.tar.gz", hashes = {sha256 = "c87925fa2bfe9ae292859e1094eb8fb6d456c8202a16405b0a44134803c8a791"}} +wheels = [ + {name = "aiologic-0.14.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/4d/1f/f797b684fb4e11a5066ab464b460b5cfdbaedea9c4a3d0f0afc8e894ada0/aiologic-0.14.0-py3-none-any.whl",hashes = {sha256 = "cc59d39dc1d5e2575b4a6b5faf678b551fb0f910c7cb42e4c5f5689ffedcce78"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "wrapt>=1.16.0", +] + [[packages]] name = "aiosignal" version = "1.4.0" @@ -2311,23 +2370,6 @@ wheels = [ {name = "frozenlist-1.7.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/c5/bf/7dcebae315436903b1d98ffb791a09d674c88480c158aa171958a3ac07f0/frozenlist-1.7.0-cp310-cp310-win32.whl",hashes = {sha256 = "400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"}}, {name = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8f/5f/f69818f017fa9a3d24d1ae39763e29b7f60a59e46d5f91b9c6b21622f4cd/frozenlist-1.7.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"}}, {name = "frozenlist-1.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl",hashes = {sha256 = "9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}}, - {name = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/dd/b1/ee59496f51cd244039330015d60f13ce5a54a0f2bd8d79e4a4a375ab7469/frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630"}}, - {name = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/75/e1/d518391ce36a6279b3fa5bc14327dde80bcb646bb50d059c6ca0756b8d05/frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71"}}, - {name = "frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b7/8d/a0d04f28b6e821a9685c22e67b5fb798a5a7b68752f104bfbc2dccf080c4/frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/93/3a/a5334c0535c8b7c78eeabda1579179e44fe3d644e07118e59a2276dedaf1/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/0a/67/8258d971f519dc3f278c55069a775096cda6610a267b53f6248152b72b2f/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/fc/89/8225905bf889b97c6d935dd3aeb45668461e59d415cb019619383a8a7c3b/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/54/6e/ef52375aa93d4bc510d061df06205fa6dcfd94cd631dd22956b09128f0d4/frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ee/55/62c87d1a6547bfbcd645df10432c129100c5bd0fd92a384de6e3378b07c1/frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87"}}, - {name = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/45/d2/263fea1f658b8ad648c7d94d18a87bca7e8c67bd6a1bbf5445b1bd5b158c/frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/22/7145e35d12fb368d92124f679bea87309495e2e9ddf14c6533990cb69218/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/44/1e/7dae8c54301beb87bcafc6144b9a103bfd2c8f38078c7902984c9a0c4e5b/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl",hashes = {sha256 = "836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/4b/1e/99c93e54aa382e949a98976a73b9b20c3aae6d9d893f31bbe4991f64e3a8/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/5e/9c/ca5105fa7fb5abdfa8837581be790447ae051da75d32f25c8f81082ffc45/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890"}}, - {name = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8d/4d/e99014756093b4ddbb67fb8f0df11fe7a415760d69ace98e2ac6d5d43402/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd"}}, - {name = 
"frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/72/a19a40bcdaa28a51add2aaa3a1a294ec357f36f27bd836a012e070c5e8a5/frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb"}}, - {name = "frozenlist-1.7.0-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/08/49/0042469993e023a758af81db68c76907cd29e847d772334d4d201cbe9a42/frozenlist-1.7.0-cp39-cp39-win32.whl",hashes = {sha256 = "b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e"}}, - {name = "frozenlist-1.7.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5a/45/827d86ee475c877f5f766fbc23fb6acb6fada9e52f1c9720e2ba3eae32da/frozenlist-1.7.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2431,87 +2473,101 @@ dependencies = [] [[packages]] name = "coverage" -version = "7.10.6" +version = "7.10.7" requires-python = ">=3.9" -sdist = {name = "coverage-7.10.6.tar.gz", url = "https://files.pythonhosted.org/packages/14/70/025b179c993f019105b79575ac6edb5e084fb0f0e63f15cdebef4e454fb5/coverage-7.10.6.tar.gz", hashes = {sha256 = "f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90"}} -wheels = [ - {name = "coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d3/aa/76cf0b5ec00619ef208da4689281d48b57f2c7fde883d14bf9441b74d59f/coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e"}}, - {name = "coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/65/91/8e41b8c7c505d398d7730206f3cbb4a875a35ca1041efc518051bfce0f6b/coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb"}}, - {name = "coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/87/7f/f718e732a423d442e6616580a951b8d1ec3575ea48bcd0e2228386805e79/coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034"}}, - {name = "coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/e6/52/c1106120e6d801ac03e12b5285e971e758e925b6f82ee9b86db3aa10045d/coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1"}}, - {name = "coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3d/ec/3a8645b1bb40e36acde9c0609f08942852a4af91a937fe2c129a38f2d3f5/coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a"}}, - {name = "coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a1/70/09ecb68eeb1155b28a1d16525fd3a9b65fbe75337311a99830df935d62b6/coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb"}}, 
- {name = "coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c6/80/47df374b893fa812e953b5bc93dcb1427a7b3d7a1a7d2db33043d17f74b9/coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d"}}, - {name = "coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/4a/65/9f98640979ecee1b0d1a7164b589de720ddf8100d1747d9bbdb84be0c0fb/coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747"}}, - {name = "coverage-7.10.6-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/1f/55/eeb6603371e6629037f47bd25bef300387257ed53a3c5fdb159b7ac8c651/coverage-7.10.6-cp314-cp314-win32.whl",hashes = {sha256 = "6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5"}}, - {name = "coverage-7.10.6-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/15/d1/a0912b7611bc35412e919a2cd59ae98e7ea3b475e562668040a43fb27897/coverage-7.10.6-cp314-cp314-win_amd64.whl",hashes = {sha256 = "adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713"}}, - {name = "coverage-7.10.6-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/ef/2d/11880bb8ef80a45338e0b3e0725e4c2d73ffbb4822c29d987078224fd6a5/coverage-7.10.6-cp314-cp314-win_arm64.whl",hashes = {sha256 = "a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32"}}, - {name = "coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/83/c0/1f00caad775c03a700146f55536ecd097a881ff08d310a58b353a1421be0/coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65"}}, - {name = "coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a9/c4/b1c5d2bd7cc412cbeb035e257fd06ed4e3e139ac871d16a07434e145d18d/coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6"}}, - {name = "coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/3f/07/4468d37c94724bf6ec354e4ec2f205fda194343e3e85fd2e59cec57e6a54/coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0"}}, - {name = "coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/d8/f8fb351be5fee31690cd8da768fd62f1cfab33c31d9f7baba6cd8960f6b8/coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e"}}, - {name = "coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e8/70/65d4d7cfc75c5c6eb2fed3ee5cdf420fd8ae09c4808723a89a81d5b1b9c3/coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5"}}, - {name = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/98/3c/069df106d19024324cde10e4ec379fe2fb978017d25e97ebee23002fbadf/coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7"}}, - {name = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/fc/8a/2974d53904080c5dc91af798b3a54a4ccb99a45595cc0dcec6eb9616a57d/coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5"}}, - {name = "coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/30/38/9616a6b49c686394b318974d7f6e08f38b8af2270ce7488e879888d1e5db/coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0"}}, - {name = "coverage-7.10.6-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/76/16/3ed2d6312b371a8cf804abf4e14895b70e4c3491c6e53536d63fd0958a8d/coverage-7.10.6-cp314-cp314t-win32.whl",hashes = {sha256 = "441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7"}}, - {name = "coverage-7.10.6-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d5/e5/d38d0cb830abede2adb8b147770d2a3d0e7fecc7228245b9b1ae6c24930a/coverage-7.10.6-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930"}}, - {name = "coverage-7.10.6-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f4/51/e48e550f6279349895b0ffcd6d2a690e3131ba3a7f4eafccc141966d4dea/coverage-7.10.6-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b"}}, - {name = "coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/e7/917e5953ea29a28c1057729c1d5af9084ab6d9c66217523fd0e10f14d8f6/coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6"}}, - {name = "coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/eb/86/2e161b93a4f11d0ea93f9bebb6a53f113d5d6e416d7561ca41bb0a29996b/coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80"}}, - {name = "coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/0e/66/d03348fdd8df262b3a7fb4ee5727e6e4936e39e2f3a842e803196946f200/coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003"}}, - {name = "coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/dd/508420fb47d09d904d962f123221bc249f64b5e56aa93d5f5f7603be475f/coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27"}}, - {name = "coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/e9/1f/9020135734184f439da85c70ea78194c2730e56c2d18aee6e8ff1719d50d/coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4"}}, - {name = "coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a4/a4/3d228f3942bb5a2051fde28c136eea23a761177dc4ff4ef54533164ce255/coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d"}}, - {name = "coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/36/e3/293dce8cdb9a83de971637afc59b7190faad60603b40e32635cbd15fbf61/coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc"}}, - {name = "coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/90/26/64eecfa214e80dd1d101e420cab2901827de0e49631d666543d0e53cf597/coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc"}}, - {name = "coverage-7.10.6-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/3e/70/bd80588338f65ea5b0d97e424b820fb4068b9cfb9597fbd91963086e004b/coverage-7.10.6-cp313-cp313-win32.whl",hashes = {sha256 = "160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e"}}, - {name = "coverage-7.10.6-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a7/14/0b831122305abcc1060c008f6c97bbdc0a913ab47d65070a01dc50293c2b/coverage-7.10.6-cp313-cp313-win_amd64.whl",hashes = {sha256 = "628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32"}}, - {name = "coverage-7.10.6-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/83/c6/81a83778c1f83f1a4a168ed6673eeedc205afb562d8500175292ca64b94e/coverage-7.10.6-cp313-cp313-win_arm64.whl",hashes = {sha256 = "df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2"}}, - {name = "coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d7/1c/ccccf4bf116f9517275fa85047495515add43e41dfe8e0bef6e333c6b344/coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b"}}, - {name = "coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/92/97/8a3ceff833d27c7492af4f39d5da6761e9ff624831db9e9f25b3886ddbca/coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393"}}, - {name = "coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/92/d8/50b4a32580cf41ff0423777a2791aaf3269ab60c840b62009aec12d3970d/coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27"}}, - {name = "coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/7e/6a7df5a6fb440a0179d94a348eb6616ed4745e7df26bf2a02bc4db72c421/coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = 
"d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df"}}, - {name = "coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3a/4c/a270a414f4ed5d196b9d3d67922968e768cd971d1b251e1b4f75e9362f75/coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb"}}, - {name = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9c/8b/3210d663d594926c12f373c5370bf1e7c5c3a427519a8afa65b561b9a55c/coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282"}}, - {name = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/72/d0/e1961eff67e9e1dba3fc5eb7a4caf726b35a5b03776892da8d79ec895775/coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4"}}, - {name = "coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3a/06/d6478d152cd189b33eac691cba27a40704990ba95de49771285f34a5861e/coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21"}}, - {name = "coverage-7.10.6-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/ed/73/737440247c914a332f0b47f7598535b29965bf305e19bbc22d4c39615d2b/coverage-7.10.6-cp313-cp313t-win32.whl",hashes = {sha256 = "92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0"}}, - {name = "coverage-7.10.6-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bd/76/b92d3214740f2357ef4a27c75a526eb6c28f79c402e9f20a922c295c05e2/coverage-7.10.6-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5"}}, - {name = "coverage-7.10.6-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/fc/8e/6dcb29c599c8a1f654ec6cb68d76644fe635513af16e932d2d4ad1e5ac6e/coverage-7.10.6-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b"}}, - {name = "coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/06/263f3305c97ad78aab066d116b52250dd316e74fcc20c197b61e07eb391a/coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea"}}, - {name = "coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e9/60/1e1ded9a4fe80d843d7d53b3e395c1db3ff32d6c301e501f393b2e6c1c1f/coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634"}}, - {name = "coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/b8/25/52136173c14e26dfed8b106ed725811bb53c30b896d04d28d74cb64318b3/coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6"}}, - {name = "coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/cb/1d/ae25a7dc58fcce8b172d42ffe5313fc267afe61c97fa872b80ee72d9515a/coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9"}}, - {name = "coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f5/7a/1f561d47743710fe996957ed7c124b421320f150f1d38523d8d9102d3e2a/coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c"}}, - {name = "coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/6c/ad/8b97cd5d28aecdfde792dcbf646bac141167a5cacae2cd775998b45fabb5/coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a"}}, - {name = "coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/33/6a/95c32b558d9a61858ff9d79580d3877df3eb5bc9eed0941b1f187c89e143/coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5"}}, - {name = "coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/0d/9c/8ce95dee640a38e760d5b747c10913e7a06554704d60b41e73fdea6a1ffd/coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972"}}, - {name = "coverage-7.10.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/04/12/7a55b0bdde78a98e2eb2356771fd2dcddb96579e8342bb52aa5bc52e96f0/coverage-7.10.6-cp312-cp312-win32.whl",hashes = {sha256 = "a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d"}}, - {name = "coverage-7.10.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/36/4a/32b185b8b8e327802c9efce3d3108d2fe2d9d31f153a0f7ecfd59c773705/coverage-7.10.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629"}}, - {name = "coverage-7.10.6-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/08/3a/d5d8dc703e4998038c3099eaf77adddb00536a3cec08c8dcd556a36a3eb4/coverage-7.10.6-cp312-cp312-win_arm64.whl",hashes = {sha256 = "acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80"}}, - {name = "coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/16/2bea27e212c4980753d6d563a0803c150edeaaddb0771a50d2afc410a261/coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f"}}, - {name = "coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2a/51/e7159e068831ab37e31aac0969d47b8c5ee25b7d307b51e310ec34869315/coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc"}}, - {name = "coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/e7/c0/246ccbea53d6099325d25cd208df94ea435cd55f0db38099dd721efc7a1f/coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = 
"752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a"}}, - {name = "coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/7d/fb/7435ef8ab9b2594a6e3f58505cc30e98ae8b33265d844007737946c59389/coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a"}}, - {name = "coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/51/f8/d9d64e8da7bcddb094d511154824038833c81e3a039020a9d6539bf303e9/coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62"}}, - {name = "coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/43/28/c43ba0ef19f446d6463c751315140d8f2a521e04c3e79e5c5fe211bfa430/coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153"}}, - {name = "coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/79/3e/53635bd0b72beaacf265784508a0b386defc9ab7fad99ff95f79ce9db555/coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5"}}, - {name = "coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/55/0964aa87126624e8c159e32b0bc4e84edef78c89a1a4b924d28dd8265625/coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619"}}, - {name = "coverage-7.10.6-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/eb/ab/6cfa9dc518c6c8e14a691c54e53a9433ba67336c760607e299bfcf520cb1/coverage-7.10.6-cp311-cp311-win32.whl",hashes = {sha256 = "e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba"}}, - {name = "coverage-7.10.6-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5b/18/99b25346690cbc55922e7cfef06d755d4abee803ef335baff0014268eff4/coverage-7.10.6-cp311-cp311-win_amd64.whl",hashes = {sha256 = "f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e"}}, - {name = "coverage-7.10.6-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/d8/ed/81d86648a07ccb124a5cf1f1a7788712b8d7216b593562683cd5c9b0d2c1/coverage-7.10.6-cp311-cp311-win_arm64.whl",hashes = {sha256 = "99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c"}}, - {name = "coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/1d/2e64b43d978b5bd184e0756a41415597dfef30fcbd90b747474bd749d45f/coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356"}}, - {name = "coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/23/62/b1e0f513417c02cc10ef735c3ee5186df55f190f70498b3702d516aad06f/coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301"}}, - {name = "coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = 
"https://files.pythonhosted.org/packages/e7/16/b800640b7a43e7c538429e4d7223e0a94fd72453a1a048f70bf766f12e96/coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460"}}, - {name = "coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/fb/6f/5e03631c3305cad187eaf76af0b559fff88af9a0b0c180d006fb02413d7a/coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd"}}, - {name = "coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/eb/a1/f30ea0fb400b080730125b490771ec62b3375789f90af0bb68bfb8a921d7/coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb"}}, - {name = "coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/02/8e/cfa8fee8e8ef9a6bb76c7bef039f3302f44e615d2194161a21d3d83ac2e9/coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6"}}, - {name = "coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/93/a9/51be09b75c55c4f6c16d8d73a6a1d46ad764acca0eab48fa2ffaef5958fe/coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945"}}, - {name = "coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/a6/ba188b376529ce36483b2d585ca7bdac64aacbe5aa10da5978029a9c94db/coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e"}}, - {name = "coverage-7.10.6-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/d0/4c/37ed872374a21813e0d3215256180c9a382c3f5ced6f2e5da0102fc2fd3e/coverage-7.10.6-cp310-cp310-win32.whl",hashes = {sha256 = "86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1"}}, - {name = "coverage-7.10.6-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8e/36/9311352fdc551dec5b973b61f4e453227ce482985a9368305880af4f85dd/coverage-7.10.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528"}}, - {name = "coverage-7.10.6-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/44/0c/50db5379b615854b5cf89146f8f5bd1d5a9693d7f3a987e269693521c404/coverage-7.10.6-py3-none-any.whl",hashes = {sha256 = "92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3"}}, +sdist = {name = "coverage-7.10.7.tar.gz", url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hashes = {sha256 = "f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"}} +wheels = [ + {name = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"}}, + {name = 
"coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"}}, + {name = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl",hashes = {sha256 = "925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"}}, + {name = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"}}, + {name = "coverage-7.10.7-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl",hashes = {sha256 = "b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"}}, + {name = "coverage-7.10.7-cp314-cp314-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl",hashes = {sha256 = "1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"}}, + {name = "coverage-7.10.7-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl",hashes = {sha256 = "097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"}}, + {name = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"}}, + {name = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"}}, + {name = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"}}, + {name = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"}}, + {name = "coverage-7.10.7-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl",hashes = {sha256 = "67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"}}, + {name = "coverage-7.10.7-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"}}, + {name = "coverage-7.10.7-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"}}, + {name = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"}}, + {name = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"}}, + {name = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"}}, + {name = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"}}, + {name = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"}}, + {name = "coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl",hashes = {sha256 = "b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"}}, + {name = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"}}, + {name = "coverage-7.10.7-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl",hashes = {sha256 = "dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"}}, + {name = "coverage-7.10.7-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl",hashes = {sha256 = "cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"}}, + {name = "coverage-7.10.7-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl",hashes = {sha256 = "4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"}}, + {name = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"}}, + {name = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"}}, + {name = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"}}, + {name = 
"coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"}}, + {name = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"}}, + {name = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"}}, + {name = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"}}, + {name = "coverage-7.10.7-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl",hashes = {sha256 = "2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"}}, + {name = "coverage-7.10.7-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"}}, + {name = "coverage-7.10.7-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"}}, + {name = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"}}, + {name = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"}}, + {name = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"}}, + {name = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"}}, + {name = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"}}, + {name = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl",hashes = {sha256 = "5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"}}, + {name = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"}}, + {name = "coverage-7.10.7-cp312-cp312-win32.whl",url = 
"https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl",hashes = {sha256 = "77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}}, + {name = "coverage-7.10.7-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl",hashes = {sha256 = "f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}}, + {name = "coverage-7.10.7-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl",hashes = {sha256 = "bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}}, + {name = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}}, + {name = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl",hashes = {sha256 = "88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}}, + {name = "coverage-7.10.7-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl",hashes = {sha256 = "972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}}, + {name = "coverage-7.10.7-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl",hashes = {sha256 = "a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}}, + {name = "coverage-7.10.7-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl",hashes = {sha256 = "736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}}, + {name = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}}, + {name = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = 
{sha256 = "6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl",hashes = {sha256 = "606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}}, + {name = "coverage-7.10.7-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl",hashes = {sha256 = "b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}}, + {name = "coverage-7.10.7-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl",hashes = {sha256 = "3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}}, + {name = "coverage-7.10.7-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl",hashes = {sha256 = "f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}}, ] marker = "\"dev\" in extras" @@ -2526,7 +2582,7 @@ sdist = {name = "exceptiongroup-1.3.0.tar.gz", url = "https://files.pythonhosted wheels = [ {name = "exceptiongroup-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl",hashes = {sha256 = "4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}}, ] -marker = "python_version < \"3.11\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"default\" in dependency_groups or python_full_version ~= \"3.9.0\" and \"dev\" in 
extras" [packages.tool.pdm] dependencies = [ @@ -2546,6 +2602,59 @@ marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "html5tagger" +version = "1.3.0" +requires-python = ">=3.7" +sdist = {name = "html5tagger-1.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/9e/02/2ae5f46d517a2c1d4a17f2b1e4834c2c7cc0fb3a69c92389172fa16ab389/html5tagger-1.3.0.tar.gz", hashes = {sha256 = "84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}} +wheels = [ + {name = "html5tagger-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/9b/12/2f5d43ee912ea14a6baba4b3db6d309b02d932e3b7074c3339b4aded98ff/html5tagger-1.3.0-py3-none-any.whl",hashes = {sha256 = "ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "httptools" +version = "0.6.4" +requires-python = ">=3.8.0" +sdist = {name = "httptools-0.6.4.tar.gz", url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hashes = {sha256 = "4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}} +wheels = [ + {name = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}}, + {name = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}}, + {name = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}}, + {name = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}}, + {name = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}}, + {name = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}}, + {name = "httptools-0.6.4-cp313-cp313-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl",hashes = {sha256 = "28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}}, + {name = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}}, + {name = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}}, + {name = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}}, + {name = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}}, + {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}}, + {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}}, + {name = "httptools-0.6.4-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl",hashes = {sha256 = "db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}}, + {name = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}}, + {name = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}}, + {name = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}}, + {name = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}}, + {name = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}}, + {name = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}}, + {name = "httptools-0.6.4-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl",hashes = {sha256 = "288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}}, + {name = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}}, + {name = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6a/b0/17c672b4bc5c7ba7f201eada4e96c71d0a59fbc185e60e42580093a86f21/httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}}, + {name = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/92/5e/b4a826fe91971a0b68e8c2bd4e7db3e7519882f5a8ccdb1194be2b3ab98f/httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}}, + {name = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b0/51/ce61e531e40289a681a463e1258fa1e05e0be54540e40d91d065a264cd8f/httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}}, + {name = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ea/9e/270b7d767849b0c96f275c695d27ca76c30671f8eb8cc1bab6ced5c5e1d0/httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}}, + {name = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/81/86/ced96e3179c48c6f656354e106934e65c8963d48b69be78f355797f0e1b3/httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}}, + {name = "httptools-0.6.4-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/75/73/187a3f620ed3175364ddb56847d7a608a6fc42d551e133197098c0143eca/httptools-0.6.4-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "identify" version = "2.6.12" @@ -2580,7 +2689,7 @@ sdist = {name = "importlib_metadata-8.7.0.tar.gz", url = "https://files.pythonho wheels = [ {name = "importlib_metadata-8.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl",hashes = {sha256 = "e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}}, ] -marker = "python_full_version < \"3.10.2\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "python_full_version < \"3.10.2\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [ @@ -2765,16 +2874,6 @@ wheels = [ {name = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}}, {name = "MarkupSafe-3.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}}, {name = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl",hashes = {sha256 = "eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl",hashes = {sha256 = "8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}}, - {name = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}}, ] marker = "\"dev\" in extras" @@ -2821,9 +2920,6 @@ wheels = [ {name = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ef/76/6e712a2623d146d314f17598df5de7224c85c0060ef63fd95cc15a25b3fa/multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",hashes = {sha256 = "476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}}, {name = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/ab/1e6e8009e380e22254ff539ebe117861e5bdb3bff1fc977920972237c6c7/multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",hashes = {sha256 = "d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}}, {name = "multiprocess-0.70.16-py310-none-any.whl",url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl",hashes = {sha256 = "c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}}, - {name = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d8/94/8638a89f93c80df329116e6781a060506c7e91e1f4370dc831e9d17a041d/multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl",hashes = 
{sha256 = "0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}}, - {name = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/21/222066f6bb8d8af287923ae3bd26cf4699a9ce020228ac273caca1de8250/multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl",hashes = {sha256 = "e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}}, - {name = "multiprocess-0.70.16-py39-none-any.whl",url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl",hashes = {sha256 = "a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}}, ] marker = "\"default\" in dependency_groups" @@ -2909,15 +3005,6 @@ wheels = [ {name = "pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7a/a6/aba40a2bf01b5d00cf9cd16d427a5da1fad0fb69b514ce8c8292ab80e968/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75"}}, {name = "pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/6b/98b39650cd64f32bf2ec6d627a9bd24fcb3e4e6ea1873c5e1ea8a83b1a18/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8"}}, {name = "pyarrow-20.0.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ab/32/340238be1eb5037e7b5de7e640ee22334417239bc347eadefaf8c373936d/pyarrow-20.0.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "4a8b029a07956b8d7bd742ffca25374dd3f634b35e46cc7a7c3fa4c75b297191"}}, - {name = "pyarrow-20.0.0-cp39-cp39-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/10/53/421820fa125138c868729b930d4bc487af2c4b01b1c6104818aab7e98f13/pyarrow-20.0.0-cp39-cp39-macosx_12_0_arm64.whl",hashes = {sha256 = "1bcbe471ef3349be7714261dea28fe280db574f9d0f77eeccc195a2d161fd861"}}, - {name = "pyarrow-20.0.0-cp39-cp39-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/70/fd75e03312b715e90d928fb91ed8d45c9b0520346e5231b1c69293afd4c7/pyarrow-20.0.0-cp39-cp39-macosx_12_0_x86_64.whl",hashes = {sha256 = "a18a14baef7d7ae49247e75641fd8bcbb39f44ed49a9fc4ec2f65d5031aa3b96"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/e3/21e5758e46219fdedf5e6c800574dd9d17e962e80014cfe08d6d475be863/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "cb497649e505dc36542d0e68eca1a3c94ecbe9799cb67b578b55f2441a247fbc"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/ac/f5/ed6a4c4b11f9215092a35097a985485bb7d879cb79d93d203494e8604f4e/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "11529a2283cb1f6271d7c23e4a8f9f8b7fd173f7360776b668e509d712a02eec"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/44/e5/466a63668ba25788ee8d38d55f853a60469ae7ad1cda343db9f3f45e0b0a/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl",hashes = {sha256 = "6fc1499ed3b4b57ee4e090e1cea6eb3584793fe3d1b4297bbf53f09b434991a5"}}, - {name = "pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/e8/d7/4c4d4e4cf6e53e16a519366dfe9223ee4a7a38e6e28c1c0d372b38ba3fe7/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl",hashes = {sha256 = "db53390eaf8a4dab4dbd6d93c85c5cf002db24902dbff0ca7d988beb5c9dd15b"}}, - {name = "pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/d5/79effb32585b7c18897d3047a2163034f3f9c944d12f7b2fd8df6a2edc70/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "851c6a8260ad387caf82d2bbf54759130534723e37083111d4ed481cb253cc0d"}}, - {name = "pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/09/5c/f707603552c058b2e9129732de99a67befb1f13f008cc58856304a62c38b/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e22f80b97a271f0a7d9cd07394a7d348f80d3ac63ed7cc38b6d1b696ab3b2619"}}, - {name = "pyarrow-20.0.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/26/cc/1eb6a01c1bbc787f596c270c46bcd2273e35154a84afcb1d0cb4cc72457e/pyarrow-20.0.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "9965a050048ab02409fb7cbbefeedba04d3d67f2cc899eff505cc084345959ca"}}, ] marker = "\"default\" in dependency_groups" @@ -3018,6 +3105,18 @@ marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "sanic-routing" +version = "23.12.0" +sdist = {name = "sanic-routing-23.12.0.tar.gz", url = "https://files.pythonhosted.org/packages/d1/5c/2a7edd14fbccca3719a8d680951d4b25f986752c781c61ccf156a6d1ebff/sanic-routing-23.12.0.tar.gz", hashes = {sha256 = "1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}} +wheels = [ + {name = "sanic_routing-23.12.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/cf/e3/3425c9a8773807ac2c01d6a56c8521733f09b627e5827e733c5cd36b9ac5/sanic_routing-23.12.0-py3-none-any.whl",hashes = {sha256 = "1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "snowballstemmer" version = "3.0.1" @@ -3057,6 +3156,104 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "tracerite" +version = "1.1.3" +sdist = {name = "tracerite-1.1.3.tar.gz", url = "https://files.pythonhosted.org/packages/27/b2/37b825b881f23bc56384c3142214ccbe5d9de7e7c5fe3d155fa032738b98/tracerite-1.1.3.tar.gz", hashes = {sha256 = "119fc006f240aa03fffb41cf99cf82fda5c0449c7d4b6fe42c6340403578b31e"}} +wheels = [ + {name = "tracerite-1.1.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/e6/bf/c65d36ec5a93048dd55b3247be26059970daad72263e35ecace2f3188b2c/tracerite-1.1.3-py3-none-any.whl",hashes = {sha256 = "811d8e2e0fb563b77340eebe2e9f7b324acfe01e09ea58db8bcaecb24327c823"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [ + "html5tagger>=1.2.1", +] + +[[packages]] +name = "ujson" +version = "5.11.0" +requires-python = ">=3.9" +sdist = {name = "ujson-5.11.0.tar.gz", url = "https://files.pythonhosted.org/packages/43/d9/3f17e3c5773fb4941c68d9a37a47b1a79c9649d6c56aefbed87cc409d18a/ujson-5.11.0.tar.gz", hashes = {sha256 = "e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0"}} +wheels = [ + {name = "ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/28/08/4518146f4984d112764b1dfa6fb7bad691c44a401adadaa5e23ccd930053/ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"65724738c73645db88f70ba1f2e6fb678f913281804d5da2fd02c8c5839af302"}}, + {name = "ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/29/37/2107b9a62168867a692654d8766b81bd2fd1e1ba13e2ec90555861e02b0c/ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "29113c003ca33ab71b1b480bde952fbab2a0b6b03a4ee4c3d71687cdcbd1a29d"}}, + {name = "ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/f8/25583c70f83788edbe3ca62ce6c1b79eff465d78dec5eb2b2b56b3e98b33/ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c44c703842024d796b4c78542a6fcd5c3cb948b9fc2a73ee65b9c86a22ee3638"}}, + {name = "ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/ed/ca/19b3a632933a09d696f10dc1b0dfa1d692e65ad507d12340116ce4f67967/ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "e750c436fb90edf85585f5c62a35b35082502383840962c6983403d1bd96a02c"}}, + {name = "ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/55/7a/4572af5324ad4b2bfdd2321e898a527050290147b4ea337a79a0e4e87ec7/ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "f278b31a7c52eb0947b2db55a5133fbc46b6f0ef49972cd1a80843b72e135aba"}}, + {name = "ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/71/a2b8c19cf4e1efe53cf439cdf7198ac60ae15471d2f1040b490c1f0f831f/ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ab2cb8351d976e788669c8281465d44d4e94413718af497b4e7342d7b2f78018"}}, + {name = "ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/7a/3e/7b98668cba3bb3735929c31b999b374ebc02c19dfa98dfebaeeb5c8597ca/ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "090b4d11b380ae25453100b722d0609d5051ffe98f80ec52853ccf8249dfd840"}}, + {name = "ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a1/ea/8870f208c20b43571a5c409ebb2fe9b9dba5f494e9e60f9314ac01ea8f78/ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "80017e870d882d5517d28995b62e4e518a894f932f1e242cbc802a2fd64d365c"}}, + {name = "ujson-5.11.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/63/b6/c0e6607e37fa47929920a685a968c6b990a802dec65e9c5181e97845985d/ujson-5.11.0-cp314-cp314-win32.whl",hashes = {sha256 = "1d663b96eb34c93392e9caae19c099ec4133ba21654b081956613327f0e973ac"}}, + {name = "ujson-5.11.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/4e/56/f4fe86b4c9000affd63e9219e59b222dc48b01c534533093e798bf617a7e/ujson-5.11.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "849e65b696f0d242833f1df4182096cedc50d414215d1371fca85c541fbff629"}}, + {name = "ujson-5.11.0-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0a/f3/669437f0280308db4783b12a6d88c00730b394327d8334cc7a32ef218e64/ujson-5.11.0-cp314-cp314-win_arm64.whl",hashes = {sha256 = "e73df8648c9470af2b6a6bf5250d4744ad2cf3d774dcf8c6e31f018bdd04d764"}}, + {name = "ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6e/cd/e9809b064a89fe5c4184649adeb13c1b98652db3f8518980b04227358574/ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"de6e88f62796372fba1de973c11138f197d3e0e1d80bcb2b8aae1e826096d433"}}, + {name = "ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1b/be/ae26a6321179ebbb3a2e2685b9007c71bcda41ad7a77bbbe164005e956fc/ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "49e56ef8066f11b80d620985ae36869a3ff7e4b74c3b6129182ec5d1df0255f3"}}, + {name = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ae/e9/fb4a220ee6939db099f4cfeeae796ecb91e7584ad4d445d4ca7f994a9135/ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1a325fd2c3a056cf6c8e023f74a0c478dd282a93141356ae7f16d5309f5ff823"}}, + {name = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/bd/f8/fc4b952b8f5fea09ea3397a0bd0ad019e474b204cabcb947cead5d4d1ffc/ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "a0af6574fc1d9d53f4ff371f58c96673e6d988ed2b5bf666a6143c782fa007e9"}}, + {name = "ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/e5/af5491dfda4f8b77e24cf3da68ee0d1552f99a13e5c622f4cef1380925c3/ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "10f29e71ecf4ecd93a6610bd8efa8e7b6467454a363c3d6416db65de883eb076"}}, + {name = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/09/0945349dd41f25cc8c38d78ace49f14c5052c5bbb7257d2f466fa7bdb533/ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1a0a9b76a89827a592656fe12e000cf4f12da9692f51a841a4a07aa4c7ecc41c"}}, + {name = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/49/44/8e04496acb3d5a1cbee3a54828d9652f67a37523efa3d3b18a347339680a/ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "b16930f6a0753cdc7d637b33b4e8f10d5e351e1fb83872ba6375f1e87be39746"}}, + {name = "ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/64/ae/4bc825860d679a0f208a19af2f39206dfd804ace2403330fdc3170334a2f/ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "04c41afc195fd477a59db3a84d5b83a871bd648ef371cf8c6f43072d89144eef"}}, + {name = "ujson-5.11.0-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/30/ed/5a057199fb0a5deabe0957073a1c1c1c02a3e99476cd03daee98ea21fa57/ujson-5.11.0-cp314-cp314t-win32.whl",hashes = {sha256 = "aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5"}}, + {name = "ujson-5.11.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/aa/03/b19c6176bdf1dc13ed84b886e99677a52764861b6cc023d5e7b6ebda249d/ujson-5.11.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec"}}, + {name = "ujson-5.11.0-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/5d/ca/a0413a3874b2dc1708b8796ca895bf363292f9c70b2e8ca482b7dbc0259d/ujson-5.11.0-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab"}}, + {name = "ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/ec/2de9dd371d52c377abc05d2b725645326c4562fc87296a8907c7bcdf2db7/ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"109f59885041b14ee9569bf0bb3f98579c3fa0652317b355669939e5fc5ede53"}}, + {name = "ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/a4/f611f816eac3a581d8a4372f6967c3ed41eddbae4008d1d77f223f1a4e0a/ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "a31c6b8004438e8c20fc55ac1c0e07dad42941db24176fe9acf2815971f8e752"}}, + {name = "ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e9/c5/c161940967184de96f5cbbbcce45b562a4bf851d60f4c677704b1770136d/ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "78c684fb21255b9b90320ba7e199780f653e03f6c2528663768965f4126a5b50"}}, + {name = "ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/2b/d6/c7b2444238f5b2e2d0e3dab300b9ddc3606e4b1f0e4bed5a48157cebc792/ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "4c9f5d6a27d035dd90a146f7761c2272cf7103de5127c9ab9c4cd39ea61e878a"}}, + {name = "ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/a3/292551f936d3d02d9af148f53e1bc04306b00a7cf1fcbb86fa0d1c887242/ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "837da4d27fed5fdc1b630bd18f519744b23a0b5ada1bbde1a36ba463f2900c03"}}, + {name = "ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/90/a6/82cfa70448831b1a9e73f882225980b5c689bf539ec6400b31656a60ea46/ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "787aff4a84da301b7f3bac09bc696e2e5670df829c6f8ecf39916b4e7e24e701"}}, + {name = "ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/84/5c/96e2266be50f21e9b27acaee8ca8f23ea0b85cb998c33d4f53147687839b/ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "6dd703c3e86dc6f7044c5ac0b3ae079ed96bf297974598116aa5fb7f655c3a60"}}, + {name = "ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/20/78abe3d808cf3bb3e76f71fca46cd208317bf461c905d79f0d26b9df20f1/ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3772e4fe6b0c1e025ba3c50841a0ca4786825a4894c8411bf8d3afe3a8061328"}}, + {name = "ujson-5.11.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/d8/50/8856e24bec5e2fc7f775d867aeb7a3f137359356200ac44658f1f2c834b2/ujson-5.11.0-cp313-cp313-win32.whl",hashes = {sha256 = "8fa2af7c1459204b7a42e98263b069bd535ea0cd978b4d6982f35af5a04a4241"}}, + {name = "ujson-5.11.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5b/d8/1baee0f4179a4d0f5ce086832147b6cc9b7731c24ca08e14a3fdb8d39c32/ujson-5.11.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "34032aeca4510a7c7102bd5933f59a37f63891f30a0706fb46487ab6f0edf8f0"}}, + {name = "ujson-5.11.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/a9/8c/6d85ef5be82c6d66adced3ec5ef23353ed710a11f70b0b6a836878396334/ujson-5.11.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "ce076f2df2e1aa62b685086fbad67f2b1d3048369664b4cdccc50707325401f9"}}, + {name = "ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/b9/ef/a9cb1fce38f699123ff012161599fb9f2ff3f8d482b4b18c43a2dc35073f/ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702"}}, + {name = "ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b1/05/dba51a00eb30bd947791b173766cbed3492269c150a7771d2750000c965f/ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d"}}, + {name = "ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/03/3c/fd11a224f73fbffa299fb9644e425f38b38b30231f7923a088dd513aabb4/ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80"}}, + {name = "ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/55/b9/405103cae24899df688a3431c776e00528bd4799e7d68820e7ebcf824f92/ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "fa79fdb47701942c2132a9dd2297a1a85941d966d8c87bfd9e29b0cf423f26cc"}}, + {name = "ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/7b/2dcbc2bbfdbf68f2368fb21ab0f6735e872290bb604c75f6e06b81edcb3f/ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8254e858437c00f17cb72e7a644fc42dad0ebb21ea981b71df6e84b1072aaa7c"}}, + {name = "ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/d1/71/fea2ca18986a366c750767b694430d5ded6b20b6985fddca72f74af38a4c/ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1aa8a2ab482f09f6c10fba37112af5f957689a79ea598399c85009f2f29898b5"}}, + {name = "ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/a3/bb/d4220bd7532eac6288d8115db51710fa2d7d271250797b0bfba9f1e755af/ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "a638425d3c6eed0318df663df44480f4a40dc87cc7c6da44d221418312f6413b"}}, + {name = "ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/80/47/226e540aa38878ce1194454385701d82df538ccb5ff8db2cf1641dde849a/ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "7e3cff632c1d78023b15f7e3a81c3745cd3f94c044d1e8fa8efbd6b161997bbc"}}, + {name = "ujson-5.11.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7e/81/546042f0b23c9040d61d46ea5ca76f0cc5e0d399180ddfb2ae976ebff5b5/ujson-5.11.0-cp312-cp312-win32.whl",hashes = {sha256 = "be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88"}}, + {name = "ujson-5.11.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/1b/27c05dc8c9728f44875d74b5bfa948ce91f6c33349232619279f35c6e817/ujson-5.11.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f"}}, + {name = "ujson-5.11.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/22/2d/37b6557c97c3409c202c838aa9c960ca3896843b4295c4b7bb2bbd260664/ujson-5.11.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6"}}, + {name = "ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = 
"d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f"}}, + {name = "ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58"}}, + {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26"}}, + {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a"}}, + {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6"}}, + {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b"}}, + {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba"}}, + {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3"}}, + {name = "ujson-5.11.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/fe/d3/9ba310e07969bc9906eb7548731e33a0f448b122ad9705fed699c9b29345/ujson-5.11.0-cp311-cp311-win32.whl",hashes = {sha256 = "e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34"}}, + {name = "ujson-5.11.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/57/f7/da05b4a8819f1360be9e71fb20182f0bb3ec611a36c3f213f4d20709e099/ujson-5.11.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01"}}, + {name = "ujson-5.11.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9a/cc/f3f9ac0f24f00a623a48d97dc3814df5c2dc368cfb00031aa4141527a24b/ujson-5.11.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = 
"abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/52/5b/8c5e33228f7f83f05719964db59f3f9f276d272dc43752fa3bbf0df53e7b/ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04"}}, + {name = "ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/0c/8bf7a4fabfd01c7eed92d9b290930ce6d14910dec708e73538baa38885d1/ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "446e8c11c06048611c9d29ef1237065de0af07cabdd97e6b5b527b957692ec25"}}, + {name = "ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/2e/eeab0b8b641817031ede4f790db4c4942df44a12f44d72b3954f39c6a115/ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "16ccb973b7ada0455201808ff11d48fe9c3f034a6ab5bd93b944443c88299f89"}}, + {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/21/1b/a4e7a41870797633423ea79618526747353fd7be9191f3acfbdee0bf264b/ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3134b783ab314d2298d58cda7e47e7a0f7f71fc6ade6ac86d5dbeaf4b9770fa6"}}, + {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/94/ae/4e0d91b8f6db7c9b76423b3649612189506d5a06ddd3b6334b6d37f77a01/ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "185f93ebccffebc8baf8302c869fac70dd5dd78694f3b875d03a31b03b062cdb"}}, + {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/cc/46b124c2697ca2da7c65c4931ed3cb670646978157aa57a7a60f741c530f/ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d06e87eded62ff0e5f5178c916337d2262fdbc03b31688142a3433eabb6511db"}}, + {name = 
"ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/39/eb/20dd1282bc85dede2f1c62c45b4040bc4c389c80a05983515ab99771bca7/ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "181fb5b15703a8b9370b25345d2a1fd1359f0f18776b3643d24e13ed9c036d4c"}}, + {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/64/a2/80072439065d493e3a4b1fbeec991724419a1b4c232e2d1147d257cac193/ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "a4df61a6df0a4a8eb5b9b1ffd673429811f50b235539dac586bb7e9e91994138"}}, + {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/7e/d77f9e9c039d58299c350c978e086a804d1fceae4fd4a1cc6e8d0133f838/ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6eff24e1abd79e0ec6d7eae651dd675ddbc41f9e43e29ef81e16b421da896915"}}, + {name = "ujson-5.11.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/ab/f1/697559d45acc849cada6b3571d53522951b1a64027400507aabc6a710178/ujson-5.11.0-cp310-cp310-win32.whl",hashes = {sha256 = "30f607c70091483550fbd669a0b37471e5165b317d6c16e75dba2aa967608723"}}, + {name = "ujson-5.11.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/86/a2/70b73a0f55abe0e6b8046d365d74230c20c5691373e6902a599b2dc79ba1/ujson-5.11.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "3d2720e9785f84312b8e2cb0c2b87f1a0b1c53aaab3b2af3ab817d54409012e0"}}, + {name = "ujson-5.11.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/1c/5f/b19104afa455630b43efcad3a24495b9c635d92aa8f2da4f30e375deb1a2/ujson-5.11.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "85e6796631165f719084a9af00c79195d3ebf108151452fefdcb1c8bb50f0105"}}, +] +marker = "sys_platform != \"win32\" and implementation_name == \"cpython\" and \"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "wcwidth" version = "0.2.13" @@ -3071,6 +3268,69 @@ dependencies = [ "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", ] +[[packages]] +name = "websockets" +version = "15.0.1" +requires-python = ">=3.9" +sdist = {name = "websockets-15.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hashes = {sha256 = "82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}} +wheels = [ + {name = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}}, + {name = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}}, + {name = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}}, + {name = 
"websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}}, + {name = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}}, + {name = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}}, + {name = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}}, + {name = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}}, + {name = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}}, + {name = "websockets-15.0.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl",hashes = {sha256 = "ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}}, + {name = "websockets-15.0.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}}, + {name = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}}, + {name = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}}, + {name = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}}, + {name = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}}, + {name = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}}, + {name = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}}, + {name = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}}, + {name = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}}, + {name = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}}, + {name = "websockets-15.0.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl",hashes = {sha256 = "c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}}, + {name = "websockets-15.0.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}}, + {name = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}}, + {name = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}}, + {name = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}}, + {name = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}}, + {name = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}}, + {name = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}}, + {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}}, + {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}}, + {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}}, + {name = "websockets-15.0.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl",hashes = {sha256 = "16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}}, + {name = "websockets-15.0.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}}, + {name = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl",url = 
"https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}}, + {name = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}}, + {name = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}}, + {name = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}}, + {name = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}}, + {name = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}}, + {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}}, + {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}}, + {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}}, + {name = "websockets-15.0.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl",hashes = {sha256 = "1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}}, + {name = "websockets-15.0.1-cp310-cp310-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}}, + {name = "websockets-15.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl",hashes = {sha256 = "f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "win32-setctime" version = "1.2.0" @@ -3084,6 +3344,79 @@ marker = "sys_platform == \"win32\" and \"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "wrapt" +version = "1.17.3" +requires-python = ">=3.8" +sdist = {name = "wrapt-1.17.3.tar.gz", url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hashes = {sha256 = "f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0"}} +wheels = [ + {name = "wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl",url = 
"https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39"}}, + {name = "wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235"}}, + {name = "wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c"}}, + {name = "wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b"}}, + {name = "wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa"}}, + {name = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7"}}, + {name = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4"}}, + {name = "wrapt-1.17.3-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl",hashes = {sha256 = "fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10"}}, + {name = "wrapt-1.17.3-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl",hashes = {sha256 = "e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6"}}, + {name = "wrapt-1.17.3-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl",hashes = {sha256 = "507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58"}}, + {name = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a"}}, + {name = 
"wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067"}}, + {name = "wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454"}}, + {name = "wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e"}}, + {name = "wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f"}}, + {name = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056"}}, + {name = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804"}}, + {name = "wrapt-1.17.3-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl",hashes = {sha256 = "41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977"}}, + {name = "wrapt-1.17.3-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116"}}, + {name = "wrapt-1.17.3-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6"}}, + {name = "wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0"}}, + {name = "wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77"}}, + {name = "wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7"}}, + {name = "wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277"}}, + {name = "wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d"}}, + {name = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa"}}, + {name = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050"}}, + {name = "wrapt-1.17.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl",hashes = {sha256 = "53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8"}}, + {name = "wrapt-1.17.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb"}}, + {name = "wrapt-1.17.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16"}}, + {name = "wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0"}}, + {name = "wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba"}}, + {name = "wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd"}}, + {name = "wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828"}}, + {name = "wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9"}}, + {name = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396"}}, + {name = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc"}}, + {name = "wrapt-1.17.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl",hashes = {sha256 = "4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe"}}, + {name = "wrapt-1.17.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"}}, + {name = "wrapt-1.17.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"}}, + {name = "wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7"}}, + {name = "wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85"}}, + {name = "wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f"}}, + {name = 
"wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311"}}, + {name = "wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1"}}, + {name = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5"}}, + {name = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2"}}, + {name = "wrapt-1.17.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl",hashes = {sha256 = "c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89"}}, + {name = "wrapt-1.17.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77"}}, + {name = "wrapt-1.17.3-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl",hashes = {sha256 = "5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a"}}, + {name = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3f/23/bb82321b86411eb51e5a5db3fb8f8032fd30bd7c2d74bfe936136b2fa1d6/wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}}, + {name = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}}, + {name = "wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c"}}, + {name = "wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775"}}, + {name = "wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/15/dd576273491f9f43dd09fce517f6c2ce6eb4fe21681726068db0d0467096/wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd"}}, + {name = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0c/c4/5eb4ce0d4814521fee7aa806264bf7a114e748ad05110441cd5b8a5c744b/wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05"}}, + {name = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/4b/819e9e0eb5c8dc86f60dfc42aa4e2c0d6c3db8732bce93cc752e604bb5f5/wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418"}}, + {name = "wrapt-1.17.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/f8/83/ed6baf89ba3a56694700139698cf703aac9f0f9eb03dab92f57551bd5385/wrapt-1.17.3-cp310-cp310-win32.whl",hashes = {sha256 = "a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390"}}, + {name = "wrapt-1.17.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2f/90/ee61d36862340ad7e9d15a02529df6b948676b9a5829fd5e16640156627d/wrapt-1.17.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6"}}, + {name = "wrapt-1.17.3-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/bd/c3/cefe0bd330d389c9983ced15d326f45373f4073c9f4a8c2f99b50bfea329/wrapt-1.17.3-cp310-cp310-win_arm64.whl",hashes = {sha256 = "af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18"}}, + {name = "wrapt-1.17.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl",hashes = {sha256 = "7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"}}, +] +marker = "\"default\" in dependency_groups" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "zipp" version = "3.23.0" @@ -3092,7 +3425,7 @@ sdist = {name = "zipp-3.23.0.tar.gz", url = "https://files.pythonhosted.org/pack wheels = [ {name = "zipp-3.23.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl",hashes = {sha256 = "071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}}, ] -marker = "python_full_version < \"3.10.2\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "python_full_version < \"3.10.2\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -3196,13 +3529,6 @@ wheels = [ {name = "pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/75/a7/d65e5d8665c12c3c6ff5edd9709d5836ec9b6f80071b7f4a718c6106e86e/pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1"}}, {name = "pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/f3/4c1dbd754dbaa79dbf8b537800cb2fa1a6e534764fef50ab1f7533226c5c/pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97"}}, {name = "pandas-2.3.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3f/d6/d7f5777162aa9b48ec3910bca5a58c9b5927cfd9cfde3aa64322f5ba4b9f/pandas-2.3.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83"}}, - {name = "pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/6e/21/ecf2df680982616459409b09962a8c2065330c7151dc6538069f3b634acf/pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "4645f770f98d656f11c69e81aeb21c6fca076a44bed3dcbb9396a4311bc7f6d8"}}, - {name = "pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1e/1a/dcb50e44b75419e96b276c9fb023b0f147b3c411be1cd517492aa2a184d4/pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "342e59589cc454aaff7484d75b816a433350b3d7964d7847327edda4d532a2e3"}}, - {name = "pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/2d/55/66cd2b679f6a27398380eac7574bc24746128f74626a3c02b978ea00e5ce/pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1d12f618d80379fde6af007f65f0c25bd3e40251dbd1636480dfffce2cf1e6da"}}, - {name = "pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/1c/5b9b263c80fd5e231b77df6f78cd7426d1d4ad3a4e858e85b7b3d93d0e9c/pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "dd71c47a911da120d72ef173aeac0bf5241423f9bfea57320110a978457e069e"}}, - {name = "pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f7/74/7e817b31413fbb96366ea327d43d1926a9c48c58074e27e094e2839a0e36/pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "09e3b1587f0f3b0913e21e8b32c3119174551deb4a4eba4a89bc7377947977e7"}}, - {name = "pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1f/0f/bc0a44b47eba2f22ae4235719a573d552ef7ad76ed3ea39ae62d554e040b/pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2323294c73ed50f612f67e2bf3ae45aea04dce5690778e08a09391897f35ff88"}}, - {name = "pandas-2.3.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fa/cb/6c32f8fadefa4314b740fbe8f74f6a02423bd1549e7c930826df35ac3c1b/pandas-2.3.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "b4b0de34dc8499c2db34000ef8baad684cfa4cbd836ecee05f323ebfba348c7d"}}, ] marker = "\"default\" in dependency_groups" @@ -3339,17 +3665,8 @@ wheels = [ {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/fc/8cd12f189c6405a4c1cf37bd633aa740a9538c8e40497c231072d0fef5cf/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}}, {name = 
"ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/80/29/c0a017b704aaf3cbf704989785cd9c5d5b8ccec2dae6ac0c53833c84e677/ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl",hashes = {sha256 = "3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}}, {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3a/65/fa39d74db4e2d0cd252355732d966a460a41cd01c6353b820a0952432839/ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl",hashes = {sha256 = "ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/46/ccdef7a84ad745c37cb3d9a81790f28fbc9adf9c237dba682017b123294e/ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl",hashes = {sha256 = "fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/29/09/932360f30ad1b7b79f08757e0a6fb8c5392a52cdcc182779158fe66d25ac/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl",hashes = {sha256 = "bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/2a/5b27602e7a4344c1334e26bf4739746206b7a60a8acdba33a61473468b73/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/da/1c/23497017c554fc06ff5701b29355522cff850f626337fff35d9ab352cb18/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl",url = "https://files.pythonhosted.org/packages/68/e6/f3d4ff3223f9ea49c3b7169ec0268e42bd49f87c70c0e3e853895e4a7ae2/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl",hashes = {sha256 = "d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/84/62/ead07043527642491e5011b143f44b81ef80f1025a96069b7210e0f2f0f3/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/b3/fe4d84446f7e4887e3bea7ceff0a7df23790b5ed625f830e79ace88ebefb/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/6e/b3/7feb99a00bfaa5c6868617bb7651308afde85e5a0b23cd187fe5de65feeb/ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl",hashes = {sha256 = "beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}}, - {name = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/93/07/de635108684b7a5bb06e432b0930c5a04b6c59efe73bd966d8db3cc208f2/ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl",hashes = {sha256 = "040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}}, ] -marker = "platform_python_implementation == \"CPython\" and python_version < \"3.14\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "platform_python_implementation == \"CPython\" and python_version < \"3.14\" and python_version ~= \"3.10\" and \"dev\" in extras or platform_python_implementation == \"CPython\" and python_full_version ~= \"3.9.0\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -3490,26 +3807,6 @@ wheels = [ {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/f8/f6c61fd794229cc3848d144f73754a0c107854372d7261419dcbbd286299/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"}}, {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/79/d3/c029c99801526f859e6b38d34ab87c08993bf3dcea34b11275775001638a/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"}}, {name = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/62/e3/bef7b82c1997579c94de9ac5ea7626d01ae5858aa22bf4fcb38bf220cb3e/xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"}}, - {name = "xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/f6/531dd6858adf8877675270b9d6989b6dacfd1c2d7135b17584fc29866df3/xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301"}}, - {name = "xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7c/a8/b2a42b6c9ae46e233f474f3d307c2e7bca8d9817650babeca048d2ad01d6/xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/92/9ac297e3487818f429bcf369c1c6a097edf5b56ed6fc1feff4c1882e87ef/xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/86/48/c1426dd3c86fc4a52f983301867463472f6a9013fb32d15991e60c9919b6/xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/f3/de/0ab8c79993765c94fc0d0c1a22b454483c58a0161e1b562f58b654f47660/xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc"}}, - {name = 
"xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/b4/332647451ed7d2c021294b7c1e9c144dbb5586b1fb214ad4f5a404642835/xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754"}}, - {name = "xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/f4/1c/a42c0a6cac752f84f7b44a90d1a9fa9047cf70bdba5198a304fde7cc471f/xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/d7/04e1b0daae9dc9b02c73c1664cc8aa527498c3f66ccbc586eeb25bbe9f14/xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c4/f4/05e15e67505228fc19ee98a79e427b3a0b9695f5567cd66ced5d66389883/xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl",hashes = {sha256 = "6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/94/fb/e9028d3645bba5412a09de13ee36df276a567e60bdb31d499dafa46d76ae/xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/02/2c/18c6a622429368274739372d2f86c8125413ec169025c7d8ffb051784bba/xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl",hashes = {sha256 = "30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af"}}, - {name = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/bb/5b55c391084a0321c3809632a018b9b657e59d5966289664f85a645942ac/xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl",hashes = {sha256 = "c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606"}}, - {name = "xxhash-3.5.0-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/86/2b/915049db13401792fec159f57e4f4a5ca7a9768e83ef71d6645b9d0cd749/xxhash-3.5.0-cp39-cp39-win32.whl",hashes = {sha256 = "5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4"}}, - {name = "xxhash-3.5.0-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d5/87/382ef7b24917d7cf4c540ee30f29b283bc87ac5893d2f89b23ea3cdf7d77/xxhash-3.5.0-cp39-cp39-win_amd64.whl",hashes = {sha256 = "220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558"}}, - {name = "xxhash-3.5.0-cp39-cp39-win_arm64.whl",url = "https://files.pythonhosted.org/packages/e2/47/d06b24e2d9c3dcabccfd734d11b5bbebfdf59ceac2c61509d8205dd20ac6/xxhash-3.5.0-cp39-cp39-win_arm64.whl",hashes = {sha256 = "a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/c2/56/30d3df421814947f9d782b20c9b7e5e957f3791cbd89874578011daafcbd/xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9"}}, - {name = 
"xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/82/dd/3c42a1f022ad0d82c852d3cb65493ebac03dcfa8c994465a5fb052b00e3c/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b2/40/8f902ab3bebda228a9b4de69eba988280285a7f7f167b942bc20bb562df9/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/db/87/bd06beb8ccaa0e9e577c9b909a49cfa5c5cd2ca46034342d72dd9ce5bc56/xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0"}}, - {name = "xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bb/f8/505385e2fbd753ddcaafd5550eabe86f6232cbebabad3b2508d411b19153/xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl",hashes = {sha256 = "b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240"}}, ] marker = "\"default\" in dependency_groups" @@ -3529,7 +3826,7 @@ wheels = [ {name = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/02/1165905f14962174e6569076bcc3315809ae1291ed14de6448cc151eedfd/scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}}, {name = "scipy-1.13.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3e/77/dab54fe647a08ee4253963bcd8f9cf17509c8ca64d6335141422fe2e2114/scipy-1.13.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}}, ] -marker = "python_version < \"3.10\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "python_full_version ~= \"3.9.0\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [ @@ -3557,17 +3854,17 @@ wheels = [ {name = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/12/46/de1fbd0c1b5ccaa7f9a005b66761533e2f6a3e560096682683a223631fe9/numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}}, {name = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/cc/dc/d330a6faefd92b446ec0f0dfea4c3207bb1fef3c4771d19cf4543efd2c78/numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl",hashes = {sha256 = "a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}}, ] -marker = "python_version < \"3.10\" and python_version >= \"3.9\" and \"default\" in dependency_groups or python_version < \"3.10\" and python_version >= \"3.9\" and \"dev\" in extras" +marker = "\"default\" in dependency_groups and python_full_version ~= \"3.9.0\" or \"dev\" in extras and python_full_version ~= \"3.9.0\"" [packages.tool.pdm] dependencies = [] [tool.pdm] -hashes = {sha256 = "4909a619da3f004dcd3f4ece16e07e96f81709904464fc614e66b456c5f8c73e"} +hashes = {sha256 = 
"270d4d932c91513087adf7619fe5674be5345433b7a6a4a27147851b95a15892"} strategy = ["inherit_metadata", "static_urls"] [[tool.pdm.targets]] requires_python = "~=3.10" [[tool.pdm.targets]] -requires_python = ">=3.9,<3.10" +requires_python = "~=3.9.0" diff --git a/scripts/generate_pylock.sh b/scripts/generate_pylock.sh new file mode 100755 index 00000000..6c08256f --- /dev/null +++ b/scripts/generate_pylock.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env sh +set -e + +# Script to generate pylock.toml from scratch +# If pylock.toml already exists just run `pdm lock --update-reuse` + +# Check if pdm is available +if ! command -v pdm >/dev/null 2>&1 +then + echo "This script requires 'pdm' but it's not installed." + exit 1 +fi + +# Locking all dependencies to the same version for all supported +# python versions is not possible (mostly due to numpy) +# so we need to lock separately for python >=3.12 and <3.12 +pdm lock --python "~=3.10" --update-reuse +pdm lock --append --python "<3.10" --update-reuse From eb8e84ea81e3ac1cd8871ea7e3a7ef18a0495b72 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Thu, 9 Oct 2025 10:55:52 -0400 Subject: [PATCH 53/90] Bump minimal python version to 3.10 Signed-off-by: Samuel Monson --- pylock.toml | 948 ++++--------------------------------- pyproject.toml | 6 +- scripts/generate_pylock.sh | 4 +- tox.ini | 2 +- 4 files changed, 101 insertions(+), 859 deletions(-) diff --git a/pylock.toml b/pylock.toml index e3f14678..62d45221 100644 --- a/pylock.toml +++ b/pylock.toml @@ -1,10 +1,10 @@ # This file is @generated by PDM. # It is not intended for manual editing. lock-version = "1.0" -requires-python = "<4.0,>=3.9.0" +requires-python = "<4.0,>=3.10.0" environments = [ - "python_version ~= \"3.10\"", - "python_full_version ~= \"3.9.0\"", + "python_version ~= \"3.12\"", + "python_full_version >= \"3.10.0\" and python_version < \"3.12\"", ] extras = ["dev", "recommended"] dependency-groups = ["default"] @@ -224,18 +224,6 @@ wheels = [ {name = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}}, {name = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}}, {name = "mypy-1.15.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}}, - {name = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}}, - {name = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = 
"2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}}, - {name = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}}, - {name = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}}, - {name = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}}, - {name = "mypy-1.15.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}}, - {name = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}}, - {name = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}}, - {name = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}}, - {name = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}}, - {name = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}}, - {name = "mypy-1.15.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}}, {name = 
"mypy-1.15.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl",hashes = {sha256 = "5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}}, ] marker = "\"dev\" in extras" @@ -290,24 +278,6 @@ wheels = [ {name = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl",hashes = {sha256 = "8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}}, {name = "PyYAML-6.0.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl",hashes = {sha256 = "ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}}, {name = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}}, - {name = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}}, - {name = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}}, - {name = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}}, - {name = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}}, - {name = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}}, - {name = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl",hashes = {sha256 = "ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}}, - {name = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}}, - 
{name = "PyYAML-6.0.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl",hashes = {sha256 = "11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}}, - {name = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}}, - {name = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}}, - {name = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}}, - {name = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}}, - {name = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}}, - {name = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}}, - {name = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl",hashes = {sha256 = "936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}}, - {name = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}}, - {name = "PyYAML-6.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}}, - {name = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in 
extras" @@ -510,26 +480,8 @@ wheels = [ {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e"}}, {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539"}}, {name = "scipy-1.15.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/ab/5cc9f80f28f6a7dff646c5756e559823614a42b1939d86dd0ed550470210/scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl",hashes = {sha256 = "993439ce220d25e3696d1b23b233dd010169b62f6456488567e830654ee37a6b"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/4a/66ba30abe5ad1a3ad15bfb0b59d22174012e8056ff448cb1644deccbfed2/scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "34716e281f181a02341ddeaad584205bd2fd3c242063bd3423d61ac259ca7eba"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4b/fa/a7e5b95afd80d24313307f03624acc65801846fa75599034f8ceb9e2cbf6/scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl",hashes = {sha256 = "3b0334816afb8b91dab859281b1b9786934392aa3d527cd847e41bb6f45bee65"}}, - {name = "scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/99/f3aaddccf3588bb4aea70ba35328c204cadd89517a1612ecfda5b2dd9d7a/scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl",hashes = {sha256 = "6db907c7368e3092e24919b5e31c76998b0ce1684d51a90943cb0ed1b4ffd6c1"}}, - {name = "scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/56/c5/1032cdb565f146109212153339f9cb8b993701e9fe56b1c97699eee12586/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889"}}, - {name = "scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/37/89f19c8c05505d0601ed5650156e50eb881ae3918786c8fd7262b4ee66d3/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982"}}, - {name = "scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7e/31/be59513aa9695519b18e1851bb9e487de66f2d31f835201f1b42f5d4d475/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9"}}, - {name = "scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/10/c0/4f5f3eeccc235632aab79b27a74a9130c6c35df358129f7ac8b29f562ac7/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594"}}, - {name = "scipy-1.15.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ab/a7/0ddaf514ce8a8714f6ed243a2b391b41dbb65251affe21ee3077ec45ea9a/scipy-1.15.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "ae48a786a28412d744c62fd7816a4118ef97e5be0bee968ce8f0a2fba7acf3bb"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/2f/4966032c5f8cc7e6a60f1b2e0ad686293b9474b65246b0c642e3ef3badd0/scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl",hashes = {sha256 = "a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a0/6e/0c3bf90fae0e910c274db43304ebe25a6b391327f3f10b5dcc638c090795/scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ea/b1/4deb37252311c1acff7f101f6453f0440794f51b6eacb1aad4459a134081/scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl",hashes = {sha256 = "aef683a9ae6eb00728a542b796f52a5477b78252edede72b8327a886ab63293f"}}, - {name = "scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/38/7d/f457626e3cd3c29b3a49ca115a304cebb8cc6f31b04678f03b216899d3c6/scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl",hashes = {sha256 = "1c832e1bd78dea67d5c16f786681b28dd695a8cb1fb90af2e27580d3d0967e92"}}, - {name = "scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/db/0a/92b1de4a7adc7a15dcf5bddc6e191f6f29ee663b30511ce20467ef9b82e4/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82"}}, - {name = "scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40"}}, - {name = "scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/e1/3df8f83cb15f3500478c889be8fb18700813b95e9e087328230b98d547ff/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e"}}, - {name = "scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c"}}, - {name = "scipy-1.15.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d1/84/55bc4881973d3f79b479a5a2e2df61c8c9a04fcb986a213ac9c02cfb659b/scipy-1.15.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13"}}, -] -marker = "python_version ~= \"3.10\"" +] +marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [ @@ -572,32 +524,8 @@ wheels = [ {name = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"}}, {name = "numpy-2.2.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl",hashes = {sha256 = "4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"}}, {name = "numpy-2.2.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl",hashes = {sha256 = "3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42"}}, - {name = "numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl",hashes = {sha256 = "481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491"}}, - {name = "numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a"}}, - {name = "numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf"}}, - {name = "numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1"}}, - {name = "numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab"}}, - {name = "numpy-2.2.6-cp311-cp311-win32.whl",url = 
"https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl",hashes = {sha256 = "0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47"}}, - {name = "numpy-2.2.6-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl",hashes = {sha256 = "37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163"}}, - {name = "numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl",hashes = {sha256 = "5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf"}}, - {name = "numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83"}}, - {name = "numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915"}}, - {name = "numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680"}}, - {name = "numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289"}}, - {name = "numpy-2.2.6-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl",hashes = {sha256 = "b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d"}}, - {name = "numpy-2.2.6-cp310-cp310-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl",hashes = {sha256 = "7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"}}, - {name = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"}}, -] -marker = "python_version ~= \"3.10\"" +] +marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -664,18 +592,6 @@ wheels = [ {name = "tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd"}}, {name = "tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e"}}, {name = "tiktoken-0.11.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f"}}, - {name = "tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/91/912b459799a025d2842566fe1e902f7f50d54a1ce8a0f236ab36b5bd5846/tiktoken-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl",hashes = {sha256 = "4ae374c46afadad0f501046db3da1b36cd4dfbfa52af23c998773682446097cf"}}, - {name = "tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/e9/6faa6870489ce64f5f75dcf91512bf35af5864583aee8fcb0dcb593121f5/tiktoken-0.11.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "25a512ff25dc6c85b58f5dd4f3d8c674dc05f96b02d66cdacf628d26a4e4866b"}}, - {name = "tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/a1/3e/a05d1547cf7db9dc75d1461cfa7b556a3b48e0516ec29dfc81d984a145f6/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2130127471e293d385179c1f3f9cd445070c0772be73cdafb7cec9a3684c0458"}}, - {name = "tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/34/9a/db7a86b829e05a01fd4daa492086f708e0a8b53952e1dbc9d380d2b03677/tiktoken-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "21e43022bf2c33f733ea9b54f6a3f6b4354b909f5a73388fb1b9347ca54a069c"}}, - {name = "tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9d/bb/52edc8e078cf062ed749248f1454e9e5cfd09979baadb830b3940e522015/tiktoken-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "adb4e308eb64380dc70fa30493e21c93475eaa11669dea313b6bbf8210bfd013"}}, - {name = "tiktoken-0.11.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/60/d9/884b6cd7ae2570ecdcaffa02b528522b18fef1cbbfdbcaa73799807d0d3b/tiktoken-0.11.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "ece6b76bfeeb61a125c44bbefdfccc279b5288e6007fbedc0d32bfec602df2f2"}}, - {name = "tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/4d/c6a2e7dca2b4f2e9e0bfd62b3fe4f114322e2c028cfba905a72bc76ce479/tiktoken-0.11.0-cp310-cp310-macosx_10_12_x86_64.whl",hashes = {sha256 = "8a9b517d6331d7103f8bef29ef93b3cca95fa766e293147fe7bacddf310d5917"}}, - {name = "tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/41/54/3739d35b9f94cb8dc7b0db2edca7192d5571606aa2369a664fa27e811804/tiktoken-0.11.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "b4ddb1849e6bf0afa6cc1c5d809fb980ca240a5fffe585a04e119519758788c0"}}, - {name = "tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/dd/f4/ec8d43338d28d53513004ebf4cd83732a135d11011433c58bf045890cc10/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "10331d08b5ecf7a780b4fe4d0281328b23ab22cdb4ff65e68d56caeda9940ecc"}}, - {name = "tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/94/80/fb0ada0a882cb453caf519a4bf0d117c2a3ee2e852c88775abff5413c176/tiktoken-0.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b062c82300341dc87e0258c69f79bed725f87e753c21887aea90d272816be882"}}, - {name = "tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2f/e9/6c104355b463601719582823f3ea658bc3aa7c73d1b3b7553ebdc48468ce/tiktoken-0.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "195d84bec46169af3b1349a1495c151d37a0ff4cba73fd08282736be7f92cc6c"}}, - {name = "tiktoken-0.11.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/94/75/eaa6068f47e8b3f0aab9e05177cce2cf5aa2cc0ca93981792e620d4d4117/tiktoken-0.11.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "fe91581b0ecdd8783ce8cb6e3178f2260a3912e8724d2f2d49552b98714641a1"}}, ] marker = "\"recommended\" in extras" @@ -755,18 +671,6 @@ wheels = [ {name = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}}, {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}}, {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}}, - {name = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}}, - {name = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}}, - {name = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}}, - {name = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}}, - {name = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}}, - {name = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}}, - {name = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}}, - {name = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/5a/62d5800358a78cc25c8a6c72ef8b10851bdb8cca22e14d9c74167b7f86da/uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}}, - {name = 
"uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/96/63695e0ebd7da6c741ccd4489b5947394435e198a1382349c17b1146bb97/uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}}, - {name = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/61/e0/f0f8ec84979068ffae132c58c79af1de9cceeb664076beea86d941af1a30/uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}}, - {name = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/bf/fe/5e94a977d058a54a19df95f12f7161ab6e323ad49f4dabc28822eb2df7ea/uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}}, - {name = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/dd/c7179618e46092a77e036650c1f056041a028a35c4d76945089fcfc38af8/uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}}, ] marker = "\"default\" in dependency_groups" @@ -887,23 +791,6 @@ wheels = [ {name = "msgpack-1.1.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl",hashes = {sha256 = "1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029"}}, {name = "msgpack-1.1.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b"}}, {name = "msgpack-1.1.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69"}}, - {name = "msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c"}}, - {name = "msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0"}}, - {name = "msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296"}}, - {name = "msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef"}}, - {name = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c"}}, - {name = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e"}}, - {name = "msgpack-1.1.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl",hashes = {sha256 = "602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e"}}, - {name = "msgpack-1.1.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68"}}, - {name = "msgpack-1.1.2-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl",hashes = {sha256 = "86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406"}}, - {name = "msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2"}}, - {name = "msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87"}}, - {name = "msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/e5/c2241de64bfceac456b140737812a2ab310b10538a7b34a1d393b748e095/msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251"}}, - {name = "msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a"}}, - {name = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0e/74/2957703f0e1ef20637d6aead4fbb314330c26f39aa046b348c7edcf6ca6b/msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f"}}, - {name = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a5/09/3bfc12aa90f77b37322fc33e7a8a7c29ba7c8edeadfa27664451801b9860/msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f"}}, - {name = "msgpack-1.1.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/4b/4f/05fcebd3b4977cb3d840f7ef6b77c51f8582086de5e642f3fefee35c86fc/msgpack-1.1.2-cp310-cp310-win32.whl",hashes = {sha256 = "e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9"}}, - {name = "msgpack-1.1.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d0/3e/b4547e3a34210956382eed1c85935fff7e0f9b98be3106b3745d7dec9c5e/msgpack-1.1.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa"}}, ] marker = "\"default\" in dependency_groups" @@ -974,42 +861,6 @@ wheels = [ {name = "pillow-11.3.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl",hashes = {sha256 = "7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"}}, {name = "pillow-11.3.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"}}, {name = "pillow-11.3.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"}}, - {name = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl",url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl",hashes = {sha256 = "1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}}, - {name = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}}, - {name = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}}, - {name = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}}, - {name = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"}}, - {name = "pillow-11.3.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl",hashes = {sha256 = "b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"}}, - {name = "pillow-11.3.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"}}, - {name = "pillow-11.3.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = 
"465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}}, - {name = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}}, - {name = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl",hashes = {sha256 = "1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}}, - {name = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}}, - {name = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}}, - {name = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}}, - 
{name = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"}}, - {name = "pillow-11.3.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl",hashes = {sha256 = "89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"}}, - {name = "pillow-11.3.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"}}, - {name = "pillow-11.3.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}}, - {name = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}}, ] marker = "\"default\" in dependency_groups" @@ -1184,51 +1035,6 @@ wheels = [ {name = "pydantic_core-2.33.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl",hashes = {sha256 = "9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}}, {name = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}}, {name = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl",hashes = {sha256 = "4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl",hashes = {sha256 = "fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl",hashes = {sha256 = "bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl",hashes = {sha256 = "6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}}, - {name = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl",hashes = {sha256 = "6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes 
= {sha256 = "2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}}, - {name = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl",hashes = {sha256 = "2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = 
"d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl",hashes = {sha256 = "031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl",hashes = {sha256 = "f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl",hashes = {sha256 = "0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}}, - {name = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}}, - {name = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}}, - {name = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}}, ] marker = "\"default\" in dependency_groups" @@ -1238,12 +1044,12 @@ dependencies = [ ] [[packages]] -name = "typing-extensions" -version = "4.14.1" -requires-python = ">=3.9" -sdist = {name = "typing_extensions-4.14.1.tar.gz", url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hashes = {sha256 = "38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}} +name = "packaging" +version = "25.0" +requires-python = ">=3.8" +sdist = {name = "packaging-25.0.tar.gz", url = 
"https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hashes = {sha256 = "d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}} wheels = [ - {name = "typing_extensions-4.14.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl",hashes = {sha256 = "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}}, + {name = "packaging-25.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl",hashes = {sha256 = "29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1251,55 +1057,12 @@ marker = "\"default\" in dependency_groups or \"dev\" in extras" dependencies = [] [[packages]] -name = "tomli" -version = "2.2.1" -requires-python = ">=3.8" -sdist = {name = "tomli-2.2.1.tar.gz", url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hashes = {sha256 = "cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}} -wheels = [ - {name = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}}, - {name = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}}, - {name = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}}, - {name = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}}, - {name = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}}, - {name = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}}, - {name = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}}, - {name = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}}, - {name = "tomli-2.2.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl",hashes = {sha256 = "d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}}, - {name = "tomli-2.2.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}}, - {name = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}}, - {name = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}}, - {name = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}}, - {name = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}}, - {name = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}}, - {name = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}}, - {name = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}}, - {name = 
"tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}}, - {name = "tomli-2.2.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl",hashes = {sha256 = "889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}}, - {name = "tomli-2.2.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}}, - {name = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}}, - {name = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}}, - {name = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}}, - {name = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}}, - {name = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}}, - {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}}, - {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}}, - {name = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}}, - {name = "tomli-2.2.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl",hashes = {sha256 = "465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}}, - {name = "tomli-2.2.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}}, - {name = "tomli-2.2.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl",hashes = {sha256 = "cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}}, -] -marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [] - -[[packages]] -name = "packaging" -version = "25.0" -requires-python = ">=3.8" -sdist = {name = "packaging-25.0.tar.gz", url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hashes = {sha256 = "d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}} +name = "typing-extensions" +version = "4.14.1" +requires-python = ">=3.9" +sdist = {name = "typing_extensions-4.14.1.tar.gz", url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hashes = {sha256 = "38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}} wheels = [ - {name = "packaging-25.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl",hashes = {sha256 = "29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}}, + {name = "typing_extensions-4.14.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl",hashes = {sha256 = "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1441,19 +1204,6 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] -[[packages]] -name = "backports-asyncio-runner" -version = "1.2.0" -requires-python = "<3.11,>=3.8" -sdist = {name = "backports_asyncio_runner-1.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hashes = {sha256 = "a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"}} -wheels = [ - {name = "backports_asyncio_runner-1.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl",hashes = {sha256 = "0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"}}, -] -marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [] - [[packages]] name = "charset-normalizer" version = "3.4.2" 
@@ -1486,32 +1236,6 @@ wheels = [ {name = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}}, {name = "charset_normalizer-3.4.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl",hashes = {sha256 = "db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}}, {name = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl",hashes = {sha256 = "daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}}, - {name = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl",hashes = {sha256 = "e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}}, - {name = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}}, {name = "charset_normalizer-3.4.2-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl",hashes = {sha256 = "7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" @@ -1611,40 +1335,6 @@ wheels = [ {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/22/32/5501ab525a47ba23c20613e568174d6c63aa09e2caa22cded5c6ea8e3ada/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0"}}, {name = "aiohttp-3.12.14-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/06/af/28e24574801fcf1657945347ee10df3892311c2829b41232be6089e461e7/aiohttp-3.12.14-cp312-cp312-win32.whl",hashes = {sha256 = "15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729"}}, {name = "aiohttp-3.12.14-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/98/d5/7ac2464aebd2eecac38dbe96148c9eb487679c512449ba5215d233755582/aiohttp-3.12.14-cp312-cp312-win_amd64.whl",hashes = {sha256 = "3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338"}}, - {name = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/53/e1/8029b29316971c5fa89cec170274582619a01b3d82dd1036872acc9bc7e8/aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597"}}, - {name = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/bd/4f204cf1e282041f7b7e8155f846583b19149e0872752711d0da5e9cc023/aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393"}}, - {name = "aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d6/0f/2a580fcdd113fe2197a3b9df30230c7e85bb10bf56f7915457c60e9addd9/aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/38/78/2c1089f6adca90c3dd74915bafed6d6d8a87df5e3da74200f6b3a8b8906f/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/4a/c8/ce6c7a34d9c589f007cfe064da2d943b3dee5aabc64eaecd21faf927ab11/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/18/10/431cd3d089de700756a56aa896faf3ea82bee39d22f89db7ddc957580308/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/fa/b2/26f4524184e0f7ba46671c512d4b03022633bcf7d32fa0c6f1ef49d55800/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e0/30/aadcdf71b510a718e3d98a7bfeaea2396ac847f218b7e8edb241b09bd99a/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe"}}, - {name = "aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/67/7f/7ccf11756ae498fdedc3d689a0c36ace8fc82f9d52d3517da24adf6e9a74/aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/6b/4d/35ebc170b1856dd020c92376dbfe4297217625ef4004d56587024dc2289c/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/7b/24/46dc0380146f33e2e4aa088b92374b598f5bdcde1718c77e8d1a0094f1a4/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/2f/0a/46599d7d19b64f4d0fe1b57bdf96a9a40b5c125f0ae0d8899bc22e91fdce/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/08/86/b21b682e33d5ca317ef96bd21294984f72379454e689d7da584df1512a19/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/4f/45/f639482530b1396c365f23c5e3b1ae51c9bc02ba2b2248ca0c855a730059/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0"}}, - {name = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/e5/39635a9e06eed1d73671bd4079a3caf9cf09a49df08490686f45a710b80e/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28"}}, - {name = "aiohttp-3.12.14-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/51/e1/7f1c77515d369b7419c5b501196526dad3e72800946c0099594c1f0c20b4/aiohttp-3.12.14-cp311-cp311-win32.whl",hashes = {sha256 = "a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b"}}, - {name = "aiohttp-3.12.14-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/06/24/a6bf915c85b7a5b07beba3d42b3282936b51e4578b64a51e8e875643c276/aiohttp-3.12.14-cp311-cp311-win_amd64.whl",hashes = {sha256 = 
"0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced"}}, - {name = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/0c/88/f161f429f9de391eee6a5c2cffa54e2ecd5b7122ae99df247f7734dfefcb/aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248"}}, - {name = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/b5/24fa382a69a25d242e2baa3e56d5ea5227d1b68784521aaf3a1a8b34c9a4/aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb"}}, - {name = "aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/09/67/fda1bc34adbfaa950d98d934a23900918f9d63594928c70e55045838c943/aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/36/96/3ce1ea96d3cf6928b87cfb8cdd94650367f5c2f36e686a1f5568f0f13754/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/be/04/ddea06cb4bc7d8db3745cf95e2c42f310aad485ca075bd685f0e4f0f6b65/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/73/66/63942f104d33ce6ca7871ac6c1e2ebab48b88f78b2b7680c37de60f5e8cd/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/20/00/aab615742b953f04b48cb378ee72ada88555b47b860b98c21c458c030a23/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d6/4f/ef6d9f77225cf27747368c37b3d69fac1f8d6f9d3d5de2d410d155639524/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61"}}, - {name = "aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/37/e1/e98a43c15aa52e9219a842f18c59cbae8bbe2d50c08d298f17e9e8bafa38/aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/71/5c/29c6dfb49323bcdb0239bf3fc97ffcf0eaf86d3a60426a3287ec75d67721/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/79/60/ec90782084090c4a6b459790cfd8d17be2c5662c9c4b2d21408b2f2dc36c/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/22/89/205d3ad30865c32bc472ac13f94374210745b05bd0f2856996cb34d53396/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/48/ae/2f66edaa8bd6db2a4cba0386881eb92002cdc70834e2a93d1d5607132c7e/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/08/3a/fa73bfc6e21407ea57f7906a816f0dc73663d9549da703be05dbd76d2dc3/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8"}}, - {name = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e3/b3/751124b8ceb0831c17960d06ee31a4732cb4a6a006fdbfa1153d07c52226/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3"}}, - {name = "aiohttp-3.12.14-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/81/3c/72477a1d34edb8ab8ce8013086a41526d48b64f77e381c8908d24e1c18f5/aiohttp-3.12.14-cp310-cp310-win32.whl",hashes = {sha256 = "ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c"}}, - {name = "aiohttp-3.12.14-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a2/c4/8aec4ccf1b822ec78e7982bd5cf971113ecce5f773f04039c76a083116fc/aiohttp-3.12.14-cp310-cp310-win_amd64.whl",hashes = {sha256 = "cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1720,42 +1410,6 @@ wheels = [ {name = "multidict-6.6.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl",hashes = {sha256 = "73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5"}}, {name = "multidict-6.6.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17"}}, {name = "multidict-6.6.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b"}}, - {name = "multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl",url = 
"https://files.pythonhosted.org/packages/08/f0/1a39863ced51f639c81a5463fbfa9eb4df59c20d1a8769ab9ef4ca57ae04/multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c"}}, - {name = "multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/0e/a7cfa451c7b0365cd844e90b41e21fab32edaa1e42fc0c9f68461ce44ed7/multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df"}}, - {name = "multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c6/bb/a14a4efc5ee748cc1904b0748be278c31b9295ce5f4d2ef66526f410b94d/multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/c2/f8/410677d563c2d55e063ef74fe578f9d53fe6b0a51649597a5861f83ffa15/multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/fd/df/2b787f80059314a98e1ec6a4cc7576244986df3e56b3c755e6fc7c99e038/multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/05/f2/f9117089151b9a8ab39f9019620d10d9718eec2ac89e7ca9d30f3ec78e96/multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/93/2d/7115300ec5b699faa152c56799b089a53ed69e399c3c2d528251f0aeda1a/multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/15/ea/ff4bab367623e39c20d3b07637225c7688d79e4f3cc1f3b9f89867677f9a/multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9"}}, - {name = "multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/74/07/2c9246cda322dfe08be85f1b8739646f2c4c5113a1422d7a407763422ec4/multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/a8/62/279c13d584207d5697a752a66ffc9bb19355a95f7659140cb1b3cf82180e/multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/69/cc/e06636f48c6d51e724a8bc8d9e1db5f136fe1df066d7cafe37ef4000f86a/multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/89/a4/66c9d8fb9acf3b226cdd468ed009537ac65b520aebdc1703dd6908b19d33/multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/cf/01/c69e0317be556e46257826d5449feb4e6aa0d18573e567a48a2c14156f1f/multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/c0/da/9cc1da0299762d20e626fe0042e71b5694f9f72d7d3f9678397cbaa71b2b/multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56"}}, - {name = "multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e6/91/b22756afec99cc31105ddd4a52f95ab32b1a4a58f4d417979c570c4a922e/multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183"}}, - {name = "multidict-6.6.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/be/f1/adcc185b878036a20399d5be5228f3cbe7f823d78985d101d425af35c800/multidict-6.6.3-cp311-cp311-win32.whl",hashes = {sha256 = "9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5"}}, - {name = "multidict-6.6.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e0/d4/27652c1c6526ea6b4f5ddd397e93f4232ff5de42bea71d339bc6a6cc497f/multidict-6.6.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2"}}, - {name = "multidict-6.6.3-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/16/18/23f4932019804e56d3c2413e237f866444b774b0263bcb81df2fdecaf593/multidict-6.6.3-cp311-cp311-win_arm64.whl",hashes = {sha256 = "d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb"}}, - {name = "multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/0b/67/414933982bce2efce7cbcb3169eaaf901e0f25baec69432b4874dfb1f297/multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "a2be5b7b35271f7fff1397204ba6708365e3d773579fe2a30625e16c4b4ce817"}}, - {name = "multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/fe/d8a3ee1fad37dc2ef4f75488b0d9d4f25bf204aad8306cbab63d97bff64a/multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "12f4581d2930840295c461764b9a65732ec01250b46c6b2c510d7ee68872b140"}}, - {name = "multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/1f/e0/265d89af8c98240265d82b8cbcf35897f83b76cd59ee3ab3879050fd8c45/multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "dd7793bab517e706c9ed9d7310b06c8672fd0aeee5781bfad612f56b8e0f7d14"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/e6/05/6b759379f7e8e04ccc97cfb2a5dcc5cdbd44a97f072b2272dc51281e6a40/multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "72d8815f2cd3cf3df0f83cac3f3ef801d908b2d90409ae28102e0553af85545a"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/4e/f5/8d5a15488edd9a91fa4aad97228d785df208ed6298580883aa3d9def1959/multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "531e331a2ee53543ab32b16334e2deb26f4e6b9b28e41f8e0c87e99a6c8e2d69"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/6e/b5/a8f317d47d0ac5bb746d6d8325885c8967c2a8ce0bb57be5399e3642cccb/multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "42ca5aa9329a63be8dc49040f63817d1ac980e02eeddba763a9ae5b4027b9c9c"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/76/88/18b2a0d5e80515fa22716556061189c2853ecf2aa2133081ebbe85ebea38/multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "208b9b9757060b9faa6f11ab4bc52846e4f3c2fb8b14d5680c8aac80af3dc751"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/62/bf/ebfcfd6b55a1b05ef16d0775ae34c0fe15e8dab570d69ca9941073b969e7/multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "acf6b97bd0884891af6a8b43d0f586ab2fcf8e717cbd47ab4bdddc09e20652d8"}}, - {name = "multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/44/11/780615a98fd3775fc309d0234d563941af69ade2df0bb82c91dda6ddaea1/multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "68e9e12ed00e2089725669bdc88602b0b6f8d23c0c95e52b95f0bc69f7fe9b55"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/28/3d/35f33045e21034b388686213752cabc3a1b9d03e20969e6fa8f1b1d82db1/multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "05db2f66c9addb10cfa226e1acb363450fab2ff8a6df73c622fefe2f5af6d4e7"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/6e/cc/ff84c03b95b430015d2166d9aae775a3985d757b94f6635010d0038d9241/multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "0db58da8eafb514db832a1b44f8fa7906fdd102f7d982025f816a93ba45e3dcb"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/2e/f0/8cd49a0b37bdea673a4b793c2093f2f4ba8e7c9d6d7c9bd672fd6d38cd11/multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "14117a41c8fdb3ee19c743b1c027da0736fdb79584d61a766da53d399b71176c"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/96/19/5d9a0cfdafe65d82b616a45ae950975820289069f885328e8185e64283c2/multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "877443eaaabcd0b74ff32ebeed6f6176c71850feb7d6a1d2db65945256ea535c"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/e6/dc/c90066151da87d1e489f147b9b4327927241e65f1876702fafec6729c014/multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "70b72e749a4f6e7ed8fb334fa8d8496384840319512746a5f42fa0aec79f4d61"}}, - {name = "multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ec/39/458afb0cccbb0ee9164365273be3e039efddcfcb94ef35924b7dbdb05db0/multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "43571f785b86afd02b3855c5ac8e86ec921b760298d6f82ff2a61daf5a35330b"}}, - {name = "multidict-6.6.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/35/38/0016adac3990426610a081787011177e661875546b434f50a26319dc8372/multidict-6.6.3-cp310-cp310-win32.whl",hashes = {sha256 = "20c5a0c3c13a15fd5ea86c42311859f970070e4e24de5a550e99d7c271d76318"}}, - {name = "multidict-6.6.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f3/d2/17897a8f3f2c5363d969b4c635aa40375fe1f09168dc09a7826780bfb2a4/multidict-6.6.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485"}}, - {name = "multidict-6.6.3-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/2d/5f/d4a717c1e457fe44072e33fa400d2b93eb0f2819c4d669381f925b7cba1f/multidict-6.6.3-cp310-cp310-win_arm64.whl",hashes = {sha256 = "769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5"}}, {name = "multidict-6.6.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl",hashes = {sha256 = "8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1765,19 +1419,6 @@ dependencies = [ "typing-extensions>=4.1.0; python_version < \"3.11\"", ] -[[packages]] -name = "async-timeout" -version = "5.0.1" -requires-python = ">=3.8" -sdist = {name = "async_timeout-5.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hashes = {sha256 = "d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}} -wheels = [ - {name = "async_timeout-5.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl",hashes = {sha256 = "39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}}, -] -marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"default\" in dependency_groups or python_full_version ~= \"3.9.0\" and \"dev\" in extras" - 
-[packages.tool.pdm] -dependencies = [] - [[packages]] name = "h2" version = "4.2.0" @@ -1903,37 +1544,6 @@ wheels = [ {name = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}}, {name = "regex-2024.11.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl",hashes = {sha256 = "32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}}, {name = "regex-2024.11.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}}, - {name = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/58/58/7e4d9493a66c88a7da6d205768119f51af0f684fe7be7bac8328e217a52c/regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}}, - {name = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/34/4c/8f8e631fcdc2ff978609eaeef1d6994bf2f028b59d9ac67640ed051f1218/regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}}, - {name = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/1b/f0e4d13e6adf866ce9b069e191f303a30ab1277e037037a365c3aad5cc9c/regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}}, - {name = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}}, - {name = 
"regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}}, - {name = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}}, - {name = "regex-2024.11.6-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/26/b7/b14e2440156ab39e0177506c08c18accaf2b8932e39fb092074de733d868/regex-2024.11.6-cp311-cp311-win32.whl",hashes = {sha256 = "c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}}, - {name = "regex-2024.11.6-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl",hashes = {sha256 = "02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}}, - {name = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/95/3c/4651f6b130c6842a8f3df82461a8950f923925db8b6961063e82744bddcc/regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}}, - {name = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/51/9f35d12da8434b489c7b7bffc205c474a0a9432a889457026e9bc06a297a/regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}}, - {name = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bd/18/b731f5510d1b8fb63c6b6d3484bfa9a59b84cc578ac8b5172970e05ae07c/regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = 
"164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/78/a2/6dd36e16341ab95e4c6073426561b9bfdeb1a9c9b63ab1b579c2e96cb105/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1b/2b/323e72d5d2fd8de0d9baa443e1ed70363ed7e7b2fb526f5950c5cb99c364/regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/90/30/63373b9ea468fbef8a907fd273e5c329b8c9535fee36fc8dba5fecac475d/regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/98/26d3830875b53071f1f0ae6d547f1d98e964dd29ad35cbf94439120bb67a/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/87/55/eb2a068334274db86208ab9d5599ffa63631b9f0f67ed70ea7c82a69bbc8/regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}}, - {name = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",url = "https://files.pythonhosted.org/packages/74/c0/be707bcfe98254d8f9d2cff55d216e946f4ea48ad2fd8cf1428f8c5332ba/regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl",hashes = {sha256 = "f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/dc/bb45572ceb49e0f6509f7596e4ba7031f6819ecb26bc7610979af5a77f45/regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/5a/db/f43fd75dc4c0c2d96d0881967897926942e935d700863666f3c844a72ce6/regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/99/d7/f94154db29ab5a89d69ff893159b19ada89e76b915c1293e98603d39838c/regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl",url = 
"https://files.pythonhosted.org/packages/f7/17/3cbfab1f23356fbbf07708220ab438a7efa1e0f34195bf857433f79f1788/regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}}, - {name = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/f2/48b393b51900456155de3ad001900f94298965e1cad1c772b87f9cfea011/regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}}, - {name = "regex-2024.11.6-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/45/3f/ef9589aba93e084cd3f8471fded352826dcae8489b650d0b9b27bc5bba8a/regex-2024.11.6-cp310-cp310-win32.whl",hashes = {sha256 = "b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}}, - {name = "regex-2024.11.6-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/42/7e/5f1b92c8468290c465fd50c5318da64319133231415a8aa6ea5ab995a815/regex-2024.11.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}}, ] marker = "\"default\" in dependency_groups or \"recommended\" in extras" @@ -2083,40 +1693,6 @@ wheels = [ {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"}}, {name = "yarl-1.20.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl",hashes = {sha256 = "daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"}}, {name = "yarl-1.20.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"}}, - {name = "yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e"}}, - {name = "yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b"}}, - {name = "yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4"}}, - {name = 
"yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8"}}, - {name = "yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1"}}, - {name = "yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e"}}, - {name = "yarl-1.20.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl",hashes = {sha256 = "597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773"}}, - {name = "yarl-1.20.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e"}}, - {name = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"}}, - {name = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"}}, - {name = "yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = 
"1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23"}}, - {name = "yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24"}}, - {name = "yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13"}}, - {name = "yarl-1.20.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl",hashes = {sha256 = "6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"}}, - {name = "yarl-1.20.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"}}, {name = "yarl-1.20.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl",hashes = {sha256 = "83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2182,38 +1758,6 @@ wheels = [ {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"}}, {name = "propcache-0.3.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl",hashes = {sha256 = "df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"}}, {name = "propcache-0.3.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"}}, - {name = "propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be"}}, - {name = "propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f"}}, - {name = "propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df"}}, - {name = "propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf"}}, - {name = "propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e"}}, - {name = "propcache-0.3.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl",hashes = {sha256 = "36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897"}}, - {name = "propcache-0.3.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39"}}, - {name = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}}, - {name = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}}, - {name = "propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614"}}, - {name = "propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl",url = 
"https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b"}}, - {name = "propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c"}}, - {name = "propcache-0.3.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl",hashes = {sha256 = "404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"}}, - {name = "propcache-0.3.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"}}, {name = "propcache-0.3.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl",hashes = {sha256 = "98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2335,40 +1879,6 @@ wheels = [ {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"}}, {name = "frozenlist-1.7.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl",hashes = {sha256 = "426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"}}, {name = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"}}, - {name = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a"}}, - {name = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750"}}, - {name = "frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86"}}, - {name = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = 
"fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898"}}, - {name = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56"}}, - {name = "frozenlist-1.7.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl",hashes = {sha256 = "284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7"}}, - {name = "frozenlist-1.7.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d"}}, - {name = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/af/36/0da0a49409f6b47cc2d060dc8c9040b897b5902a8a4e37d9bc1deb11f680/frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"}}, - {name = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/77/f0/77c11d13d39513b298e267b22eb6cb559c103d56f155aa9a49097221f0b6/frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"}}, - {name = "frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/37/12/9d07fa18971a44150593de56b2f2947c46604819976784bcf6ea0d5db43b/frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/70/34/f73539227e06288fcd1f8a76853e755b2b48bca6747e99e283111c18bcd4/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/fb/68/c1d9c2f4a6e438e14613bad0f2973567586610cc22dcb1e1241da71de9d3/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b9/d0/98e8f9a515228d708344d7c6986752be3e3192d1795f748c24bcf154ad99/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/79/df/8a11bcec5600557f40338407d3e5bea80376ed1c01a6c0910fcfdc4b8993/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/50/82/41cb97d9c9a5ff94438c63cc343eb7980dac4187eb625a51bdfdb7707314/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615"}}, - {name = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/47/f9179ee5ee4f55629e4f28c660b3fdf2775c8bfde8f9c53f2de2d93f52a9/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1a/52/df81e41ec6b953902c8b7e3a83bee48b195cb0e5ec2eabae5d8330c78038/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/84/17/30d6ea87fa95a9408245a948604b82c1a4b8b3e153cea596421a2aef2754/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8f/00/ecbeb51669e3c3df76cf2ddd66ae3e48345ec213a55e3887d216eb4fbab3/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1a/c0/c224ce0e0eb31cc57f67742071bb470ba8246623c1823a7530be0e76164c/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/55/3c/34cb694abf532f31f365106deebdeac9e45c19304d83cf7d51ebbb4ca4d1/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd"}}, - {name = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/c0/2052d8b6cecda2e70bd81299e3512fa332abb6dcd2969b9c80dfcdddbf75/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718"}}, - {name = "frozenlist-1.7.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/c5/bf/7dcebae315436903b1d98ffb791a09d674c88480c158aa171958a3ac07f0/frozenlist-1.7.0-cp310-cp310-win32.whl",hashes = {sha256 = "400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"}}, - {name = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/8f/5f/f69818f017fa9a3d24d1ae39763e29b7f60a59e46d5f91b9c6b21622f4cd/frozenlist-1.7.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"}}, {name = "frozenlist-1.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl",hashes = {sha256 = "9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2542,31 +2052,6 @@ wheels = [ {name = "coverage-7.10.7-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl",hashes = {sha256 = "77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}}, {name = "coverage-7.10.7-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl",hashes = {sha256 = "f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}}, {name = "coverage-7.10.7-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl",hashes = {sha256 = "bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}}, - {name = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}}, - {name = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}}, - {name = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}}, - {name = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}}, - {name = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}}, - {name = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}}, - {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}}, - {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}}, - {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl",hashes = {sha256 = "88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}}, - {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}}, - {name = "coverage-7.10.7-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl",hashes = {sha256 = "972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}}, - {name = "coverage-7.10.7-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl",hashes = {sha256 = "a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}}, - {name = "coverage-7.10.7-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl",hashes = {sha256 = "736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}}, - {name = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}}, - {name = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}}, - {name = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}}, - {name = 
"coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}}, - {name = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}}, - {name = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}}, - {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}}, - {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}}, - {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl",hashes = {sha256 = "606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}}, - {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}}, - {name = "coverage-7.10.7-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl",hashes = {sha256 = "b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}}, - {name = "coverage-7.10.7-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl",hashes = {sha256 = "3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}}, {name = "coverage-7.10.7-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl",hashes = {sha256 = "f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}}, ] marker = "\"dev\" in extras" @@ -2574,21 +2059,6 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] -[[packages]] -name = "exceptiongroup" -version = "1.3.0" -requires-python = ">=3.7" -sdist = 
{name = "exceptiongroup-1.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hashes = {sha256 = "b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}} -wheels = [ - {name = "exceptiongroup-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl",hashes = {sha256 = "4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}}, -] -marker = "python_version < \"3.11\" and python_version ~= \"3.10\" and \"default\" in dependency_groups or python_version < \"3.11\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"default\" in dependency_groups or python_full_version ~= \"3.9.0\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [ - "typing-extensions>=4.6.0; python_version < \"3.13\"", -] - [[packages]] name = "h11" version = "0.16.0" @@ -2635,20 +2105,6 @@ wheels = [ {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}}, {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}}, {name = "httptools-0.6.4-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl",hashes = {sha256 = "db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}}, - {name = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}}, - {name = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}}, - {name = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}}, - {name = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}}, - {name = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}}, - {name = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}}, - {name = "httptools-0.6.4-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl",hashes = {sha256 = "288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}}, - {name = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}}, - {name = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6a/b0/17c672b4bc5c7ba7f201eada4e96c71d0a59fbc185e60e42580093a86f21/httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}}, - {name = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/92/5e/b4a826fe91971a0b68e8c2bd4e7db3e7519882f5a8ccdb1194be2b3ab98f/httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}}, - {name = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b0/51/ce61e531e40289a681a463e1258fa1e05e0be54540e40d91d065a264cd8f/httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}}, - {name = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ea/9e/270b7d767849b0c96f275c695d27ca76c30671f8eb8cc1bab6ced5c5e1d0/httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}}, - {name = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/81/86/ced96e3179c48c6f656354e106934e65c8963d48b69be78f355797f0e1b3/httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}}, - {name = "httptools-0.6.4-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/75/73/187a3f620ed3175364ddb56847d7a608a6fc42d551e133197098c0143eca/httptools-0.6.4-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}}, ] marker = "\"default\" in dependency_groups" @@ -2681,22 +2137,6 @@ marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [] -[[packages]] -name = "importlib-metadata" -version = "8.7.0" -requires-python = ">=3.9" -sdist = {name = "importlib_metadata-8.7.0.tar.gz", 
url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hashes = {sha256 = "d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}} -wheels = [ - {name = "importlib_metadata-8.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl",hashes = {sha256 = "e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}}, -] -marker = "python_full_version < \"3.10.2\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [ - "zipp>=3.20", - "typing-extensions>=3.6.4; python_version < \"3.8\"", -] - [[packages]] name = "jinja2" version = "3.1.6" @@ -2772,46 +2212,6 @@ wheels = [ {name = "lxml-6.0.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/bc/1f/962ea2696759abe331c3b0e838bb17e92224f39c638c2068bf0d8345e913/lxml-6.0.1-cp312-cp312-win32.whl",hashes = {sha256 = "987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb"}}, {name = "lxml-6.0.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/41/e2/22c86a990b51b44442b75c43ecb2f77b8daba8c4ba63696921966eac7022/lxml-6.0.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc"}}, {name = "lxml-6.0.1-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b2/21/dc0c73325e5eb94ef9c9d60dbb5dcdcb2e7114901ea9509735614a74e75a/lxml-6.0.1-cp312-cp312-win_arm64.whl",hashes = {sha256 = "c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299"}}, - {name = "lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/29/c8/262c1d19339ef644cdc9eb5aad2e85bd2d1fa2d7c71cdef3ede1a3eed84d/lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "c6acde83f7a3d6399e6d83c1892a06ac9b14ea48332a5fbd55d60b9897b9570a"}}, - {name = "lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/d4/1b0afbeb801468a310642c3a6f6704e53c38a4a6eb1ca6faea013333e02f/lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "0d21c9cacb6a889cbb8eeb46c77ef2c1dd529cde10443fdeb1de847b3193c541"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/5b/c1/8db9b5402bf52ceb758618313f7423cd54aea85679fcf607013707d854a8/lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "847458b7cd0d04004895f1fb2cca8e7c0f8ec923c49c06b7a72ec2d48ea6aca2"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/78/838e115358dd2369c1c5186080dd874a50a691fb5cd80db6afe5e816e2c6/lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "1dc13405bf315d008fe02b1472d2a9d65ee1c73c0a06de5f5a45e6e404d9a1c0"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c7/b6/bdcb3a3ddd2438c5b1a1915161f34e8c85c96dc574b0ef3be3924f36315c/lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "70f540c229a8c0a770dcaf6d5af56a5295e0fc314fc7ef4399d543328054bcea"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = 
"https://files.pythonhosted.org/packages/73/e5/1bfb96185dc1a64c7c6fbb7369192bda4461952daa2025207715f9968205/lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "d2f73aef768c70e8deb8c4742fca4fd729b132fda68458518851c7735b55297e"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/ae/df3ea9ebc3c493b9c6bdc6bd8c554ac4e147f8d7839993388aab57ec606d/lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "e7f4066b85a4fa25ad31b75444bd578c3ebe6b8ed47237896341308e2ce923c3"}}, - {name = "lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/37/b3/65e1e33600542c08bc03a4c5c9c306c34696b0966a424a3be6ffec8038ed/lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl",hashes = {sha256 = "0cce65db0cd8c750a378639900d56f89f7d6af11cd5eda72fde054d27c54b8ce"}}, - {name = "lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7a/46/ee3ed8f3a60e9457d7aea46542d419917d81dbfd5700fe64b2a36fb5ef61/lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c372d42f3eee5844b69dcab7b8d18b2f449efd54b46ac76970d6e06b8e8d9a66"}}, - {name = "lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/9c/b9/8394538e7cdbeb3bfa36bc74924be1a4383e0bb5af75f32713c2c4aa0479/lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "2e2b0e042e1408bbb1c5f3cfcb0f571ff4ac98d8e73f4bf37c5dd179276beedd"}}, - {name = "lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/21/3ef7da1ea2a73976c1a5a311d7cde5d379234eec0968ee609517714940b4/lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "cc73bb8640eadd66d25c5a03175de6801f63c535f0f3cf50cac2f06a8211f420"}}, - {name = "lxml-6.0.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/26/7d/0980016f124f00c572cba6f4243e13a8e80650843c66271ee692cddf25f3/lxml-6.0.1-cp311-cp311-win32.whl",hashes = {sha256 = "7c23fd8c839708d368e406282d7953cee5134f4592ef4900026d84566d2b4c88"}}, - {name = "lxml-6.0.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/08/28440437521f265eff4413eb2a65efac269c4c7db5fd8449b586e75d8de2/lxml-6.0.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "2516acc6947ecd3c41a4a4564242a87c6786376989307284ddb115f6a99d927f"}}, - {name = "lxml-6.0.1-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/dc/617e67296d98099213a505d781f04804e7b12923ecd15a781a4ab9181992/lxml-6.0.1-cp311-cp311-win_arm64.whl",hashes = {sha256 = "cb46f8cfa1b0334b074f40c0ff94ce4d9a6755d492e6c116adb5f4a57fb6ad96"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/41/37/41961f53f83ded57b37e65e4f47d1c6c6ef5fd02cb1d6ffe028ba0efa7d4/lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "b556aaa6ef393e989dac694b9c95761e32e058d5c4c11ddeef33f790518f7a5e"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/3d/47/8631ea73f3dc776fb6517ccde4d5bd5072f35f9eacbba8c657caa4037a69/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "64fac7a05ebb3737b79fd89fe5a5b6c5546aac35cfcfd9208eb6e5d13215771c"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/3d/b8/39ae30ca3b1516729faeef941ed84bf8f12321625f2644492ed8320cb254/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "038d3c08babcfce9dc89aaf498e6da205efad5b7106c3b11830a488d4eadf56b"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/9c/ea/048dea6cdfc7a72d40ae8ed7e7d23cf4a6b6a6547b51b492a3be50af0e80/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "445f2cee71c404ab4259bc21e20339a859f75383ba2d7fb97dfe7c163994287b"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/d4/c2b46e432377c45d611ae2f669aa47971df1586c1a5240675801d0f02bac/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "e352d8578e83822d70bea88f3d08b9912528e4c338f04ab707207ab12f4b7aac"}}, - {name = "lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b6/db/8f620f1ac62cf32554821b00b768dd5957ac8e3fd051593532be5b40b438/lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "51bd5d1a9796ca253db6045ab45ca882c09c071deafffc22e06975b7ace36300"}}, - {name = "lxml-6.0.1-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/b2/06/29693634ad5fc8ae0bab6723ba913c821c780614eea9ab9ebb5b2105d0e4/lxml-6.0.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "3b38e20c578149fdbba1fd3f36cb1928a3aaca4b011dfd41ba09d11fb396e1b9"}}, - {name = "lxml-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/97/e0/69d4113afbda9441f0e4d5574d9336535ead6a0608ee6751b3db0832ade0/lxml-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "11a052cbd013b7140bbbb38a14e2329b6192478344c99097e378c691b7119551"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/eb/3d/8fa1dbf48a3ea0d6c646f0129bef89a5ecf9a1cfe935e26e07554261d728/lxml-6.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "21344d29c82ca8547ea23023bb8e7538fa5d4615a1773b991edf8176a870c1ea"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/52/a48331a269900488b886d527611ab66238cddc6373054a60b3c15d4cefb2/lxml-6.0.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "aa8f130f4b2dc94baa909c17bb7994f0268a2a72b9941c872e8e558fd6709050"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/33/3b/8f6778a6fb9d30a692db2b1f5a9547dfcb674b27b397e1d864ca797486b1/lxml-6.0.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4588806a721552692310ebe9f90c17ac6c7c5dac438cd93e3d74dd60531c3211"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/42/15/c9364f23fa89ef2d3dbb896912aa313108820286223cfa833a0a9e183c9e/lxml-6.0.1-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "8466faa66b0353802fb7c054a400ac17ce2cf416e3ad8516eadeff9cba85b741"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/04/af/11985b0d47786161ddcdc53dc06142dc863b81a38da7f221c7b997dd5d4b/lxml-6.0.1-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "50b5e54f6a9461b1e9c08b4a3420415b538d4773bd9df996b9abcbfe95f4f1fd"}}, - {name = "lxml-6.0.1-cp310-cp310-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/6a/42/74b35ccc9ef1bb53f0487a4dace5ff612f1652d27faafe91ada7f7b9ee60/lxml-6.0.1-cp310-cp310-manylinux_2_31_armv7l.whl",hashes = {sha256 = "6f393e10685b37f15b1daef8aa0d734ec61860bb679ec447afa0001a31e7253f"}}, - {name = "lxml-6.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b0/5a/b934534f83561ad71fb64ba1753992e836ea73776cfb56fc0758dbb46bdf/lxml-6.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "07038c62fd0fe2743e2f5326f54d464715373c791035d7dda377b3c9a5d0ad77"}}, - {name = "lxml-6.0.1-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/6c/26/d833a56ec8ca943b696f3a7a1e54f97cfb63754c951037de5e222c011f3b/lxml-6.0.1-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "7a44a5fb1edd11b3a65c12c23e1049c8ae49d90a24253ff18efbcb6aa042d012"}}, - {name = "lxml-6.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3f/cb/601aa274c7cda51d0cc84a13d9639096c1191de9d9adf58f6c195d4822a2/lxml-6.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a57d9eb9aadf311c9e8785230eec83c6abb9aef2adac4c0587912caf8f3010b8"}}, - {name = "lxml-6.0.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/76/4e/e079f7b324e6d5f83007f30855448646e1cba74b5c30da1a081df75eba89/lxml-6.0.1-cp310-cp310-win32.whl",hashes = {sha256 = "d877874a31590b72d1fa40054b50dc33084021bfc15d01b3a661d85a302af821"}}, - {name = "lxml-6.0.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/65/0a/da298d7a96316c75ae096686de8d036d814ec3b72c7d643a2c226c364168/lxml-6.0.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c43460f4aac016ee0e156bfa14a9de9b3e06249b12c228e27654ac3996a46d5b"}}, - {name = "lxml-6.0.1-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0f/65/d7f61082fecf4543ab084e8bd3d4b9be0c1a0c83979f1fa2258e2a7987fb/lxml-6.0.1-cp310-cp310-win_arm64.whl",hashes = {sha256 = "615bb6c73fed7929e3a477a3297a797892846b253d59c84a62c98bdce3849a0a"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/61/ad51fbecaf741f825d496947b19d8aea0dcd323fdc2be304e93ce59f66f0/lxml-6.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0abfbaf4ebbd7fd33356217d317b6e4e2ef1648be6a9476a52b57ffc6d8d1780"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/1b/7f/310bef082cc69d0db46a8b9d8ca5f4a8fb41e1c5d299ef4ca5f391c4f12d/lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "1ebbf2d9775be149235abebdecae88fe3b3dd06b1797cd0f6dffe6948e85309d"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/cc/dc5833def5998c783500666468df127d6d919e8b9678866904e5680b0b13/lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "a389e9f11c010bd30531325805bbe97bdf7f728a73d0ec475adef57ffec60547"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/1b/dc/bdd4d413844b5348134444d64911f6f34b211f8b778361946d07623fc904/lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8f5cf2addfbbe745251132c955ad62d8519bb4b2c28b0aa060eca4541798d86e"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/14/e60e9d46972603753824eb7bea06fbe4153c627cc0f7110111253b7c9fc5/lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "f1b60a3287bf33a2a54805d76b82055bcc076e445fd539ee9ae1fe85ed373691"}}, - {name = "lxml-6.0.1-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/42/fa/268c9be8c69a418b8106e096687aba2b1a781fb6fc1b3f04955fac2be2b9/lxml-6.0.1-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "f7bbfb0751551a8786915fc6b615ee56344dacc1b1033697625b553aefdd9837"}}, ] marker = "\"recommended\" in extras" @@ -2854,26 +2254,6 @@ wheels = [ {name = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}}, {name = "MarkupSafe-3.0.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl",hashes = {sha256 = "0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}}, {name = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl",hashes = {sha256 = "6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}}, - {name = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}}, - {name = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}}, ] marker = "\"dev\" in extras" @@ -2916,10 +2296,6 @@ requires-python = ">=3.8" sdist = {name = "multiprocess-0.70.16.tar.gz", url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hashes = {sha256 = "161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}} wheels = [ {name = "multiprocess-0.70.16-py312-none-any.whl",url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl",hashes = {sha256 = "fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}}, - {name = "multiprocess-0.70.16-py311-none-any.whl",url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl",hashes = {sha256 = "af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}}, - {name = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ef/76/6e712a2623d146d314f17598df5de7224c85c0060ef63fd95cc15a25b3fa/multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",hashes = {sha256 = "476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}}, - {name = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/ab/1e6e8009e380e22254ff539ebe117861e5bdb3bff1fc977920972237c6c7/multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",hashes = {sha256 = "d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}}, - {name = 
"multiprocess-0.70.16-py310-none-any.whl",url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl",hashes = {sha256 = "c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}}, ] marker = "\"default\" in dependency_groups" @@ -2987,24 +2363,6 @@ wheels = [ {name = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b2/66/2d976c0c7158fd25591c8ca55aee026e6d5745a021915a1835578707feb3/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e"}}, {name = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/a9/dfb999c2fc6911201dcbf348247f9cc382a8990f9ab45c12eabfd7243a38/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a"}}, {name = "pyarrow-20.0.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a0/8e/9adee63dfa3911be2382fb4d92e4b2e7d82610f9d9f668493bebaa2af50f/pyarrow-20.0.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "96d6a0a37d9c98be08f5ed6a10831d88d52cac7b13f5287f1e0f625a0de8062b"}}, - {name = "pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/47/a2/b7930824181ceadd0c63c1042d01fa4ef63eee233934826a7a2a9af6e463/pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "24ca380585444cb2a31324c546a9a56abbe87e26069189e14bdba19c86c049f0"}}, - {name = "pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/9b/18/c765770227d7f5bdfa8a69f64b49194352325c66a5c3bb5e332dfd5867d9/pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl",hashes = {sha256 = "95b330059ddfdc591a3225f2d272123be26c8fa76e8c9ee1a77aad507361cfdb"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/44/fb/dfb2dfdd3e488bb14f822d7335653092dde150cffc2da97de6e7500681f9/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/58/0d/08a95878d38808051a953e887332d4a76bc06c6ee04351918ee1155407eb/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/cd/efa271234dfe38f0271561086eedcad7bc0f2ddd1efba423916ff0883684/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl",hashes = {sha256 = "7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab"}}, - {name = "pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/1f/7f02009bc7fc8955c391defee5348f510e589a020e4b40ca05edcb847854/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl",hashes = {sha256 = "a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62"}}, - {name = "pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/4f/92/692c562be4504c262089e86757a9048739fe1acb4024f92d39615e7bab3f/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = 
{sha256 = "6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c"}}, - {name = "pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/ec/9f5c7e7c828d8e0a3c7ef50ee62eca38a7de2fa6eb1b8fa43685c9414fef/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3"}}, - {name = "pyarrow-20.0.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/54/96/46613131b4727f10fd2ffa6d0d6f02efcc09a0e7374eff3b5771548aa95b/pyarrow-20.0.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "3346babb516f4b6fd790da99b98bed9708e3f02e734c84971faccb20736848dc"}}, - {name = "pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/23/77094eb8ee0dbe88441689cb6afc40ac312a1e15d3a7acc0586999518222/pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "c7dd06fd7d7b410ca5dc839cc9d485d2bc4ae5240851bcd45d85105cc90a47d7"}}, - {name = "pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/d5/48cc573aff00d62913701d9fac478518f693b30c25f2c157550b0b2565cb/pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl",hashes = {sha256 = "d5382de8dc34c943249b01c19110783d0d64b207167c728461add1ecc2db88e4"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/37/df/4099b69a432b5cb412dd18adc2629975544d656df3d7fda6d73c5dba935d/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/27/99922a9ac1c9226f346e3a1e15e63dee6f623ed757ff2893f9d6994a69d3/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/21/d1/71d91b2791b829c9e98f1e0d85be66ed93aff399f80abb99678511847eaa/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl",hashes = {sha256 = "5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20"}}, - {name = "pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f1/ca/ae10fba419a6e94329707487835ec721f5a95f3ac9168500bcf7aa3813c7/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl",hashes = {sha256 = "a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9"}}, - {name = "pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7a/a6/aba40a2bf01b5d00cf9cd16d427a5da1fad0fb69b514ce8c8292ab80e968/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75"}}, - {name = "pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/6b/98b39650cd64f32bf2ec6d627a9bd24fcb3e4e6ea1873c5e1ea8a83b1a18/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8"}}, - {name = "pyarrow-20.0.0-cp310-cp310-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/ab/32/340238be1eb5037e7b5de7e640ee22334417239bc347eadefaf8c373936d/pyarrow-20.0.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "4a8b029a07956b8d7bd742ffca25374dd3f634b35e46cc7a7c3fa4c75b297191"}}, ] marker = "\"default\" in dependency_groups" @@ -3028,11 +2386,6 @@ wheels = [ {name = "pycryptodomex-3.23.0-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/ca/18/4ca89ac737230b52ac8ffaca42f9c6f1fd07c81a6cd821e91af79db60632/pycryptodomex-3.23.0-cp313-cp313t-win32.whl",hashes = {sha256 = "a9d446e844f08299236780f2efa9898c818fe7e02f17263866b8550c7d5fb328"}}, {name = "pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/73/34/13e01c322db027682e00986873eca803f11c56ade9ba5bbf3225841ea2d4/pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "bc65bdd9fc8de7a35a74cab1c898cab391a4add33a8fe740bda00f5976ca4708"}}, {name = "pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/54/68/9504c8796b1805d58f4425002bcca20f12880e6fa4dc2fc9a668705c7a08/pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "c885da45e70139464f082018ac527fdaad26f1657a99ee13eecdce0f0ca24ab4"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/f3/b8/3e76d948c3c4ac71335bbe75dac53e154b40b0f8f1f022dfa295257a0c96/pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "ebfff755c360d674306e5891c564a274a47953562b42fb74a5c25b8fc1fb1cb5"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6a/cf/80f4297a4820dfdfd1c88cf6c4666a200f204b3488103d027b5edd9176ec/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "eca54f4bb349d45afc17e3011ed4264ef1cc9e266699874cdd1349c504e64798"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d1/42/1e969ee0ad19fe3134b0e1b856c39bd0b70d47a4d0e81c2a8b05727394c9/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4f2596e643d4365e14d0879dc5aafe6355616c61c2176009270f3048f6d9a61f"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/6e/c3/1de4f7631fea8a992a44ba632aa40e0008764c0fb9bf2854b0acf78c2cf2/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "fdfac7cda115bca3a5abb2f9e43bc2fb66c2b65ab074913643803ca7083a79ea"}}, - {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f2/5f/af7da8e6f1e42b52f44a24d08b8e4c726207434e2593732d39e7af5e7256/pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe"}}, {name = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/dd/9c/1a8f35daa39784ed8adf93a694e7e5dc15c23c741bbda06e1d45f8979e9e/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl",hashes = {sha256 = "06698f957fe1ab229a99ba2defeeae1c09af185baa909a31a5d1f9d42b1aaed6"}}, {name = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/7a/62/f5221a191a97157d240cf6643747558759126c76ee92f29a3f4aee3197a5/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl",hashes = {sha256 = "b2c2537863eccef2d41061e82a881dcabb04944c5c06c5aa7110b577cc487545"}}, {name = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/8c/fd/5a054543c8988d4ed7b612721d7e78a4b9bf36bc3c5ad45ef45c22d0060e/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "43c446e2ba8df8889e0e16f02211c25b4934898384c1ec1ec04d7889c0333587"}}, @@ -3220,34 +2573,6 @@ wheels = [ {name = "ujson-5.11.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7e/81/546042f0b23c9040d61d46ea5ca76f0cc5e0d399180ddfb2ae976ebff5b5/ujson-5.11.0-cp312-cp312-win32.whl",hashes = {sha256 = "be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88"}}, {name = "ujson-5.11.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/1b/27c05dc8c9728f44875d74b5bfa948ce91f6c33349232619279f35c6e817/ujson-5.11.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f"}}, {name = "ujson-5.11.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/22/2d/37b6557c97c3409c202c838aa9c960ca3896843b4295c4b7bb2bbd260664/ujson-5.11.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6"}}, - {name = "ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f"}}, - {name = "ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58"}}, - {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26"}}, - {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a"}}, - {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6"}}, - {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b"}}, - 
{name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba"}}, - {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3"}}, - {name = "ujson-5.11.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/fe/d3/9ba310e07969bc9906eb7548731e33a0f448b122ad9705fed699c9b29345/ujson-5.11.0-cp311-cp311-win32.whl",hashes = {sha256 = "e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34"}}, - {name = "ujson-5.11.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/57/f7/da05b4a8819f1360be9e71fb20182f0bb3ec611a36c3f213f4d20709e099/ujson-5.11.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01"}}, - {name = "ujson-5.11.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9a/cc/f3f9ac0f24f00a623a48d97dc3814df5c2dc368cfb00031aa4141527a24b/ujson-5.11.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835"}}, - {name = "ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362"}}, - {name = "ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39"}}, - {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc"}}, - {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844"}}, - {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49"}}, - {name = "ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/52/5b/8c5e33228f7f83f05719964db59f3f9f276d272dc43752fa3bbf0df53e7b/ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = 
"416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04"}}, - {name = "ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/0c/8bf7a4fabfd01c7eed92d9b290930ce6d14910dec708e73538baa38885d1/ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "446e8c11c06048611c9d29ef1237065de0af07cabdd97e6b5b527b957692ec25"}}, - {name = "ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/2e/eeab0b8b641817031ede4f790db4c4942df44a12f44d72b3954f39c6a115/ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "16ccb973b7ada0455201808ff11d48fe9c3f034a6ab5bd93b944443c88299f89"}}, - {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/21/1b/a4e7a41870797633423ea79618526747353fd7be9191f3acfbdee0bf264b/ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3134b783ab314d2298d58cda7e47e7a0f7f71fc6ade6ac86d5dbeaf4b9770fa6"}}, - {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/94/ae/4e0d91b8f6db7c9b76423b3649612189506d5a06ddd3b6334b6d37f77a01/ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "185f93ebccffebc8baf8302c869fac70dd5dd78694f3b875d03a31b03b062cdb"}}, - {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/cc/46b124c2697ca2da7c65c4931ed3cb670646978157aa57a7a60f741c530f/ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d06e87eded62ff0e5f5178c916337d2262fdbc03b31688142a3433eabb6511db"}}, - {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/39/eb/20dd1282bc85dede2f1c62c45b4040bc4c389c80a05983515ab99771bca7/ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "181fb5b15703a8b9370b25345d2a1fd1359f0f18776b3643d24e13ed9c036d4c"}}, - {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/64/a2/80072439065d493e3a4b1fbeec991724419a1b4c232e2d1147d257cac193/ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "a4df61a6df0a4a8eb5b9b1ffd673429811f50b235539dac586bb7e9e91994138"}}, - {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/7e/d77f9e9c039d58299c350c978e086a804d1fceae4fd4a1cc6e8d0133f838/ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6eff24e1abd79e0ec6d7eae651dd675ddbc41f9e43e29ef81e16b421da896915"}}, - {name = "ujson-5.11.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/ab/f1/697559d45acc849cada6b3571d53522951b1a64027400507aabc6a710178/ujson-5.11.0-cp310-cp310-win32.whl",hashes = {sha256 = "30f607c70091483550fbd669a0b37471e5165b317d6c16e75dba2aa967608723"}}, - {name = "ujson-5.11.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/86/a2/70b73a0f55abe0e6b8046d365d74230c20c5691373e6902a599b2dc79ba1/ujson-5.11.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "3d2720e9785f84312b8e2cb0c2b87f1a0b1c53aaab3b2af3ab817d54409012e0"}}, - {name = "ujson-5.11.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/1c/5f/b19104afa455630b43efcad3a24495b9c635d92aa8f2da4f30e375deb1a2/ujson-5.11.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = 
"85e6796631165f719084a9af00c79195d3ebf108151452fefdcb1c8bb50f0105"}}, ] marker = "sys_platform != \"win32\" and implementation_name == \"cpython\" and \"default\" in dependency_groups" @@ -3296,34 +2621,6 @@ wheels = [ {name = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}}, {name = "websockets-15.0.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl",hashes = {sha256 = "c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}}, {name = "websockets-15.0.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}}, - {name = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}}, - {name = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}}, - {name = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}}, - {name = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}}, - {name = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}}, - {name = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}}, - {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}}, - {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}}, - {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}}, - {name = "websockets-15.0.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl",hashes = {sha256 = "16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}}, - {name = "websockets-15.0.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}}, - {name = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}}, - {name = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}}, - {name = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}}, - {name = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}}, - {name = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}}, - {name = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}}, - {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}}, - {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}}, - {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}}, - {name = "websockets-15.0.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl",hashes = {sha256 = "1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}}, - {name = "websockets-15.0.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}}, - {name = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}}, - {name = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}}, - {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}}, - {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}}, - {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}}, - {name = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}}, {name = "websockets-15.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl",hashes = {sha256 = "f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}}, ] marker = "\"default\" in dependency_groups" @@ -3390,26 +2687,6 @@ wheels = [ {name = "wrapt-1.17.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl",hashes = {sha256 = "4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe"}}, {name = "wrapt-1.17.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"}}, {name = "wrapt-1.17.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"}}, - {name = "wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7"}}, - {name = "wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85"}}, - {name = "wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f"}}, - {name = "wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311"}}, - {name = "wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = 
"8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1"}}, - {name = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5"}}, - {name = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2"}}, - {name = "wrapt-1.17.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl",hashes = {sha256 = "c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89"}}, - {name = "wrapt-1.17.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77"}}, - {name = "wrapt-1.17.3-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl",hashes = {sha256 = "5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a"}}, - {name = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3f/23/bb82321b86411eb51e5a5db3fb8f8032fd30bd7c2d74bfe936136b2fa1d6/wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}}, - {name = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}}, - {name = "wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c"}}, - {name = "wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775"}}, - {name = "wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/15/dd576273491f9f43dd09fce517f6c2ce6eb4fe21681726068db0d0467096/wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd"}}, - {name = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/0c/c4/5eb4ce0d4814521fee7aa806264bf7a114e748ad05110441cd5b8a5c744b/wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05"}}, - {name = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/4b/819e9e0eb5c8dc86f60dfc42aa4e2c0d6c3db8732bce93cc752e604bb5f5/wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418"}}, - {name = "wrapt-1.17.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/f8/83/ed6baf89ba3a56694700139698cf703aac9f0f9eb03dab92f57551bd5385/wrapt-1.17.3-cp310-cp310-win32.whl",hashes = {sha256 = "a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390"}}, - {name = "wrapt-1.17.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2f/90/ee61d36862340ad7e9d15a02529df6b948676b9a5829fd5e16640156627d/wrapt-1.17.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6"}}, - {name = "wrapt-1.17.3-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/bd/c3/cefe0bd330d389c9983ced15d326f45373f4073c9f4a8c2f99b50bfea329/wrapt-1.17.3-cp310-cp310-win_arm64.whl",hashes = {sha256 = "af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18"}}, {name = "wrapt-1.17.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl",hashes = {sha256 = "7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"}}, ] marker = "\"default\" in dependency_groups" @@ -3417,19 +2694,6 @@ marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] -[[packages]] -name = "zipp" -version = "3.23.0" -requires-python = ">=3.9" -sdist = {name = "zipp-3.23.0.tar.gz", url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hashes = {sha256 = "a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}} -wheels = [ - {name = "zipp-3.23.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl",hashes = {sha256 = "071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}}, -] -marker = "python_full_version < \"3.10.2\" and python_version ~= \"3.10\" and \"dev\" in extras or python_full_version ~= \"3.9.0\" and \"dev\" in extras" - -[packages.tool.pdm] -dependencies = [] - [[packages]] name = "anyio" version = "4.9.0" @@ -3515,20 +2779,6 @@ wheels = [ {name = "pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/5b/14/cec7760d7c9507f11c97d64f29022e12a6cc4fc03ac694535e89f88ad2ec/pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8"}}, {name = "pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/b9/6e2d2c6728ed29fb3d4d4d302504fb66f1a543e37eb2e43f352a86365cdf/pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679"}}, {name = "pandas-2.3.1-cp312-cp312-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/80/a5/3a92893e7399a691bad7664d977cb5e7c81cf666c81f89ea76ba2bff483d/pandas-2.3.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8"}}, - {name = "pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/1c/ccf70029e927e473a4476c00e0d5b32e623bff27f0402d0a92b7fc29bb9f/pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2b0540963d83431f5ce8870ea02a7430adca100cec8a050f0811f8e31035541b"}}, - {name = "pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ec/d3/3c37cb724d76a841f14b8f5fe57e5e3645207cc67370e4f84717e8bb7657/pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "fe7317f578c6a153912bd2292f02e40c1d8f253e93c599e82620c7f69755c74f"}}, - {name = "pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/8a/4c/367c98854a1251940edf54a4df0826dcacfb987f9068abf3e3064081a382/pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e6723a27ad7b244c0c79d8e7007092d7c8f0f11305770e2f4cd778b3ad5f9f85"}}, - {name = "pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/5f/63760ff107bcf5146eee41b38b3985f9055e710a72fdd637b791dea3495c/pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3462c3735fe19f2638f2c3a40bd94ec2dc5ba13abbb032dd2fa1f540a075509d"}}, - {name = "pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/15/53/f31a9b4dfe73fe4711c3a609bd8e60238022f48eacedc257cd13ae9327a7/pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "98bcc8b5bf7afed22cc753a28bc4d9e26e078e777066bc53fac7904ddef9a678"}}, - {name = "pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e0/94/6fce6bf85b5056d065e0a7933cba2616dcb48596f7ba3c6341ec4bcc529d/pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4d544806b485ddf29e52d75b1f559142514e60ef58a832f74fb38e48d757b299"}}, - {name = "pandas-2.3.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c8/7b/bdcb1ed8fccb63d04bdb7635161d0ec26596d92c9d7a6cce964e7876b6c1/pandas-2.3.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "b3cd4273d3cb3707b6fffd217204c52ed92859533e31dc03b7c5008aa933aaab"}}, - {name = "pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/c4/ca/aa97b47287221fa37a49634532e520300088e290b20d690b21ce3e448143/pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "22c2e866f7209ebc3a8f08d75766566aae02bcc91d196935a1d9e59c7b990ac9"}}, - {name = "pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/80/bf/7938dddc5f01e18e573dcfb0f1b8c9357d9b5fa6ffdee6e605b92efbdff2/pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "3583d348546201aff730c8c47e49bc159833f971c2899d6097bce68b9112a4f1"}}, - {name = "pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/2f/9af748366763b2a494fed477f88051dbf06f56053d5c00eba652697e3f94/pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0f951fbb702dacd390561e0ea45cdd8ecfa7fb56935eb3dd78e306c19104b9b0"}}, - {name = 
"pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/95/79ab37aa4c25d1e7df953dde407bb9c3e4ae47d154bc0dd1692f3a6dcf8c/pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "cd05b72ec02ebfb993569b4931b2e16fbb4d6ad6ce80224a3ee838387d83a191"}}, - {name = "pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/75/a7/d65e5d8665c12c3c6ff5edd9709d5836ec9b6f80071b7f4a718c6106e86e/pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1"}}, - {name = "pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/f3/4c1dbd754dbaa79dbf8b537800cb2fa1a6e534764fef50ab1f7533226c5c/pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97"}}, - {name = "pandas-2.3.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3f/d6/d7f5777162aa9b48ec3910bca5a58c9b5927cfd9cfde3aa64322f5ba4b9f/pandas-2.3.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83"}}, ] marker = "\"default\" in dependency_groups" @@ -3647,26 +2897,8 @@ wheels = [ {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}}, {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl",hashes = {sha256 = "e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}}, {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl",hashes = {sha256 = "0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fb/8f/683c6ad562f558cbc4f7c029abcd9599148c51c54b5ef0f24f2638da9fbb/ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl",hashes = {sha256 = "4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/3c/d2/b79b7d695e2f21da020bd44c782490578f300dd44f0a4c57a92575758a76/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl",hashes = {sha256 = "d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/68/6e/264c50ce2a31473a9fdbf4fa66ca9b2b17c7455b31ef585462343818bd6c/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/86/29/88c2567bc893c84d88b4c48027367c3562ae69121d568e8a3f3a8d363f4d/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl",url = "https://files.pythonhosted.org/packages/11/46/879763c619b5470820f0cd6ca97d134771e502776bc2b844d2adb6e37753/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl",hashes = {sha256 = "cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/02/80/ece7e6034256a4186bbe50dee28cd032d816974941a6abf6a9d65e4228a7/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f0/ca/e4106ac7e80efbabdf4bf91d3d32fc424e41418458251712f5672eada9ce/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/67/58/b1f60a1d591b771298ffa0428237afb092c7f29ae23bad93420b1eb10703/ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl",hashes = {sha256 = "bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}}, - {name = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b4/4f/b52f634c9548a9291a70dfce26ca7ebce388235c93588a1068028ea23fcc/ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl",hashes = {sha256 = "a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl",url = "https://files.pythonhosted.org/packages/70/57/40a958e863e299f0c74ef32a3bde9f2d1ea8d69669368c0c502a0997f57f/ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl",hashes = {sha256 = "11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/98/a8/29a3eb437b12b95f50a6bcc3d7d7214301c6c529d8fdc227247fa84162b5/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl",hashes = {sha256 = "a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/6d/ae05a87a3ad540259c3ad88d71275cbd1c0f2d30ae04c65dcbfb6dcd4b9f/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/7f/b7/20c6f3c0b656fe609675d69bc135c03aac9e3865912444be6339207b6648/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl",url = 
"https://files.pythonhosted.org/packages/cd/11/d12dbf683471f888d354dac59593873c2b45feb193c5e3e0f2ebf85e68b9/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl",hashes = {sha256 = "22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/14/4c268f5077db5c83f743ee1daeb236269fa8577133a5cfa49f8b382baf13/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/fc/8cd12f189c6405a4c1cf37bd633aa740a9538c8e40497c231072d0fef5cf/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/80/29/c0a017b704aaf3cbf704989785cd9c5d5b8ccec2dae6ac0c53833c84e677/ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl",hashes = {sha256 = "3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}}, - {name = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3a/65/fa39d74db4e2d0cd252355732d966a460a41cd01c6353b820a0952432839/ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl",hashes = {sha256 = "ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}}, ] -marker = "platform_python_implementation == \"CPython\" and python_version < \"3.14\" and python_version ~= \"3.10\" and \"dev\" in extras or platform_python_implementation == \"CPython\" and python_full_version ~= \"3.9.0\" and \"dev\" in extras" +marker = "platform_python_implementation == \"CPython\" and python_version < \"3.14\" and python_full_version >= \"3.10.0\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [] @@ -3772,41 +3004,6 @@ wheels = [ {name = "xxhash-3.5.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl",hashes = {sha256 = "f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"}}, {name = "xxhash-3.5.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"}}, {name = "xxhash-3.5.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"}}, - {name = "xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/b8/c7/afed0f131fbda960ff15eee7f304fa0eeb2d58770fade99897984852ef23/xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1"}}, - {name = "xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/0c/7c3bc6d87e5235672fcc2fb42fd5ad79fe1033925f71bf549ee068c7d1ca/xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8"}}, - {name = 
"xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/04/9e/01067981d98069eec1c20201f8c145367698e9056f8bc295346e4ea32dd1/xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d4/09/d4996de4059c3ce5342b6e1e6a77c9d6c91acce31f6ed979891872dd162b/xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/62/f5/6d2dc9f8d55a7ce0f5e7bfef916e67536f01b85d32a9fbf137d4cadbee38/xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/72/9256303f10e41ab004799a4aa74b80b3c5977d6383ae4550548b24bd1971/xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a"}}, - {name = "xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/34/92/1a3a29acd08248a34b0e6a94f4e0ed9b8379a4ff471f1668e4dce7bdbaa8/xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/53/ad/7fa1a109663366de42f724a1cdb8e796a260dbac45047bce153bc1e18abf/xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/35/02/137300e24203bf2b2a49b48ce898ecce6fd01789c0fcd9c686c0a002d129/xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/23/03/aeceb273933d7eee248c4322b98b8e971f06cc3880e5f7602c94e5578af5/xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/e3/64/ed82ec09489474cbb35c716b189ddc1521d8b3de12b1b5ab41ce7f70253c/xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d"}}, - {name = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/71/43/6db4c02dcb488ad4e03bc86d70506c3d40a384ee73c9b5c93338eb1f3c23/xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839"}}, - {name = "xxhash-3.5.0-cp311-cp311-win32.whl",url = 
"https://files.pythonhosted.org/packages/22/6d/db4abec29e7a567455344433d095fdb39c97db6955bb4a2c432e486b4d28/xxhash-3.5.0-cp311-cp311-win32.whl",hashes = {sha256 = "109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da"}}, - {name = "xxhash-3.5.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/52/1c/fa3b61c0cf03e1da4767213672efe186b1dfa4fc901a4a694fb184a513d1/xxhash-3.5.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58"}}, - {name = "xxhash-3.5.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/6b/8e/9e6fc572acf6e1cc7ccb01973c213f895cb8668a9d4c2b58a99350da14b7/xxhash-3.5.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3"}}, - {name = "xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/bb/8a/0e9feca390d512d293afd844d31670e25608c4a901e10202aa98785eab09/xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212"}}, - {name = "xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/16/e6/be5aa49580cd064a18200ab78e29b88b1127e1a8c7955eb8ecf81f2626eb/xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/20/ee/b8a99ebbc6d1113b3a3f09e747fa318c3cde5b04bd9c197688fadf0eeae8/xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/58/62/15d10582ef159283a5c2b47f6d799fc3303fe3911d5bb0bcc820e1ef7ff4/xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/23/41/61202663ea9b1bd8e53673b8ec9e2619989353dba8cfb68e59a9cbd9ffe3/xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/07/d9a3059f702dec5b3b703737afb6dda32f304f6e9da181a229dafd052c29/xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196"}}, - {name = "xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/eb/58/27caadf78226ecf1d62dbd0c01d152ed381c14c1ee4ad01f0d460fc40eac/xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b1/08/32d558ce23e1e068453c39aed7b3c1cdc690c177873ec0ca3a90d5808765/xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 
= "4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/3f/d4/2b971e2d2b0a61045f842b622ef11e94096cf1f12cd448b6fd426e80e0e2/xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/19/ae/6a6438864a8c4c39915d7b65effd85392ebe22710412902487e51769146d/xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/48/7d/b3c27c27d1fc868094d02fe4498ccce8cec9fcc591825c01d6bcb0b4fc49/xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198"}}, - {name = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a1/05/918f9e7d2fbbd334b829997045d341d6239b563c44e683b9a7ef8fe50f5d/xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442"}}, - {name = "xxhash-3.5.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/08/29/dfe393805b2f86bfc47c290b275f0b7c189dc2f4e136fd4754f32eb18a8d/xxhash-3.5.0-cp310-cp310-win32.whl",hashes = {sha256 = "61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da"}}, - {name = "xxhash-3.5.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7b/d7/aa0b22c4ebb7c3ccb993d4c565132abc641cd11164f8952d89eb6a501909/xxhash-3.5.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9"}}, - {name = "xxhash-3.5.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/69/12/f969b81541ee91b55f1ce469d7ab55079593c80d04fd01691b550e535000/xxhash-3.5.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/ab/9a/233606bada5bd6f50b2b72c45de3d9868ad551e83893d2ac86dc7bb8553a/xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/0c/67/f75276ca39e2c6604e3bee6c84e9db8a56a4973fde9bf35989787cf6e8aa/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/f8/f6c61fd794229cc3848d144f73754a0c107854372d7261419dcbbd286299/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/79/d3/c029c99801526f859e6b38d34ab87c08993bf3dcea34b11275775001638a/xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"}}, - {name = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/62/e3/bef7b82c1997579c94de9ac5ea7626d01ae5858aa22bf4fcb38bf220cb3e/xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"}}, ] marker = "\"default\" in dependency_groups" @@ -3814,57 +3011,102 @@ marker = "\"default\" in dependency_groups" dependencies = [] [[packages]] -name = "scipy" -version = "1.13.1" +name = "tomli" +version = "2.3.0" +requires-python = ">=3.8" +sdist = {name = "tomli-2.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hashes = {sha256 = "64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}} +wheels = [ + {name = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}}, + {name = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}}, + {name = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}}, + {name = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}}, + {name = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}}, + {name = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}}, + {name = "tomli-2.3.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl",hashes = {sha256 = "00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}}, + {name = "tomli-2.3.0-cp311-cp311-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}}, + {name = "tomli-2.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl",hashes = {sha256 = "e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}}, +] +marker = "python_full_version ~= \"3.10.0\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "backports-asyncio-runner" +version = "1.2.0" +requires-python = "<3.11,>=3.8" +sdist = {name = "backports_asyncio_runner-1.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hashes = {sha256 = "a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"}} +wheels = [ + {name = "backports_asyncio_runner-1.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl",hashes = {sha256 = "0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"}}, +] +marker = "python_full_version ~= \"3.10.0\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "async-timeout" +version = "5.0.1" +requires-python = ">=3.8" +sdist = {name = "async_timeout-5.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hashes = {sha256 = "d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}} +wheels = [ + {name = "async_timeout-5.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl",hashes = {sha256 = "39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}}, +] +marker = "\"default\" in dependency_groups and python_full_version ~= \"3.10.0\" or \"dev\" in extras and python_full_version ~= \"3.10.0\"" + +[packages.tool.pdm] +dependencies = [] + +[[packages]] +name = "exceptiongroup" +version = "1.3.0" +requires-python = ">=3.7" +sdist = {name = "exceptiongroup-1.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hashes = {sha256 = "b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}} +wheels = [ + {name = "exceptiongroup-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl",hashes = {sha256 = "4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}}, +] +marker = "\"default\" in dependency_groups and python_full_version ~= \"3.10.0\" or \"dev\" in extras and python_full_version ~= \"3.10.0\"" + +[packages.tool.pdm] +dependencies = [ + "typing-extensions>=4.6.0; python_version < \"3.13\"", +] + +[[packages]] +name = "importlib-metadata" +version = "8.7.0" requires-python = ">=3.9" -sdist = {name = "scipy-1.13.1.tar.gz", url = "https://files.pythonhosted.org/packages/ae/00/48c2f661e2816ccf2ecd77982f6605b2950afe60f60a52b4cbbc2504aa8f/scipy-1.13.1.tar.gz", hashes = {sha256 = 
"095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}} +sdist = {name = "importlib_metadata-8.7.0.tar.gz", url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hashes = {sha256 = "d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}} wheels = [ - {name = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/7f/29/c2ea58c9731b9ecb30b6738113a95d147e83922986b34c685b8f6eefde21/scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}}, - {name = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5c/c0/e71b94b20ccf9effb38d7147c0064c08c622309fd487b1b677771a97d18c/scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl",hashes = {sha256 = "8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}}, - {name = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6d/0f/aaa55b06d474817cea311e7b10aab2ea1fd5d43bc6a2861ccc9caec9f418/scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}}, - {name = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/f5/d0ad1a96f80962ba65e2ce1de6a1e59edecd1f0a7b55990ed208848012e0/scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}}, - {name = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/02/1165905f14962174e6569076bcc3315809ae1291ed14de6448cc151eedfd/scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}}, - {name = "scipy-1.13.1-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3e/77/dab54fe647a08ee4253963bcd8f9cf17509c8ca64d6335141422fe2e2114/scipy-1.13.1-cp39-cp39-win_amd64.whl",hashes = {sha256 = "392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}}, + {name = "importlib_metadata-8.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl",hashes = {sha256 = "e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}}, ] -marker = "python_full_version ~= \"3.9.0\" and \"dev\" in extras" +marker = "python_full_version >= \"3.10.0\" and python_full_version < \"3.10.2\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [ - "numpy<2.3,>=1.22.4", + "zipp>=3.20", + "typing-extensions>=3.6.4; python_version < \"3.8\"", ] [[packages]] -name = "numpy" -version = "2.0.2" +name = "zipp" +version = "3.23.0" requires-python = ">=3.9" -sdist = {name = "numpy-2.0.2.tar.gz", url = "https://files.pythonhosted.org/packages/a9/75/10dd1f8116a8b796cb2c737b674e02d02e80454bda953fa7e65d8c12b016/numpy-2.0.2.tar.gz", hashes = {sha256 = "883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}} +sdist = {name = "zipp-3.23.0.tar.gz", url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hashes = {sha256 = "a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}} wheels = [ - 
{name = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/43/c1/41c8f6df3162b0c6ffd4437d729115704bd43363de0090c7f913cfbc2d89/numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl",hashes = {sha256 = "9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}}, - {name = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/39/bc/fd298f308dcd232b56a4031fd6ddf11c43f9917fbc937e53762f7b5a3bb1/numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl",hashes = {sha256 = "423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}}, - {name = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/96/ff/06d1aa3eeb1c614eda245c1ba4fb88c483bee6520d361641331872ac4b82/numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl",hashes = {sha256 = "2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}}, - {name = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/2d/98/121996dcfb10a6087a05e54453e28e58694a7db62c5a5a29cee14c6e047b/numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl",hashes = {sha256 = "97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}}, - {name = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/15/31/9dffc70da6b9bbf7968f6551967fc21156207366272c2a40b4ed6008dc9b/numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}}, - {name = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b9/14/78635daab4b07c0930c919d451b8bf8c164774e6a3413aed04a6d95758ce/numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}}, - {name = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/4c/0eeca4614003077f68bfe7aac8b7496f04221865b3a5e7cb230c9d055afd/numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl",hashes = {sha256 = "5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}}, - {name = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/46/ea25b98b13dccaebddf1a803f8c748680d972e00507cd9bc6dcdb5aa2ac1/numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}}, - {name = "numpy-2.0.2-cp39-cp39-win32.whl",url = "https://files.pythonhosted.org/packages/c8/a6/177dd88d95ecf07e722d21008b1b40e681a929eb9e329684d449c36586b2/numpy-2.0.2-cp39-cp39-win32.whl",hashes = {sha256 = "905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}}, - {name = "numpy-2.0.2-cp39-cp39-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ea/2b/7fc9f4e7ae5b507c1a3a21f0f15ed03e794c1242ea8a242ac158beb56034/numpy-2.0.2-cp39-cp39-win_amd64.whl",hashes = {sha256 = "a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}}, - {name = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/3b/df5a870ac6a3be3a86856ce195ef42eec7ae50d2a202be1f5a4b3b340e14/numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl",hashes = {sha256 = "7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}}, - {name = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/2c/97/51af92f18d6f6f2d9ad8b482a99fb74e142d71372da5d834b3a2747a446e/numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl",hashes = {sha256 = "312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}}, - {name = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/12/46/de1fbd0c1b5ccaa7f9a005b66761533e2f6a3e560096682683a223631fe9/numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}}, - {name = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/cc/dc/d330a6faefd92b446ec0f0dfea4c3207bb1fef3c4771d19cf4543efd2c78/numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl",hashes = {sha256 = "a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}}, + {name = "zipp-3.23.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl",hashes = {sha256 = "071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}}, ] -marker = "\"default\" in dependency_groups and python_full_version ~= \"3.9.0\" or \"dev\" in extras and python_full_version ~= \"3.9.0\"" +marker = "python_full_version >= \"3.10.0\" and python_full_version < \"3.10.2\" and \"dev\" in extras" [packages.tool.pdm] dependencies = [] [tool.pdm] -hashes = {sha256 = "270d4d932c91513087adf7619fe5674be5345433b7a6a4a27147851b95a15892"} +hashes = {sha256 = "624646aafaf5561776673cdfb44330f8a295ed590670600bc9000c6dcdd8019b"} strategy = ["inherit_metadata", "static_urls"] [[tool.pdm.targets]] -requires_python = "~=3.10" +requires_python = "~=3.12" [[tool.pdm.targets]] -requires_python = "~=3.9.0" +requires_python = ">=3.10.0,<3.12" diff --git a/pyproject.toml b/pyproject.toml index fbe054ad..935587d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ dynamic = ["version"] name = "guidellm" description = "Guidance platform for deploying and managing large language models." 
readme = { file = "README.md", content-type = "text/markdown" } -requires-python = ">=3.9.0,<4.0" +requires-python = ">=3.10.0,<4.0" license = { text = "Apache-2.0" } authors = [{ name = "Red Hat" }] keywords = [ @@ -142,7 +142,7 @@ profile = "black" [tool.mypy] files = ["src/guidellm", "tests"] -python_version = '3.9' +python_version = '3.10' warn_redundant_casts = true warn_unused_ignores = false show_error_codes = true @@ -164,7 +164,7 @@ ignore_missing_imports = true [tool.ruff] -target-version = "py39" +target-version = "py310" line-length = 88 indent-width = 4 exclude = ["build", "dist", "env", ".venv"] diff --git a/scripts/generate_pylock.sh b/scripts/generate_pylock.sh index 6c08256f..ad953391 100755 --- a/scripts/generate_pylock.sh +++ b/scripts/generate_pylock.sh @@ -14,5 +14,5 @@ fi # Locking all dependencies to the same version for all supported # python versions is not possible (mostly due to numpy) # so we need to lock separately for python >=3.12 and <3.12 -pdm lock --python "~=3.10" --update-reuse -pdm lock --append --python "<3.10" --update-reuse +pdm lock --python "~=3.12" --update-reuse +pdm lock --append --python "<3.12" --update-reuse diff --git a/tox.ini b/tox.ini index 8405a11e..723d9382 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] min_version = 4.0 -env_list = py3{9,10,11,12,13} +env_list = py3{10,11,12,13} [testenv] From 9292e38e63a4f2ba92430a064bf98002b62818c1 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Thu, 9 Oct 2025 11:02:47 -0400 Subject: [PATCH 54/90] Bump min python to 3.10 in CI Signed-off-by: Samuel Monson --- .github/workflows/development.yml | 10 +++++----- .github/workflows/main.yml | 10 +++++----- .github/workflows/nightly.yml | 10 +++++----- .github/workflows/release-candidate.yml | 10 +++++----- .github/workflows/release.yml | 10 +++++----- 5 files changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/development.yml b/.github/workflows/development.yml index eabf1934..0231e33f 100644 --- a/.github/workflows/development.yml +++ b/.github/workflows/development.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -44,7 +44,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -79,7 +79,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -114,7 +114,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -149,7 +149,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 41123e7a..b5666747 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -45,7 +45,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -80,7 +80,7 @@ 
jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -115,7 +115,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -150,7 +150,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 87ff04ad..2a696861 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -61,7 +61,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -96,7 +96,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.13"] + python: ["3.10", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -142,7 +142,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.github/workflows/release-candidate.yml b/.github/workflows/release-candidate.yml index 703ca4c9..903e12ba 100644 --- a/.github/workflows/release-candidate.yml +++ b/.github/workflows/release-candidate.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -61,7 +61,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -96,7 +96,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -142,7 +142,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9f3d9d75..44b40250 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - name: Checkout code uses: actions/checkout@v4 @@ -55,7 +55,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9"] + python: ["3.10"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -71,7 +71,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -106,7 +106,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", 
"3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -141,7 +141,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python From d40a657b2b12aec11d60e581b139c574296c102e Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Thu, 9 Oct 2025 11:05:21 -0400 Subject: [PATCH 55/90] Update docs to reflect bumping min pyhton Signed-off-by: Samuel Monson --- CONTRIBUTING.md | 2 +- DEVELOPING.md | 2 +- README.md | 4 ++-- docs/install.md | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 075f41be..3396b5c3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,7 +18,7 @@ There are many ways to contribute to GuideLLM: Before contributing, ensure you have the following installed: -- Python 3.9 or higher +- Python 3.10 or higher - pip (Python package manager) - Tox - Git diff --git a/DEVELOPING.md b/DEVELOPING.md index f1c3dfc4..c99680c2 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -8,7 +8,7 @@ Thank you for your interest in contributing to GuideLLM! This document provides Before you begin, ensure you have the following installed: -- Python 3.9 or higher +- Python 3.10 or higher - pip (Python package manager) - Tox - Git diff --git a/README.md b/README.md index 2de7b4a9..83d5cc13 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ Scale Efficiently: Evaluate and Optimize Your LLM Deployments for Real-World Inference -[![GitHub Release](https://img.shields.io/github/release/vllm-project/guidellm.svg?label=Version)](https://github.com/vllm-project/guidellm/releases) [![Documentation](https://img.shields.io/badge/Documentation-8A2BE2?logo=read-the-docs&logoColor=%23ffffff&color=%231BC070)](https://github.com/vllm-project/guidellm/tree/main/docs) [![License](https://img.shields.io/github/license/vllm-project/guidellm.svg)](https://github.com/vllm-project/guidellm/blob/main/LICENSE) [![PyPI Release](https://img.shields.io/pypi/v/guidellm.svg?label=PyPI%20Release)](https://pypi.python.org/pypi/guidellm) [![Python Versions](https://img.shields.io/badge/Python-3.9--3.13-orange)](https://pypi.python.org/pypi/guidellm) [![Nightly Build](https://img.shields.io/github/actions/workflow/status/vllm-project/guidellm/nightly.yml?branch=main&label=Nightly%20Build)](https://github.com/vllm-project/guidellm/actions/workflows/nightly.yml) +[![GitHub Release](https://img.shields.io/github/release/vllm-project/guidellm.svg?label=Version)](https://github.com/vllm-project/guidellm/releases) [![Documentation](https://img.shields.io/badge/Documentation-8A2BE2?logo=read-the-docs&logoColor=%23ffffff&color=%231BC070)](https://github.com/vllm-project/guidellm/tree/main/docs) [![License](https://img.shields.io/github/license/vllm-project/guidellm.svg)](https://github.com/vllm-project/guidellm/blob/main/LICENSE) [![PyPI Release](https://img.shields.io/pypi/v/guidellm.svg?label=PyPI%20Release)](https://pypi.python.org/pypi/guidellm) [![Python Versions](https://img.shields.io/badge/Python-3.10--3.13-orange)](https://pypi.python.org/pypi/guidellm) [![Nightly Build](https://img.shields.io/github/actions/workflow/status/vllm-project/guidellm/nightly.yml?branch=main&label=Nightly%20Build)](https://github.com/vllm-project/guidellm/actions/workflows/nightly.yml) ## Overview @@ -36,7 +36,7 @@ Scale Efficiently: Evaluate and Optimize Your LLM Deployments for Real-World Inf Before installing, ensure you have the 
following prerequisites: - OS: Linux or MacOS -- Python: 3.9 – 3.13 +- Python: 3.10 – 3.13 The latest GuideLLM release can be installed using pip: diff --git a/docs/install.md b/docs/install.md index c25c465b..9af28df1 100644 --- a/docs/install.md +++ b/docs/install.md @@ -8,7 +8,7 @@ Before installing GuideLLM, ensure you have the following prerequisites: - **Operating System:** Linux or MacOS -- **Python Version:** 3.9 – 3.13 +- **Python Version:** 3.10 – 3.13 - **Pip Version:** Ensure you have the latest version of pip installed. You can upgrade pip using the following command: From eff0f46451417822288f02074b3b56850acb7e42 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Thu, 9 Oct 2025 12:00:55 -0400 Subject: [PATCH 56/90] Add force update stratagy to lockfile script Signed-off-by: Samuel Monson --- pylock.toml | 2746 +++++++++++++++++++++++++----------- scripts/generate_pylock.sh | 31 +- 2 files changed, 1958 insertions(+), 819 deletions(-) diff --git a/pylock.toml b/pylock.toml index 62d45221..11fecb48 100644 --- a/pylock.toml +++ b/pylock.toml @@ -64,11 +64,11 @@ dependencies = [ [[packages]] name = "build" -version = "1.2.2.post1" -requires-python = ">=3.8" -sdist = {name = "build-1.2.2.post1.tar.gz", url = "https://files.pythonhosted.org/packages/7d/46/aeab111f8e06793e4f0e421fcad593d547fb8313b50990f31681ee2fb1ad/build-1.2.2.post1.tar.gz", hashes = {sha256 = "b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}} +version = "1.3.0" +requires-python = ">=3.9" +sdist = {name = "build-1.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/25/1c/23e33405a7c9eac261dff640926b8b5adaed6a6eb3e1767d441ed611d0c0/build-1.3.0.tar.gz", hashes = {sha256 = "698edd0ea270bde950f53aed21f3a0135672206f3911e0176261a31e0e07b397"}} wheels = [ - {name = "build-1.2.2.post1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/84/c2/80633736cd183ee4a62107413def345f7e6e3c01563dbca1417363cf957e/build-1.2.2.post1-py3-none-any.whl",hashes = {sha256 = "1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}}, + {name = "build-1.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/cb/8c/2b30c12155ad8de0cf641d76a8b396a16d2c36bc6d50b621a62b7c4567c1/build-1.3.0-py3-none-any.whl",hashes = {sha256 = "7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4"}}, ] marker = "\"dev\" in extras" @@ -225,6 +225,18 @@ wheels = [ {name = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}}, {name = "mypy-1.15.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}}, {name = "mypy-1.15.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl",hashes = {sha256 = "5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}}, + {name = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = 
"2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}}, + {name = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}}, + {name = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}}, + {name = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}}, + {name = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}}, + {name = "mypy-1.15.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}}, + {name = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}}, + {name = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}}, + {name = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}}, + {name = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}}, + {name = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}}, + {name = 
"mypy-1.15.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}}, ] marker = "\"dev\" in extras" @@ -256,28 +268,66 @@ dependencies = [ [[packages]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" requires-python = ">=3.8" -sdist = {name = "pyyaml-6.0.2.tar.gz", url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hashes = {sha256 = "d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}} -wheels = [ - {name = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}}, - {name = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}}, - {name = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}}, - {name = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}}, - {name = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}}, - {name = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl",hashes = {sha256 = "41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}}, - {name = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl",hashes = {sha256 = "68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}}, - {name = "PyYAML-6.0.2-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl",hashes = {sha256 = "bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}}, - {name = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}}, - {name = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl",hashes = {sha256 = "c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}}, - {name = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}}, - {name = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}}, - {name = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}}, - {name = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}}, - {name = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl",hashes = {sha256 = "0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}}, - {name = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl",hashes = {sha256 = "8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}}, - {name = "PyYAML-6.0.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl",hashes = {sha256 = "ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}}, - {name = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}}, +sdist = {name = "pyyaml-6.0.3.tar.gz", url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hashes = {sha256 = "d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}} +wheels = [ + {name = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}}, + {name = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}}, + {name = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}}, + {name = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}}, + {name = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}}, + {name = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}}, + {name = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}}, + {name = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl",hashes = {sha256 = "4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}}, + {name = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl",hashes = {sha256 = "93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = 
"c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}}, + {name = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}}, + {name = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}}, + {name = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}}, + {name = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}}, + 
{name = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}}, + {name = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}}, + {name = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}}, + {name = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}}, + {name = "pyyaml-6.0.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl",hashes = {sha256 = "d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}}, + {name = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}}, + {name = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}}, + {name = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}}, + {name = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}}, + {name = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}}, + {name = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = 
"https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}}, + {name = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}}, + {name = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}}, + {name = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}}, + {name = "pyyaml-6.0.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl",hashes = {sha256 = "96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}}, + {name = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}}, + {name = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}}, + {name = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl",hashes = {sha256 = "44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}}, + {name = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}}, + {name = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}}, + {name = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = 
{sha256 = "850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}}, + {name = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}}, + {name = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}}, + {name = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}}, + {name = "pyyaml-6.0.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl",hashes = {sha256 = "8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}}, + {name = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}}, + {name = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl",hashes = {sha256 = "214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}}, + {name = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}}, + {name = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}}, + {name = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}}, + {name = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}}, + {name = 
"pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}}, + {name = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}}, + {name = "pyyaml-6.0.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl",hashes = {sha256 = "28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}}, + {name = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -286,29 +336,29 @@ dependencies = [] [[packages]] name = "pydantic" -version = "2.11.7" +version = "2.12.0" requires-python = ">=3.9" -sdist = {name = "pydantic-2.11.7.tar.gz", url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hashes = {sha256 = "d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}} +sdist = {name = "pydantic-2.12.0.tar.gz", url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hashes = {sha256 = "c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563"}} wheels = [ - {name = "pydantic-2.11.7-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl",hashes = {sha256 = "dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}}, + {name = "pydantic-2.12.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl",hashes = {sha256 = "f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f"}}, ] marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [ "annotated-types>=0.6.0", - "pydantic-core==2.33.2", - "typing-extensions>=4.12.2", - "typing-inspection>=0.4.0", + "pydantic-core==2.41.1", + "typing-extensions>=4.14.1", + "typing-inspection>=0.4.2", ] [[packages]] name = "pydantic-settings" -version = "2.10.1" +version = "2.11.0" requires-python = ">=3.9" -sdist = {name = "pydantic_settings-2.10.1.tar.gz", url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hashes = {sha256 = "06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee"}} +sdist = {name = "pydantic_settings-2.11.0.tar.gz", url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hashes = {sha256 = "d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180"}} wheels = [ - {name = 
"pydantic_settings-2.10.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl",hashes = {sha256 = "a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796"}}, + {name = "pydantic_settings-2.11.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl",hashes = {sha256 = "fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c"}}, ] marker = "\"default\" in dependency_groups" @@ -449,83 +499,131 @@ dependencies = [] [[packages]] name = "scipy" -version = "1.15.3" -requires-python = ">=3.10" -sdist = {name = "scipy-1.15.3.tar.gz", url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hashes = {sha256 = "eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf"}} -wheels = [ - {name = "scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759"}}, - {name = "scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62"}}, - {name = "scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb"}}, - {name = "scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = "0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730"}}, - {name = "scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825"}}, - {name = "scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7"}}, - {name = "scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11"}}, - {name = "scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = 
{sha256 = "f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126"}}, - {name = "scipy-1.15.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl",hashes = {sha256 = "6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e"}}, - {name = "scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb"}}, - {name = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723"}}, - {name = "scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb"}}, - {name = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4"}}, - {name = "scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5"}}, - {name = "scipy-1.15.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/37/4b/683aa044c4162e10ed7a7ea30527f2cbd92e6999c10a8ed8edb253836e9c/scipy-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"6ac6310fdbfb7aa6612408bd2f07295bcbd3fda00d2d702178434751fe48e019"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/7e/f30be3d03de07f25dc0ec926d1681fed5c732d759ac8f51079708c79e680/scipy-1.15.3-cp312-cp312-macosx_12_0_arm64.whl",hashes = {sha256 = "185cd3d6d05ca4b44a8f1595af87f9c372bb6acf9c808e99aa3e9aa03bd98cf6"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/07/9c/0ddb0d0abdabe0d181c1793db51f02cd59e4901da6f9f7848e1f96759f0d/scipy-1.15.3-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "05dc6abcd105e1a29f95eada46d4a3f251743cfd7d3ae8ddb4088047f24ea477"}}, - {name = "scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/43/0bce905a965f36c58ff80d8bea33f1f9351b05fad4beaad4eae34699b7a1/scipy-1.15.3-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "06efcba926324df1696931a57a176c80848ccd67ce6ad020c810736bfd58eb1c"}}, - {name = "scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/56/30/a6f08f84ee5b7b28b4c597aca4cbe545535c39fe911845a96414700b64ba/scipy-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "c05045d8b9bfd807ee1b9f38761993297b10b245f012b11b13b91ba8945f7e45"}}, - {name = "scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0b/1f/03f52c282437a168ee2c7c14a1a0d0781a9a4a8962d84ac05c06b4c5b555/scipy-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "271e3713e645149ea5ea3e97b57fdab61ce61333f97cfae392c28ba786f9bb49"}}, - {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/89/b1/fbb53137f42c4bf630b1ffdfc2151a62d1d1b903b249f030d2b1c0280af8/scipy-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6cfd56fc1a8e53f6e89ba3a7a7251f7396412d655bca2aa5611c8ec9a6784a1e"}}, - {name = "scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2e/2e/025e39e339f5090df1ff266d021892694dbb7e63568edcfe43f892fa381d/scipy-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0ff17c0bb1cb32952c09217d8d1eed9b53d1463e5f1dd6052c7857f83127d539"}}, - {name = "scipy-1.15.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e6/eb/3bf6ea8ab7f1503dca3a10df2e4b9c3f6b3316df07f6c0ded94b281c7101/scipy-1.15.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "52092bc0472cfd17df49ff17e70624345efece4e1a12b23783a1ac59a1b728ed"}}, -] -marker = "\"dev\" in extras" +version = "1.16.2" +requires-python = ">=3.11" +sdist = {name = "scipy-1.16.2.tar.gz", url = "https://files.pythonhosted.org/packages/4c/3b/546a6f0bfe791bbb7f8d591613454d15097e53f906308ec6f7c1ce588e8e/scipy-1.16.2.tar.gz", hashes = {sha256 = "af029b153d243a80afb6eabe40b0a07f8e35c9adc269c019f364ad747f826a6b"}} +wheels = [ + {name = "scipy-1.16.2-cp314-cp314-macosx_10_14_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/ac/ad8951250516db71619f0bd3b2eb2448db04b720a003dd98619b78b692c0/scipy-1.16.2-cp314-cp314-macosx_10_14_x86_64.whl",hashes = {sha256 = "567e77755019bb7461513c87f02bb73fb65b11f049aaaa8ca17cfaa5a5c45d77"}}, + {name = "scipy-1.16.2-cp314-cp314-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ff/f6/5779049ed119c5b503b0f3dc6d6f3f68eefc3a9190d4ad4c276f854f051b/scipy-1.16.2-cp314-cp314-macosx_12_0_arm64.whl",hashes = 
{sha256 = "17d9bb346194e8967296621208fcdfd39b55498ef7d2f376884d5ac47cec1a70"}}, + {name = "scipy-1.16.2-cp314-cp314-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/82/09/9986e410ae38bf0a0c737ff8189ac81a93b8e42349aac009891c054403d7/scipy-1.16.2-cp314-cp314-macosx_14_0_arm64.whl",hashes = {sha256 = "0a17541827a9b78b777d33b623a6dcfe2ef4a25806204d08ead0768f4e529a88"}}, + {name = "scipy-1.16.2-cp314-cp314-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/0d/ad/485cdef2d9215e2a7df6d61b81d2ac073dfacf6ae24b9ae87274c4e936ae/scipy-1.16.2-cp314-cp314-macosx_14_0_x86_64.whl",hashes = {sha256 = "d7d4c6ba016ffc0f9568d012f5f1eb77ddd99412aea121e6fa8b4c3b7cbad91f"}}, + {name = "scipy-1.16.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/a7/74/f6a852e5d581122b8f0f831f1d1e32fb8987776ed3658e95c377d308ed86/scipy-1.16.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "9702c4c023227785c779cba2e1d6f7635dbb5b2e0936cdd3a4ecb98d78fd41eb"}}, + {name = "scipy-1.16.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/f5/61d243bbc7c6e5e4e13dde9887e84a5cbe9e0f75fd09843044af1590844e/scipy-1.16.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "d1cdf0ac28948d225decdefcc45ad7dd91716c29ab56ef32f8e0d50657dffcc7"}}, + {name = "scipy-1.16.2-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/03/99/59933956331f8cc57e406cdb7a483906c74706b156998f322913e789c7e1/scipy-1.16.2-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "70327d6aa572a17c2941cdfb20673f82e536e91850a2e4cb0c5b858b690e1548"}}, + {name = "scipy-1.16.2-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c6/7d/00f825cfb47ee19ef74ecf01244b43e95eae74e7e0ff796026ea7cd98456/scipy-1.16.2-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5221c0b2a4b58aa7c4ed0387d360fd90ee9086d383bb34d9f2789fafddc8a936"}}, + {name = "scipy-1.16.2-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e4/9f/b62587029980378304ba5a8563d376c96f40b1e133daacee76efdcae32de/scipy-1.16.2-cp314-cp314-win_amd64.whl",hashes = {sha256 = "f5a85d7b2b708025af08f060a496dd261055b617d776fc05a1a1cc69e09fe9ff"}}, + {name = "scipy-1.16.2-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/82/04/7a2f1609921352c7fbee0815811b5050582f67f19983096c4769867ca45f/scipy-1.16.2-cp314-cp314-win_arm64.whl",hashes = {sha256 = "2cc73a33305b4b24556957d5857d6253ce1e2dcd67fa0ff46d87d1670b3e1e1d"}}, + {name = "scipy-1.16.2-cp314-cp314t-macosx_10_14_x86_64.whl",url = "https://files.pythonhosted.org/packages/51/b9/60929ce350c16b221928725d2d1d7f86cf96b8bc07415547057d1196dc92/scipy-1.16.2-cp314-cp314t-macosx_10_14_x86_64.whl",hashes = {sha256 = "9ea2a3fed83065d77367775d689401a703d0f697420719ee10c0780bcab594d8"}}, + {name = "scipy-1.16.2-cp314-cp314t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2a/41/ed80e67782d4bc5fc85a966bc356c601afddd175856ba7c7bb6d9490607e/scipy-1.16.2-cp314-cp314t-macosx_12_0_arm64.whl",hashes = {sha256 = "7280d926f11ca945c3ef92ba960fa924e1465f8d07ce3a9923080363390624c4"}}, + {name = "scipy-1.16.2-cp314-cp314t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c4/a3/2f673ace4090452696ccded5f5f8efffb353b8f3628f823a110e0170b605/scipy-1.16.2-cp314-cp314t-macosx_14_0_arm64.whl",hashes = {sha256 = 
"8afae1756f6a1fe04636407ef7dbece33d826a5d462b74f3d0eb82deabefd831"}}, + {name = "scipy-1.16.2-cp314-cp314t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/42/bf/59df61c5d51395066c35836b78136accf506197617c8662e60ea209881e1/scipy-1.16.2-cp314-cp314t-macosx_14_0_x86_64.whl",hashes = {sha256 = "5c66511f29aa8d233388e7416a3f20d5cae7a2744d5cee2ecd38c081f4e861b3"}}, + {name = "scipy-1.16.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/91/c3/edc7b300dc16847ad3672f1a6f3f7c5d13522b21b84b81c265f4f2760d4a/scipy-1.16.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "efe6305aeaa0e96b0ccca5ff647a43737d9a092064a3894e46c414db84bc54ac"}}, + {name = "scipy-1.16.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/c7/24d1524e72f06ff141e8d04b833c20db3021020563272ccb1b83860082a9/scipy-1.16.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "7f3a337d9ae06a1e8d655ee9d8ecb835ea5ddcdcbd8d23012afa055ab014f374"}}, + {name = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/aa/b7/5aaad984eeedd56858dc33d75efa59e8ce798d918e1033ef62d2708f2c3d/scipy-1.16.2-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bab3605795d269067d8ce78a910220262711b753de8913d3deeaedb5dded3bb6"}}, + {name = "scipy-1.16.2-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/fd/c2/e276a237acb09824822b0ada11b028ed4067fdc367a946730979feacb870/scipy-1.16.2-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b0348d8ddb55be2a844c518cd8cc8deeeb8aeba707cf834db5758fc89b476a2c"}}, + {name = "scipy-1.16.2-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c6/b4/5c18a766e8353015439f3780f5fc473f36f9762edc1a2e45da3ff5a31b21/scipy-1.16.2-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "26284797e38b8a75e14ea6631d29bda11e76ceaa6ddb6fdebbfe4c4d90faf2f9"}}, + {name = "scipy-1.16.2-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/97/30/2f9a5243008f76dfc5dee9a53dfb939d9b31e16ce4bd4f2e628bfc5d89d2/scipy-1.16.2-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "d2a4472c231328d4de38d5f1f68fdd6d28a615138f842580a8a321b5845cf779"}}, + {name = "scipy-1.16.2-cp313-cp313-macosx_10_14_x86_64.whl",url = "https://files.pythonhosted.org/packages/c1/27/c5b52f1ee81727a9fc457f5ac1e9bf3d6eab311805ea615c83c27ba06400/scipy-1.16.2-cp313-cp313-macosx_10_14_x86_64.whl",hashes = {sha256 = "84f7bf944b43e20b8a894f5fe593976926744f6c185bacfcbdfbb62736b5cc70"}}, + {name = "scipy-1.16.2-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/32/a9/15c20d08e950b540184caa8ced675ba1128accb0e09c653780ba023a4110/scipy-1.16.2-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "5c39026d12edc826a1ef2ad35ad1e6d7f087f934bb868fc43fa3049c8b8508f9"}}, + {name = "scipy-1.16.2-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4c/fc/ea36098df653cca26062a627c1a94b0de659e97127c8491e18713ca0e3b9/scipy-1.16.2-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "e52729ffd45b68777c5319560014d6fd251294200625d9d70fd8626516fc49f5"}}, + {name = "scipy-1.16.2-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/dc/6f/d0b53be55727f3e6d7c72687ec18ea6d0047cf95f1f77488b99a2bafaee1/scipy-1.16.2-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = 
"024dd4a118cccec09ca3209b7e8e614931a6ffb804b2a601839499cb88bdf925"}}, + {name = "scipy-1.16.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/11/85/bf7dab56e5c4b1d3d8eef92ca8ede788418ad38a7dc3ff50262f00808760/scipy-1.16.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7a5dc7ee9c33019973a470556081b0fd3c9f4c44019191039f9769183141a4d9"}}, + {name = "scipy-1.16.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/6a/1a927b14ddc7714111ea51f4e568203b2bb6ed59bdd036d62127c1a360c8/scipy-1.16.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "c2275ff105e508942f99d4e3bc56b6ef5e4b3c0af970386ca56b777608ce95b7"}}, + {name = "scipy-1.16.2-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c1/5f/331148ea5780b4fcc7007a4a6a6ee0a0c1507a796365cc642d4d226e1c3a/scipy-1.16.2-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "af80196eaa84f033e48444d2e0786ec47d328ba00c71e4299b602235ffef9acb"}}, + {name = "scipy-1.16.2-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/3a/e991aa9d2aec723b4a8dcfbfc8365edec5d5e5f9f133888067f1cbb7dfc1/scipy-1.16.2-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9fb1eb735fe3d6ed1f89918224e3385fbf6f9e23757cacc35f9c78d3b712dd6e"}}, + {name = "scipy-1.16.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a1/57/0f38e396ad19e41b4c5db66130167eef8ee620a49bc7d0512e3bb67e0cab/scipy-1.16.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "fda714cf45ba43c9d3bae8f2585c777f64e3f89a2e073b668b32ede412d8f52c"}}, + {name = "scipy-1.16.2-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/1b/a5/85d3e867b6822d331e26c862a91375bb7746a0b458db5effa093d34cdb89/scipy-1.16.2-cp313-cp313-win_arm64.whl",hashes = {sha256 = "2f5350da923ccfd0b00e07c3e5cfb316c1c0d6c1d864c07a72d092e9f20db104"}}, + {name = "scipy-1.16.2-cp313-cp313t-macosx_10_14_x86_64.whl",url = "https://files.pythonhosted.org/packages/09/d9/60679189bcebda55992d1a45498de6d080dcaf21ce0c8f24f888117e0c2d/scipy-1.16.2-cp313-cp313t-macosx_10_14_x86_64.whl",hashes = {sha256 = "53d8d2ee29b925344c13bda64ab51785f016b1b9617849dac10897f0701b20c1"}}, + {name = "scipy-1.16.2-cp313-cp313t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/83/be/a99d13ee4d3b7887a96f8c71361b9659ba4ef34da0338f14891e102a127f/scipy-1.16.2-cp313-cp313t-macosx_12_0_arm64.whl",hashes = {sha256 = "9e05e33657efb4c6a9d23bd8300101536abd99c85cca82da0bffff8d8764d08a"}}, + {name = "scipy-1.16.2-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bf/0a/130164a4881cec6ca8c00faf3b57926f28ed429cd6001a673f83c7c2a579/scipy-1.16.2-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "7fe65b36036357003b3ef9d37547abeefaa353b237e989c21027b8ed62b12d4f"}}, + {name = "scipy-1.16.2-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/47/a6/503ffb0310ae77fba874e10cddfc4a1280bdcca1d13c3751b8c3c2996cf8/scipy-1.16.2-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "6406d2ac6d40b861cccf57f49592f9779071655e9f75cd4f977fa0bdd09cb2e4"}}, + {name = "scipy-1.16.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/fa/c7/1147774bcea50d00c02600aadaa919facbd8537997a62496270133536ed6/scipy-1.16.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes 
= {sha256 = "ff4dc42bd321991fbf611c23fc35912d690f731c9914bf3af8f417e64aca0f21"}}, + {name = "scipy-1.16.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/6a/74/99d5415e4c3e46b2586f30cdbecb95e101c7192628a484a40dd0d163811a/scipy-1.16.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "654324826654d4d9133e10675325708fb954bc84dae6e9ad0a52e75c6b1a01d7"}}, + {name = "scipy-1.16.2-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1b/ee/a6559de7c1cc710e938c0355d9d4fbcd732dac4d0d131959d1f3b63eb29c/scipy-1.16.2-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "63870a84cd15c44e65220eaed2dac0e8f8b26bbb991456a033c1d9abfe8a94f8"}}, + {name = "scipy-1.16.2-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/4e/7b/f127a5795d5ba8ece4e0dce7d4a9fb7cb9e4f4757137757d7a69ab7d4f1a/scipy-1.16.2-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fa01f0f6a3050fa6a9771a95d5faccc8e2f5a92b4a2e5440a0fa7264a2398472"}}, + {name = "scipy-1.16.2-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3e/9f/bc81c1d1e033951eb5912cd3750cc005943afa3e65a725d2443a3b3c4347/scipy-1.16.2-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "116296e89fba96f76353a8579820c2512f6e55835d3fad7780fece04367de351"}}, + {name = "scipy-1.16.2-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/d6/5e/2cc7555fd81d01814271412a1d59a289d25f8b63208a0a16c21069d55d3e/scipy-1.16.2-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "98e22834650be81d42982360382b43b17f7ba95e0e6993e2a4f5b9ad9283a94d"}}, + {name = "scipy-1.16.2-cp312-cp312-macosx_10_14_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/8d/6396e00db1282279a4ddd507c5f5e11f606812b608ee58517ce8abbf883f/scipy-1.16.2-cp312-cp312-macosx_10_14_x86_64.whl",hashes = {sha256 = "89d6c100fa5c48472047632e06f0876b3c4931aac1f4291afc81a3644316bb0d"}}, + {name = "scipy-1.16.2-cp312-cp312-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/3b/93/ea9edd7e193fceb8eef149804491890bde73fb169c896b61aa3e2d1e4e77/scipy-1.16.2-cp312-cp312-macosx_12_0_arm64.whl",hashes = {sha256 = "ca748936cd579d3f01928b30a17dc474550b01272d8046e3e1ee593f23620371"}}, + {name = "scipy-1.16.2-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/91/4d/281fddc3d80fd738ba86fd3aed9202331180b01e2c78eaae0642f22f7e83/scipy-1.16.2-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "fac4f8ce2ddb40e2e3d0f7ec36d2a1e7f92559a2471e59aec37bd8d9de01fec0"}}, + {name = "scipy-1.16.2-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/69/40/b33b74c84606fd301b2915f0062e45733c6ff5708d121dd0deaa8871e2d0/scipy-1.16.2-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "033570f1dcefd79547a88e18bccacff025c8c647a330381064f561d43b821232"}}, + {name = "scipy-1.16.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/55/a7/22c739e2f21a42cc8f16bc76b47cff4ed54fbe0962832c589591c2abec34/scipy-1.16.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "ea3421209bf00c8a5ef2227de496601087d8f638a2363ee09af059bd70976dc1"}}, + {name = "scipy-1.16.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/53/11/a0160990b82999b45874dc60c0c183d3a3a969a563fffc476d5a9995c407/scipy-1.16.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "f66bd07ba6f84cd4a380b41d1bf3c59ea488b590a2ff96744845163309ee8e2f"}}, + {name = "scipy-1.16.2-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/96/53/7ef48a4cfcf243c3d0f1643f5887c81f29fdf76911c4e49331828e19fc0a/scipy-1.16.2-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5e9feab931bd2aea4a23388c962df6468af3d808ddf2d40f94a81c5dc38f32ef"}}, + {name = "scipy-1.16.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/49/7f/71a69e0afd460049d41c65c630c919c537815277dfea214031005f474d78/scipy-1.16.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "03dfc75e52f72cf23ec2ced468645321407faad8f0fe7b1f5b49264adbc29cb1"}}, + {name = "scipy-1.16.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/34/95/20e02ca66fb495a95fba0642fd48e0c390d0ece9b9b14c6e931a60a12dea/scipy-1.16.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "0ce54e07bbb394b417457409a64fd015be623f36e330ac49306433ffe04bc97e"}}, + {name = "scipy-1.16.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/92/ad/13646b9beb0a95528ca46d52b7babafbe115017814a611f2065ee4e61d20/scipy-1.16.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "2a8ffaa4ac0df81a0b94577b18ee079f13fecdb924df3328fc44a7dc5ac46851"}}, +] +marker = "python_version ~= \"3.12\"" [packages.tool.pdm] dependencies = [ - "numpy<2.5,>=1.23.5", + "numpy<2.6,>=1.25.2", ] [[packages]] name = "numpy" -version = "2.2.6" -requires-python = ">=3.10" -sdist = {name = "numpy-2.2.6.tar.gz", url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hashes = {sha256 = "e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"}} -wheels = [ - {name = "numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84"}}, - {name = "numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b"}}, - {name = "numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d"}}, - {name = "numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = "55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566"}}, - {name = "numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = 
"f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f"}}, - {name = "numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f"}}, - {name = "numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868"}}, - {name = "numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d"}}, - {name = "numpy-2.2.6-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl",hashes = {sha256 = "5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd"}}, - {name = "numpy-2.2.6-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40"}}, - {name = "numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"}}, - {name = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f"}}, - {name = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = 
"f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa"}}, - {name = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571"}}, - {name = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1"}}, - {name = "numpy-2.2.6-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl",hashes = {sha256 = "038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff"}}, - {name = "numpy-2.2.6-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3"}}, - {name = "numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282"}}, - {name = "numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87"}}, - {name = "numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249"}}, - {name = "numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49"}}, - {name = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"}}, - {name = "numpy-2.2.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl",hashes = {sha256 = "4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"}}, - {name = "numpy-2.2.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"}}, -] -marker = "\"default\" in dependency_groups or \"dev\" in extras" +version = "2.3.3" +requires-python = ">=3.11" +sdist = {name = "numpy-2.3.3.tar.gz", url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hashes = {sha256 = "ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029"}} +wheels = [ + {name = "numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593"}}, + {name = "numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652"}}, + {name = "numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl",hashes = {sha256 = "50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7"}}, + {name = "numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl",hashes = {sha256 = "b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a"}}, + {name = "numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe"}}, + {name = "numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421"}}, + {name = "numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = 
{sha256 = "e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021"}}, + {name = "numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf"}}, + {name = "numpy-2.3.3-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl",hashes = {sha256 = "cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0"}}, + {name = "numpy-2.3.3-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl",hashes = {sha256 = "691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8"}}, + {name = "numpy-2.3.3-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl",hashes = {sha256 = "9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe"}}, + {name = "numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00"}}, + {name = "numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a"}}, + {name = "numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl",hashes = {sha256 = "7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d"}}, + {name = "numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl",hashes = {sha256 = "533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a"}}, + {name = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54"}}, + {name = "numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e"}}, + {name = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097"}}, 
+ {name = "numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970"}}, + {name = "numpy-2.3.3-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl",hashes = {sha256 = "1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5"}}, + {name = "numpy-2.3.3-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f"}}, + {name = "numpy-2.3.3-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b"}}, + {name = "numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf"}}, + {name = "numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7"}}, + {name = "numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6"}}, + {name = "numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl",hashes = {sha256 = "d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7"}}, + {name = "numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c"}}, + {name = "numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93"}}, + {name = "numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae"}}, + {name = "numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86"}}, + {name = "numpy-2.3.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl",hashes = {sha256 = "9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8"}}, + {name = "numpy-2.3.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf"}}, + {name = "numpy-2.3.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5"}}, + {name = "numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc"}}, + {name = "numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc"}}, + {name = "numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl",hashes = {sha256 = "40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b"}}, + {name = "numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl",hashes = {sha256 = "6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19"}}, + {name = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30"}}, + {name = "numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e"}}, + {name = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3"}}, + {name = "numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea"}}, + {name = "numpy-2.3.3-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl",hashes = {sha256 = "a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd"}}, + {name = "numpy-2.3.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d"}}, + {name = "numpy-2.3.3-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1"}}, + {name = "numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf"}}, + {name = "numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25"}}, + {name = "numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe"}}, + {name = "numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl",hashes = {sha256 = "067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b"}}, + {name = "numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8"}}, + {name = "numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20"}}, + {name = "numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea"}}, + {name = "numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7"}}, + {name = "numpy-2.3.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl",hashes = {sha256 = "5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf"}}, + {name = "numpy-2.3.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb"}}, + {name = "numpy-2.3.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5"}}, +] +marker = "python_version ~= \"3.12\"" [packages.tool.pdm] dependencies = [] @@ -576,22 +674,59 @@ dependencies = [ [[packages]] name = "tiktoken" -version = "0.11.0" +version = "0.12.0" requires-python = ">=3.9" -sdist = {name = "tiktoken-0.11.0.tar.gz", url = "https://files.pythonhosted.org/packages/a7/86/ad0155a37c4f310935d5ac0b1ccf9bdb635dcb906e0a9a26b616dd55825a/tiktoken-0.11.0.tar.gz", hashes = {sha256 = "3c518641aee1c52247c2b97e74d8d07d780092af79d5911a6ab5e79359d9b06a"}} -wheels = [ - {name = "tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/cd/a9034bcee638716d9310443818d73c6387a6a96db93cbcb0819b77f5b206/tiktoken-0.11.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "a5f3f25ffb152ee7fec78e90a5e5ea5b03b4ea240beed03305615847f7a6ace2"}}, - {name = "tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/91/9922b345f611b4e92581f234e64e9661e1c524875c8eadd513c4b2088472/tiktoken-0.11.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "7dc6e9ad16a2a75b4c4be7208055a1f707c9510541d94d9cc31f7fbdc8db41d8"}}, - {name = "tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d0/9d/49cd047c71336bc4b4af460ac213ec1c457da67712bde59b892e84f1859f/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "5a0517634d67a8a48fd4a4ad73930c3022629a85a217d256a6e9b8b47439d1e4"}}, - {name = "tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/52/d5/a0dcdb40dd2ea357e83cb36258967f0ae96f5dd40c722d6e382ceee6bba9/tiktoken-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "7fb4effe60574675118b73c6fbfd3b5868e5d7a1f570d6cc0d18724b09ecf318"}}, - {name = "tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3b/17/a0fc51aefb66b7b5261ca1314afa83df0106b033f783f9a7bcbe8e741494/tiktoken-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "94f984c9831fd32688aef4348803b0905d4ae9c432303087bae370dc1381a2b8"}}, - {name = "tiktoken-0.11.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/50/79/bcf350609f3a10f09fe4fc207f132085e497fdd3612f3925ab24d86a0ca0/tiktoken-0.11.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = 
"2177ffda31dec4023356a441793fed82f7af5291120751dee4d696414f54db0c"}}, - {name = "tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/9e/eceddeffc169fc75fe0fd4f38471309f11cb1906f9b8aa39be4f5817df65/tiktoken-0.11.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "fd9e6b23e860973cf9526544e220b223c60badf5b62e80a33509d6d40e6c8f5d"}}, - {name = "tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4f/cf/5f02bfefffdc6b54e5094d2897bc80efd43050e5b09b576fd85936ee54bf/tiktoken-0.11.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "6a76d53cee2da71ee2731c9caa747398762bda19d7f92665e882fef229cb0b5b"}}, - {name = "tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/65/8e/c769b45ef379bc360c9978c4f6914c79fd432400a6733a8afc7ed7b0726a/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "6ef72aab3ea240646e642413cb363b73869fed4e604dcfd69eec63dc54d603e8"}}, - {name = "tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/2d/4d77f6feb9292bfdd23d5813e442b3bba883f42d0ac78ef5fdc56873f756/tiktoken-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "7f929255c705efec7a28bf515e29dc74220b2f07544a8c81b8d69e8efc4578bd"}}, - {name = "tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/65/7ff0a65d3bb0fc5a1fb6cc71b03e0f6e71a68c5eea230d1ff1ba3fd6df49/tiktoken-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "61f1d15822e4404953d499fd1dcc62817a12ae9fb1e4898033ec8fe3915fdf8e"}}, - {name = "tiktoken-0.11.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f5/6e/5b71578799b72e5bdcef206a214c3ce860d999d579a3b56e74a6c8989ee2/tiktoken-0.11.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "45927a71ab6643dfd3ef57d515a5db3d199137adf551f66453be098502838b0f"}}, +sdist = {name = "tiktoken-0.12.0.tar.gz", url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hashes = {sha256 = "b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931"}} +wheels = [ + {name = "tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646"}}, + {name = "tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88"}}, + {name = "tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl",hashes = {sha256 = "285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff"}}, + {name = "tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl",hashes = 
{sha256 = "d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830"}}, + {name = "tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b"}}, + {name = "tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b"}}, + {name = "tiktoken-0.12.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3"}}, + {name = "tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365"}}, + {name = "tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e"}}, + {name = "tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl",hashes = {sha256 = "dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63"}}, + {name = "tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl",hashes = {sha256 = "584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0"}}, + {name = "tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a"}}, + {name = "tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0"}}, + {name = "tiktoken-0.12.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71"}}, + {name = "tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3"}}, + {name = "tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160"}}, + {name = "tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl",hashes = {sha256 = "01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa"}}, + {name = "tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl",hashes = {sha256 = "4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be"}}, + {name = "tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a"}}, + {name = "tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3"}}, + {name = "tiktoken-0.12.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697"}}, + {name = "tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16"}}, + {name = "tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a"}}, + {name = "tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl",hashes = {sha256 = "fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27"}}, + {name = "tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl",hashes = {sha256 = "06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb"}}, + {name = "tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e"}}, + {name = "tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25"}}, + {name = "tiktoken-0.12.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f"}}, + {name = "tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8"}}, + {name = "tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b"}}, + {name = "tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl",hashes = {sha256 = "65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37"}}, + {name = "tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl",hashes = {sha256 = "edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad"}}, + {name = "tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5"}}, + {name = "tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3"}}, + {name = "tiktoken-0.12.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd"}}, + {name = "tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl",hashes = {sha256 = "6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb"}}, + {name = "tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = 
"c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa"}}, + {name = "tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl",hashes = {sha256 = "f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc"}}, + {name = "tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl",hashes = {sha256 = "47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded"}}, + {name = "tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd"}}, + {name = "tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967"}}, + {name = "tiktoken-0.12.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def"}}, + {name = "tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl",hashes = {sha256 = "3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970"}}, + {name = "tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16"}}, + {name = "tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl",hashes = {sha256 = "cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030"}}, + {name = "tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl",hashes = {sha256 = "6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134"}}, + {name = "tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a"}}, + {name = "tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892"}}, + {name = "tiktoken-0.12.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1"}}, ] marker = "\"recommended\" in extras" @@ -627,11 +762,11 @@ dependencies = [ [[packages]] name = "types-pyyaml" -version = "6.0.12.20250516" +version = "6.0.12.20250915" requires-python = ">=3.9" -sdist = {name = "types_pyyaml-6.0.12.20250516.tar.gz", url = "https://files.pythonhosted.org/packages/4e/22/59e2aeb48ceeee1f7cd4537db9568df80d62bdb44a7f9e743502ea8aab9c/types_pyyaml-6.0.12.20250516.tar.gz", hashes = {sha256 = "9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba"}} +sdist = {name = "types_pyyaml-6.0.12.20250915.tar.gz", url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hashes = {sha256 = "0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3"}} wheels = [ - {name = "types_pyyaml-6.0.12.20250516-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/99/5f/e0af6f7f6a260d9af67e1db4f54d732abad514252a7a378a6c4d17dd1036/types_pyyaml-6.0.12.20250516-py3-none-any.whl",hashes = {sha256 = "8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530"}}, + {name = "types_pyyaml-6.0.12.20250915-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl",hashes = {sha256 = "e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6"}}, ] marker = "\"dev\" in extras" @@ -640,11 +775,11 @@ dependencies = [] [[packages]] name = "types-requests" -version = "2.32.4.20250611" +version = "2.32.4.20250913" requires-python = ">=3.9" -sdist = {name = "types_requests-2.32.4.20250611.tar.gz", url = "https://files.pythonhosted.org/packages/6d/7f/73b3a04a53b0fd2a911d4ec517940ecd6600630b559e4505cc7b68beb5a0/types_requests-2.32.4.20250611.tar.gz", hashes = {sha256 = "741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826"}} +sdist = {name = "types_requests-2.32.4.20250913.tar.gz", url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hashes = {sha256 = "abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d"}} wheels = [ - {name = "types_requests-2.32.4.20250611-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/3d/ea/0be9258c5a4fa1ba2300111aa5a0767ee6d18eb3fd20e91616c12082284d/types_requests-2.32.4.20250611-py3-none-any.whl",hashes = {sha256 = "ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072"}}, + {name = "types_requests-2.32.4.20250913-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl",hashes = {sha256 = "78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1"}}, ] marker = "\"dev\" in extras" @@ -671,6 +806,18 @@ wheels = [ {name = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes 
= {sha256 = "86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}}, {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}}, {name = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}}, + {name = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}}, + {name = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}}, + {name = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}}, + {name = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}}, + {name = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}}, + {name = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}}, + {name = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}}, + {name = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/5a/62d5800358a78cc25c8a6c72ef8b10851bdb8cca22e14d9c74167b7f86da/uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}}, + {name = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/f3/96/63695e0ebd7da6c741ccd4489b5947394435e198a1382349c17b1146bb97/uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}}, + {name = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/61/e0/f0f8ec84979068ffae132c58c79af1de9cceeb664076beea86d941af1a30/uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}}, + {name = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/bf/fe/5e94a977d058a54a19df95f12f7161ab6e323ad49f4dabc28822eb2df7ea/uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}}, + {name = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/dd/c7179618e46092a77e036650c1f056041a028a35c4d76945089fcfc38af8/uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}}, ] marker = "\"default\" in dependency_groups" @@ -679,11 +826,11 @@ dependencies = [] [[packages]] name = "datasets" -version = "4.0.0" +version = "4.1.1" requires-python = ">=3.9.0" -sdist = {name = "datasets-4.0.0.tar.gz", url = "https://files.pythonhosted.org/packages/e3/9d/348ed92110ba5f9b70b51ca1078d4809767a835aa2b7ce7e74ad2b98323d/datasets-4.0.0.tar.gz", hashes = {sha256 = "9657e7140a9050db13443ba21cb5de185af8af944479b00e7ff1e00a61c8dbf1"}} +sdist = {name = "datasets-4.1.1.tar.gz", url = "https://files.pythonhosted.org/packages/91/a4/73f8e6ef52c535e1d20d5b2ca83bfe6de399d8b8b8a61ccc8d63d60735aa/datasets-4.1.1.tar.gz", hashes = {sha256 = "7d8d5ba8b12861d2c44bfff9c83484ebfafff1ff553371e5901a8d3aab5450e2"}} wheels = [ - {name = "datasets-4.0.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/eb/62/eb8157afb21bd229c864521c1ab4fa8e9b4f1b06bafdd8c4668a7a31b5dd/datasets-4.0.0-py3-none-any.whl",hashes = {sha256 = "7ef95e62025fd122882dbce6cb904c8cd3fbc829de6669a5eb939c77d50e203d"}}, + {name = "datasets-4.1.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/f4/c8/09012ac195a0aab58755800d2efdc0e7d5905053509f12cb5d136c911cda/datasets-4.1.1-py3-none-any.whl",hashes = {sha256 = "62e4f6899a36be9ec74a7e759a6951253cc85b3fcfa0a759b0efa8353b149dac"}}, ] marker = "\"default\" in dependency_groups" @@ -691,14 +838,14 @@ marker = "\"default\" in dependency_groups" dependencies = [ "filelock", "numpy>=1.17", - "pyarrow>=15.0.0", - "dill<0.3.9,>=0.3.0", + "pyarrow>=21.0.0", + "dill<0.4.1,>=0.3.0", "pandas", "requests>=2.32.2", "tqdm>=4.66.3", "xxhash", "multiprocess<0.70.17", - "fsspec[http]<=2025.3.0,>=2023.1.0", + "fsspec[http]<=2025.9.0,>=2023.1.0", "huggingface-hub>=0.24.0", "packaging", "pyyaml>=5.1", @@ -791,6 +938,23 @@ wheels = [ {name = "msgpack-1.1.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl",hashes = {sha256 = "1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029"}}, {name = "msgpack-1.1.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl",hashes = 
{sha256 = "1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b"}}, {name = "msgpack-1.1.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69"}}, + {name = "msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c"}}, + {name = "msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0"}}, + {name = "msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296"}}, + {name = "msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef"}}, + {name = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c"}}, + {name = "msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e"}}, + {name = "msgpack-1.1.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl",hashes = {sha256 = "602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e"}}, + {name = "msgpack-1.1.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68"}}, + {name = "msgpack-1.1.2-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl",hashes = {sha256 = "86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406"}}, + {name = "msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2"}}, + {name = "msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87"}}, + {name = "msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/e5/c2241de64bfceac456b140737812a2ab310b10538a7b34a1d393b748e095/msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251"}}, + {name = "msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a"}}, + {name = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0e/74/2957703f0e1ef20637d6aead4fbb314330c26f39aa046b348c7edcf6ca6b/msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f"}}, + {name = "msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a5/09/3bfc12aa90f77b37322fc33e7a8a7c29ba7c8edeadfa27664451801b9860/msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f"}}, + {name = "msgpack-1.1.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/4b/4f/05fcebd3b4977cb3d840f7ef6b77c51f8582086de5e642f3fefee35c86fc/msgpack-1.1.2-cp310-cp310-win32.whl",hashes = {sha256 = "e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9"}}, + {name = "msgpack-1.1.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d0/3e/b4547e3a34210956382eed1c85935fff7e0f9b98be3106b3745d7dec9c5e/msgpack-1.1.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa"}}, ] marker = "\"default\" in dependency_groups" @@ -861,6 +1025,42 @@ wheels = [ {name = "pillow-11.3.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl",hashes = {sha256 = "7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"}}, {name = "pillow-11.3.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"}}, {name = "pillow-11.3.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"}}, + 
{name = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl",url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl",hashes = {sha256 = "1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}}, + {name = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}}, + {name = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}}, + {name = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}}, + {name = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}}, + {name = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}}, + {name = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}}, + {name = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"}}, + {name = "pillow-11.3.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl",hashes = {sha256 = "b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"}}, + {name = "pillow-11.3.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"}}, + {name = "pillow-11.3.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = 
"30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}}, + {name = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}}, + {name = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}}, + {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}}, + {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}}, + {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}}, + {name = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}}, + {name = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}}, + {name = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl",hashes = {sha256 = "1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}}, + {name = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}}, + {name = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}}, + {name 
= "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}}, + {name = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}}, + {name = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}}, + {name = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}}, + {name = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"}}, + {name = "pillow-11.3.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl",hashes = {sha256 = "89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"}}, + {name = "pillow-11.3.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"}}, + {name = "pillow-11.3.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}}, + {name = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}}, + {name = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}}, + {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}}, + {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}}, + {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}}, + {name = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}}, + {name = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}}, ] marker = "\"default\" in dependency_groups" @@ -869,16 +1069,16 @@ dependencies = [] [[packages]] name = "protobuf" -version = "6.31.1" +version = "6.32.1" requires-python = ">=3.9" -sdist = {name = "protobuf-6.31.1.tar.gz", url = "https://files.pythonhosted.org/packages/52/f3/b9655a711b32c19720253f6f06326faf90580834e2e83f840472d752bc8b/protobuf-6.31.1.tar.gz", hashes = {sha256 = "d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a"}} +sdist = {name = "protobuf-6.32.1.tar.gz", url = "https://files.pythonhosted.org/packages/fa/a4/cc17347aa2897568beece2e674674359f911d6fe21b0b8d6268cd42727ac/protobuf-6.32.1.tar.gz", hashes = {sha256 = "ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d"}} wheels = [ - {name = "protobuf-6.31.1-cp310-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/f3/6f/6ab8e4bf962fd5570d3deaa2d5c38f0a363f57b4501047b5ebeb83ab1125/protobuf-6.31.1-cp310-abi3-win32.whl",hashes = {sha256 = "7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9"}}, - {name = "protobuf-6.31.1-cp310-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/3a/b15c4347dd4bf3a1b0ee882f384623e2063bb5cf9fa9d57990a4f7df2fb6/protobuf-6.31.1-cp310-abi3-win_amd64.whl",hashes = {sha256 = "426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447"}}, - {name = "protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/6a/c9/b9689a2a250264a84e66c46d8862ba788ee7a641cdca39bccf64f59284b7/protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl",hashes = {sha256 = "6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402"}}, - {name = "protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl",hashes = {sha256 = "a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39"}}, - {name = "protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl",hashes = {sha256 = "4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6"}}, - {name = "protobuf-6.31.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl",hashes = {sha256 = "720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e"}}, + {name = "protobuf-6.32.1-cp310-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/c0/98/645183ea03ab3995d29086b8bf4f7562ebd3d10c9a4b14ee3f20d47cfe50/protobuf-6.32.1-cp310-abi3-win32.whl",hashes = {sha256 = "a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085"}}, + {name = "protobuf-6.32.1-cp310-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8c/f3/6f58f841f6ebafe076cebeae33fc336e900619d34b1c93e4b5c97a81fdfa/protobuf-6.32.1-cp310-abi3-win_amd64.whl",hashes = {sha256 = "b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1"}}, + {name = "protobuf-6.32.1-cp39-abi3-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/10/56/a8a3f4e7190837139e68c7002ec749190a163af3e330f65d90309145a210/protobuf-6.32.1-cp39-abi3-macosx_10_9_universal2.whl",hashes = {sha256 = "d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281"}}, + {name = "protobuf-6.32.1-cp39-abi3-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/3f/be/8dd0a927c559b37d7a6c8ab79034fd167dcc1f851595f2e641ad62be8643/protobuf-6.32.1-cp39-abi3-manylinux2014_aarch64.whl",hashes = {sha256 = "2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4"}}, + {name = "protobuf-6.32.1-cp39-abi3-manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/f6/88d77011b605ef979aace37b7703e4eefad066f7e84d935e5a696515c2dd/protobuf-6.32.1-cp39-abi3-manylinux2014_x86_64.whl",hashes = {sha256 = "b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710"}}, + {name = "protobuf-6.32.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/97/b7/15cc7d93443d6c6a84626ae3258a91f4c6ac8c0edd5df35ea7658f71b79c/protobuf-6.32.1-py3-none-any.whl",hashes = {sha256 = "2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346"}}, ] marker = "\"default\" in dependency_groups" @@ -887,11 +1087,11 @@ dependencies = [] [[packages]] name = "rich" -version = "14.0.0" +version = "14.2.0" requires-python = ">=3.8.0" -sdist = {name = "rich-14.0.0.tar.gz", url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hashes = {sha256 = "82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}} +sdist = {name = "rich-14.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hashes = {sha256 = "73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4"}} wheels = [ - {name = "rich-14.0.0-py3-none-any.whl",url = 
"https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl",hashes = {sha256 = "1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}}, + {name = "rich-14.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl",hashes = {sha256 = "76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd"}}, ] marker = "\"default\" in dependency_groups" @@ -899,7 +1099,6 @@ marker = "\"default\" in dependency_groups" dependencies = [ "markdown-it-py>=2.2.0", "pygments<3.0.0,>=2.13.0", - "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", ] [[packages]] @@ -929,24 +1128,24 @@ dependencies = [ [[packages]] name = "transformers" -version = "4.53.1" +version = "4.57.0" requires-python = ">=3.9.0" -sdist = {name = "transformers-4.53.1.tar.gz", url = "https://files.pythonhosted.org/packages/9f/2c/68a0024c311db41bb92d4ec17d22e90b7406a4d28aa18d87662f2bbebcd9/transformers-4.53.1.tar.gz", hashes = {sha256 = "da5a9f66ad480bc2a7f75bc32eaf735fd20ac56af4325ca4ce994021ceb37710"}} +sdist = {name = "transformers-4.57.0.tar.gz", url = "https://files.pythonhosted.org/packages/f3/5c/a22c39dac2687f3fe2a6b97e2c1ae516e91cd4d3976a7a2b7c24ff2fae48/transformers-4.57.0.tar.gz", hashes = {sha256 = "d045753f3d93f9216e693cdb168698dfd2e9d3aad1bb72579a5d60ebf1545a8b"}} wheels = [ - {name = "transformers-4.53.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/8d/10/8cef2288810a3210659eb3a20711e8387cc35a881a7762ae387806e2d651/transformers-4.53.1-py3-none-any.whl",hashes = {sha256 = "c84f3c3e41c71fdf2c60c8a893e1cd31191b0cb463385f4c276302d2052d837b"}}, + {name = "transformers-4.57.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/e5/2b/4d2708ac1ff5cd708b6548f4c5812d0ae40d1c28591c4c1c762b6dbdef2d/transformers-4.57.0-py3-none-any.whl",hashes = {sha256 = "9d7c6d098c026e40d897e017ed1f481ab803cbac041021dbc6ae6100e4949b55"}}, ] marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [ "filelock", - "huggingface-hub<1.0,>=0.30.0", + "huggingface-hub<1.0,>=0.34.0", "numpy>=1.17", "packaging>=20.0", "pyyaml>=5.1", "regex!=2019.12.17", "requests", - "tokenizers<0.22,>=0.21", + "tokenizers<=0.23.0,>=0.22.0", "safetensors>=0.4.3", "tqdm>=4.27", ] @@ -1000,47 +1199,107 @@ dependencies = [ [[packages]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.1" requires-python = ">=3.9" -sdist = {name = "pydantic_core-2.33.2.tar.gz", url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hashes = {sha256 = "7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}} -wheels = [ - {name = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl",hashes = {sha256 = "1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}}, - {name = 
"pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl",hashes = {sha256 = "c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl",hashes = {sha256 = "5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl",hashes = {sha256 = "65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl",hashes = {sha256 = "52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}}, - {name = 
"pydantic_core-2.33.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}}, - {name = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl",hashes = {sha256 = "e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}}, - {name = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}}, - {name = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}}, - {name = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl",hashes = {sha256 = "a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl",hashes = {sha256 = "db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl",hashes = {sha256 = "fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl",hashes = {sha256 = "5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl",hashes = {sha256 = "9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}}, - {name = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}}, +sdist = {name = "pydantic_core-2.41.1.tar.gz", url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hashes = {sha256 = "1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f"}} +wheels = [ + {name = "pydantic_core-2.41.1-cp314-cp314-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/41/12/cec246429ddfa2778d2d6301eca5362194dc8749ecb19e621f2f65b5090f/pydantic_core-2.41.1-cp314-cp314-macosx_10_12_x86_64.whl",hashes = {sha256 = "05226894a26f6f27e1deb735d7308f74ef5fa3a6de3e0135bb66cdcaee88f64b"}}, 
+ {name = "pydantic_core-2.41.1-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/20/39/baba47f8d8b87081302498e610aefc37142ce6a1cc98b2ab6b931a162562/pydantic_core-2.41.1-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "85ff7911c6c3e2fd8d3779c50925f6406d770ea58ea6dde9c230d35b52b16b4a"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/32/9a3d87cae2c75a5178334b10358d631bd094b916a00a5993382222dbfd92/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "47f1f642a205687d59b52dc1a9a607f45e588f5a2e9eeae05edd80c7a8c47674"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/27/42/a96c9d793a04cf2a9773bff98003bb154087b94f5530a2ce6063ecfec583/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "df11c24e138876ace5ec6043e5cae925e34cf38af1a1b3d63589e8f7b5f5cdc4"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3e/8d/028c4b7d157a005b1f52c086e2d4b0067886b213c86220c1153398dbdf8f/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "7f0bf7f5c8f7bf345c527e8a0d72d6b26eda99c1227b0c34e7e59e181260de31"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/08/f7/ee64cda8fcc9ca3f4716e6357144f9ee71166775df582a1b6b738bf6da57/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "82b887a711d341c2c47352375d73b029418f55b20bd7815446d175a70effa706"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/c0/e8ec05f0f5ee7a3656973ad9cd3bc73204af99f6512c1a4562f6fb4b3f7d/pydantic_core-2.41.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b5f1d5d6bbba484bdf220c72d8ecd0be460f4bd4c5e534a541bb2cd57589fb8b"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/0a/25/d77a73ff24e2e4fcea64472f5e39b0402d836da9b08b5361a734d0153023/pydantic_core-2.41.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "2bf1917385ebe0f968dc5c6ab1375886d56992b93ddfe6bf52bff575d03662be"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/66/45/4a4ebaaae12a740552278d06fe71418c0f2869537a369a89c0e6723b341d/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_aarch64.whl",hashes = {sha256 = "4f94f3ab188f44b9a73f7295663f3ecb8f2e2dd03a69c8f2ead50d37785ecb04"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/da/6d/b727ce1022f143194a36593243ff244ed5a1eb3c9122296bf7e716aa37ba/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_armv7l.whl",hashes = {sha256 = "3925446673641d37c30bd84a9d597e49f72eacee8b43322c8999fa17d5ae5bc4"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/6f/8c/02df9d8506c427787059f87c6c7253435c6895e12472a652d9616ee0fc95/pydantic_core-2.41.1-cp314-cp314-musllinux_1_1_x86_64.whl",hashes = {sha256 = 
"49bd51cc27adb980c7b97357ae036ce9b3c4d0bb406e84fbe16fb2d368b602a8"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/98/67/0cf429a7d6802536941f430e6e3243f6d4b68f41eeea4b242372f1901794/pydantic_core-2.41.1-cp314-cp314-win32.whl",hashes = {sha256 = "a31ca0cd0e4d12ea0df0077df2d487fc3eb9d7f96bbb13c3c5b88dcc21d05159"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/38/60/742fef93de5d085022d2302a6317a2b34dbfe15258e9396a535c8a100ae7/pydantic_core-2.41.1-cp314-cp314-win_amd64.whl",hashes = {sha256 = "1b5c4374a152e10a22175d7790e644fbd8ff58418890e07e2073ff9d4414efae"}}, + {name = "pydantic_core-2.41.1-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/31/38/cdd8ccb8555ef7720bd7715899bd6cfbe3c29198332710e1b61b8f5dd8b8/pydantic_core-2.41.1-cp314-cp314-win_arm64.whl",hashes = {sha256 = "4fee76d757639b493eb600fba668f1e17475af34c17dd61db7a47e824d464ca9"}}, + {name = "pydantic_core-2.41.1-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e7/7e/8ac10ccb047dc0221aa2530ec3c7c05ab4656d4d4bd984ee85da7f3d5525/pydantic_core-2.41.1-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "f9b9c968cfe5cd576fdd7361f47f27adeb120517e637d1b189eea1c3ece573f4"}}, + {name = "pydantic_core-2.41.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/e4/7d9791efeb9c7d97e7268f8d20e0da24d03438a7fa7163ab58f1073ba968/pydantic_core-2.41.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f1ebc7ab67b856384aba09ed74e3e977dded40e693de18a4f197c67d0d4e6d8e"}}, + {name = "pydantic_core-2.41.1-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2d/c3/3f6e6b2342ac11ac8cd5cb56e24c7b14afa27c010e82a765ffa5f771884a/pydantic_core-2.41.1-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "8ae0dc57b62a762985bc7fbf636be3412394acc0ddb4ade07fe104230f1b9762"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/27/8a/6d54198536a90a37807d31a156642aae7a8e1263ed9fe6fc6245defe9332/pydantic_core-2.41.1-cp313-cp313-macosx_10_12_x86_64.whl",hashes = {sha256 = "70e790fce5f05204ef4403159857bfcd587779da78627b0babb3654f75361ebf"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4f/2e/4784fd7b22ac9c8439db25bf98ffed6853d01e7e560a346e8af821776ccc/pydantic_core-2.41.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "9cebf1ca35f10930612d60bd0f78adfacee824c30a880e3534ba02c207cceceb"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/92/31eb0748059ba5bd0aa708fb4bab9fcb211461ddcf9e90702a6542f22d0d/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "170406a37a5bc82c22c3274616bf6f17cc7df9c4a0a0a50449e559cb755db669"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/ab/91/946527792275b5c4c7dde4cfa3e81241bf6900e9fee74fb1ba43e0c0f1ab/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "12d4257fc9187a0ccd41b8b327d6a4e57281ab75e11dda66a9148ef2e1fb712f"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/31/5d/a35c5d7b414e5c0749f1d9f0d159ee2ef4bab313f499692896b918014ee3/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "a75a33b4db105dd1c8d57839e17ee12db8d5ad18209e792fa325dbb4baeb00f4"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/21/4d/8713737c689afa57ecfefe38db78259d4484c97aa494979e6a9d19662584/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "08a589f850803a74e0fcb16a72081cafb0d72a3cdda500106942b07e76b7bf62"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f6/ec/929f9a3a5ed5cda767081494bacd32f783e707a690ce6eeb5e0730ec4986/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "7a97939d6ea44763c456bd8a617ceada2c9b96bb5b8ab3dfa0d0827df7619014"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/26/55/a33f459d4f9cc8786d9db42795dbecc84fa724b290d7d71ddc3d7155d46a/pydantic_core-2.41.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "d2ae423c65c556f09569524b80ffd11babff61f33055ef9773d7c9fabc11ed8d"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/77/af/d5c6959f8b089f2185760a2779079e3c2c411bfc70ea6111f58367851629/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_aarch64.whl",hashes = {sha256 = "4dc703015fbf8764d6a8001c327a87f1823b7328d40b47ce6000c65918ad2b4f"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/58/e5/2c19bd2a14bffe7fabcf00efbfbd3ac430aaec5271b504a938ff019ac7be/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_armv7l.whl",hashes = {sha256 = "968e4ffdfd35698a5fe659e5e44c508b53664870a8e61c8f9d24d3d145d30257"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/ef/e0870ccda798c54e6b100aff3c4d49df5458fd64217e860cb9c3b0a403f4/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_x86_64.whl",hashes = {sha256 = "fff2b76c8e172d34771cd4d4f0ade08072385310f214f823b5a6ad4006890d32"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/b1/4b/c3b991d95f5deb24d0bd52e47bcf716098fa1afe0ce2d4bd3125b38566ba/pydantic_core-2.41.1-cp313-cp313-win32.whl",hashes = {sha256 = "a38a5263185407ceb599f2f035faf4589d57e73c7146d64f10577f6449e8171d"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a7/ce/5c316fd62e01f8d6be1b7ee6b54273214e871772997dc2c95e204997a055/pydantic_core-2.41.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b42ae7fd6760782c975897e1fdc810f483b021b32245b0105d40f6e7a3803e4b"}}, + {name = "pydantic_core-2.41.1-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/29/41/902640cfd6a6523194123e2c3373c60f19006447f2fb06f76de4e8466c5b/pydantic_core-2.41.1-cp313-cp313-win_arm64.whl",hashes = {sha256 = "ad4111acc63b7384e205c27a2f15e23ac0ee21a9d77ad6f2e9cb516ec90965fb"}}, + {name = "pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/04/04/28b040e88c1b89d851278478842f0bdf39c7a05da9e850333c6c8cbe7dfa/pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl",hashes 
= {sha256 = "440d0df7415b50084a4ba9d870480c16c5f67c0d1d4d5119e3f70925533a0edc"}}, + {name = "pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d6/58/b41dd3087505220bb58bc81be8c3e8cbc037f5710cd3c838f44f90bdd704/pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "71eaa38d342099405dae6484216dcf1e8e4b0bebd9b44a4e08c9b43db6a2ab67"}}, + {name = "pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d7/b8/760f23754e40bf6c65b94a69b22c394c24058a0ef7e2aa471d2e39219c1a/pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "555ecf7e50f1161d3f693bc49f23c82cf6cdeafc71fa37a06120772a09a38795"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl",hashes = {sha256 = "db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl",url = 
"https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl",hashes = {sha256 = "bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl",hashes = {sha256 = "2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl",hashes = {sha256 = "80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl",hashes = {sha256 = "83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50"}}, + {name = "pydantic_core-2.41.1-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl",hashes = {sha256 = "c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/f6/a9/ec440f02e57beabdfd804725ef1e38ac1ba00c49854d298447562e119513/pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl",hashes = {sha256 = "4f276a6134fe1fc1daa692642a3eaa2b7b858599c49a7610816388f5e37566a1"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f0/f9/6bc15bacfd8dcfc073a1820a564516d9c12a435a9a332d4cbbfd48828ddd/pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "07588570a805296ece009c59d9a679dc08fab72fb337365afb4f3a14cfbfc176"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/38/8a/d9edcdcdfe80bade17bed424284427c08bea892aaec11438fa52eaeaf79c/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "28527e4b53400cd60ffbd9812ccb2b5135d042129716d71afd7e45bf42b855c0"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/d5/b3/ff225c6d49fba4279de04677c1c876fc3dc6562fd0c53e9bfd66f58c51a8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = 
"46a1c935c9228bad738c8a41de06478770927baedf581d172494ab36a6b96575"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/47/ba/183e8c0be4321314af3fd1ae6bfc7eafdd7a49bdea5da81c56044a207316/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "447ddf56e2b7d28d200d3e9eafa936fe40485744b5a824b67039937580b3cb20"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/57/c5/aab61e94fd02f45c65f1f8c9ec38bb3b33fbf001a1837c74870e97462572/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "63892ead40c1160ac860b5debcc95c95c5a0035e543a8b5a4eac70dd22e995f4"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/4f/3aaa3bd1ea420a15acc42d7d3ccb3b0bbc5444ae2f9dbc1959f8173e16b8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "f4a9543ca355e6df8fbe9c83e9faab707701e9103ae857ecb40f1c0cf8b0e94d"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/58/bd/e3975cdebe03ec080ef881648de316c73f2a6be95c14fc4efb2f7bdd0d41/pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "f2611bdb694116c31e551ed82e20e39a90bea9b7ad9e54aaf2d045ad621aa7a1"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/2b/b8/6b7e7217f147d3b3105b57fb1caec3c4f667581affdfaab6d1d277e1f749/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl",hashes = {sha256 = "fecc130893a9b5f7bfe230be1bb8c61fe66a19db8ab704f808cb25a82aad0bc9"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/fe/7b/239c2fe76bd8b7eef9ae2140d737368a3c6fea4fd27f8f6b4cde6baa3ce9/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl",hashes = {sha256 = "1e2df5f8344c99b6ea5219f00fdc8950b8e6f2c422fbc1cc122ec8641fac85a1"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/2e/77a821a67ff0786f2f14856d6bd1348992f695ee90136a145d7a445c1ff6/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl",hashes = {sha256 = "35291331e9d8ed94c257bab6be1cb3a380b5eee570a2784bffc055e18040a2ea"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/fd/9a/b54512bb9df7f64c586b369328c30481229b70ca6a5fcbb90b715e15facf/pydantic_core-2.41.1-cp311-cp311-win32.whl",hashes = {sha256 = "2876a095292668d753f1a868c4a57c4ac9f6acbd8edda8debe4218d5848cf42f"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/9d/72/63c9a4f1a5c950e65dd522d7dd67f167681f9d4f6ece3b80085a0329f08f/pydantic_core-2.41.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "b92d6c628e9a338846a28dfe3fcdc1a3279388624597898b105e078cdfc59298"}}, + {name = "pydantic_core-2.41.1-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/d8/16/4e2706184209f61b50c231529257c12eb6bd9eb36e99ea1272e4815d2200/pydantic_core-2.41.1-cp311-cp311-win_arm64.whl",hashes = {sha256 = "7d82ae99409eb69d507a89835488fb657faa03ff9968a9379567b0d2e2e56bc5"}}, + {name = 
"pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/e6/6c/fa3e45c2b054a1e627a89a364917f12cbe3abc3e91b9004edaae16e7b3c5/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "af2385d3f98243fb733862f806c5bb9122e5fba05b373e3af40e3c82d711cef1"}}, + {name = "pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/17/7eebc38b4658cc8e6902d0befc26388e4c2a5f2e179c561eeb43e1922c7b/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "6550617a0c2115be56f90c31a5370261d8ce9dbf051c3ed53b51172dd34da696"}}, + {name = "pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/2b/00/9fe640194a1717a464ab861d43595c268830f98cb1e2705aa134b3544b70/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "dc17b6ecf4983d298686014c92ebc955a9f9baf9f57dad4065e7906e7bee6222"}}, + {name = "pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/b2/ad/f4cdfaf483b78ee65362363e73b6b40c48e067078d7b146e8816d5945ad6/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "42ae9352cf211f08b04ea110563d6b1e415878eea5b4c70f6bdb17dca3b932d2"}}, + {name = "pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/cb/c1/18f416d40a10f44e9387497ba449f40fdb1478c61ba05c4b6bdb82300362/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "e82947de92068b0a21681a13dd2102387197092fbe7defcfb8453e0913866506"}}, + {name = "pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/42/30/134c8a921630d8a88d6f905a562495a6421e959a23c19b0f49b660801d67/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "e244c37d5471c9acdcd282890c6c4c83747b77238bfa19429b8473586c907656"}}, + {name = "pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/9c/48/a9263aeaebdec81e941198525b43edb3b44f27cfa4cb8005b8d3eb8dec72/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "1e798b4b304a995110d41ec93653e57975620ccb2842ba9420037985e7d7284e"}}, + {name = "pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1d/62/755d2bd2593f701c5839fc084e9c2c5e2418f460383ad04e3b5d0befc3ca/pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "f1fc716c0eb1663c59699b024428ad5ec2bcc6b928527b8fe28de6cb89f47efb"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/2c/a5c4640dc7132540109f67fe83b566fbc7512ccf2a068cfa22a243df70c7/pydantic_core-2.41.1-cp310-cp310-macosx_10_12_x86_64.whl",hashes = {sha256 = "e63036298322e9aea1c8b7c0a6c1204d615dbf6ec0668ce5b83ff27f07404a61"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e3/e7/a8694c3454a57842095d69c7a4ab3cf81c3c7b590f052738eabfdfc2e234/pydantic_core-2.41.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "241299ca91fc77ef64f11ed909d2d9220a01834e8e6f8de61275c4dd16b7c936"}}, + {name = 
"pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/9c/58/29f12e65b19c1877a0269eb4f23c5d2267eded6120a7d6762501ab843dc9/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1ab7e594a2a5c24ab8013a7dc8cfe5f2260e80e490685814122081705c2cf2b0"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/98/26/4e677f2b7ec3fbdd10be6b586a82a814c8ebe3e474024c8df2d4260e564e/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "b054ef1a78519cb934b58e9c90c09e93b837c935dcd907b891f2b265b129eb6e"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/29/50/50614bd906089904d7ca1be3b9ecf08c00a327143d48f1decfdc21b3c302/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "f2ab7d10d0ab2ed6da54c757233eb0f48ebfb4f86e9b88ccecb3f92bbd61a538"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/ea/58/b1e640b4ca559273cca7c28e0fe8891d5d8e9a600f5ab4882670ec107549/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "2757606b7948bb853a27e4040820306eaa0ccb9e8f9f8a0fa40cb674e170f350"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/53/25/cd47df3bfb24350e03835f0950288d1054f1cc9a8023401dabe6d4ff2834/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "cec0e75eb61f606bad0a32f2be87507087514e26e8c73db6cbdb8371ccd27917"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/ec/b4/71b2c77e5df527fbbc1a03e72c3fd96c44cd10d4241a81befef8c12b9fc4/pydantic_core-2.41.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "0234236514f44a5bf552105cfe2543a12f48203397d9d0f866affa569345a5b5"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/aa/08/4b8a50733005865efde284fec45da75fe16a258f706e16323c5ace4004eb/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_aarch64.whl",hashes = {sha256 = "1b974e41adfbb4ebb0f65fc4ca951347b17463d60893ba7d5f7b9bb087c83897"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/83/c3/1037cb603ef2130c210150a51b1710d86825b5c28df54a55750099f91196/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_armv7l.whl",hashes = {sha256 = "248dafb3204136113c383e91a4d815269f51562b6659b756cf3df14eefc7d0bb"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/56/4c/52d111869610e6b1a46e1f1035abcdc94d0655587e39104433a290e9f377/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_x86_64.whl",hashes = {sha256 = "678f9d76a91d6bcedd7568bbf6beb77ae8447f85d1aeebaab7e2f0829cfc3a13"}}, + {name = "pydantic_core-2.41.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/32/5d/4b435f0b52ab543967761aca66b84ad3f0026e491e57de47693d15d0a8db/pydantic_core-2.41.1-cp310-cp310-win32.whl",hashes = {sha256 = "dff5bee1d21ee58277900692a641925d2dddfde65182c972569b1a276d2ac8fb"}}, + {name = 
"pydantic_core-2.41.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/88/52/31b4deafc1d3cb96d0e7c0af70f0dc05454982d135d07f5117e6336153e8/pydantic_core-2.41.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "5042da12e5d97d215f91567110fdfa2e2595a25f17c19b9ff024f31c34f9b53e"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/d4/31/f403d7ca8352e3e4df352ccacd200f5f7f7fe81cef8e458515f015091625/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl",hashes = {sha256 = "fabcbdb12de6eada8d6e9a759097adb3c15440fafc675b3e94ae5c9cb8d678a0"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6e/b5/334473b6d2810df84db67f03d4f666acacfc538512c2d2a254074fee0889/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "80e97ccfaf0aaf67d55de5085b0ed0d994f57747d9d03f2de5cc9847ca737b08"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ea/5e/45513e4dc621f47397cfa5fef12ba8fa5e8b1c4c07f2ff2a5fef8ff81b25/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "34df1fe8fea5d332484a763702e8b6a54048a9d4fe6ccf41e34a128238e01f52"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/22/e3/f1797c168e5f52b973bed1c585e99827a22d5e579d1ed57d51bc15b14633/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "421b5595f845842fc093f7250e24ee395f54ca62d494fdde96f43ecf9228ae01"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl",url = "https://files.pythonhosted.org/packages/bb/e1/24ef4c3b4ab91c21c3a09a966c7d2cffe101058a7bfe5cc8b2c7c7d574e2/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl",hashes = {sha256 = "dce8b22663c134583aaad24827863306a933f576c79da450be3984924e2031d1"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",url = "https://files.pythonhosted.org/packages/35/74/70c1e225d67f7ef3fdba02c506d9011efaf734020914920b2aa3d1a45e61/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl",hashes = {sha256 = "300a9c162fea9906cc5c103893ca2602afd84f0ec90d3be36f4cc360125d22e1"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/c8/bf/dd4d21037c8bef0d8cce90a86a3f2dcb011c30086db2a10113c3eea23eba/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl",hashes = {sha256 = "e019167628f6e6161ae7ab9fb70f6d076a0bf0d55aa9b20833f86a320c70dd65"}}, + {name = "pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7e/78/3093b334e9c9796c8236a4701cd2ddef1c56fb0928fe282a10c797644380/pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "13ab9cc2de6f9d4ab645a050ae5aee61a2424ac4d3a16ba23d4c2027705e0301"}}, ] marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [ - "typing-extensions!=4.7.0,>=4.6.0", + "typing-extensions>=4.14.1", ] [[packages]] @@ -1058,11 +1317,11 @@ dependencies = [] [[packages]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" requires-python = ">=3.9" -sdist = {name = "typing_extensions-4.14.1.tar.gz", url = 
"https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hashes = {sha256 = "38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}} +sdist = {name = "typing_extensions-4.15.0.tar.gz", url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hashes = {sha256 = "0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}} wheels = [ - {name = "typing_extensions-4.14.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl",hashes = {sha256 = "d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}}, + {name = "typing_extensions-4.15.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl",hashes = {sha256 = "f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1084,11 +1343,11 @@ dependencies = [] [[packages]] name = "huggingface-hub" -version = "0.33.2" +version = "0.35.3" requires-python = ">=3.8.0" -sdist = {name = "huggingface_hub-0.33.2.tar.gz", url = "https://files.pythonhosted.org/packages/fa/42/8a95c5632080ae312c0498744b2b852195e10b05a20b1be11c5141092f4c/huggingface_hub-0.33.2.tar.gz", hashes = {sha256 = "84221defaec8fa09c090390cd68c78b88e3c4c2b7befba68d3dc5aacbc3c2c5f"}} +sdist = {name = "huggingface_hub-0.35.3.tar.gz", url = "https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hashes = {sha256 = "350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a"}} wheels = [ - {name = "huggingface_hub-0.33.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/44/f4/5f3f22e762ad1965f01122b42dae5bf0e009286e2dba601ce1d0dba72424/huggingface_hub-0.33.2-py3-none-any.whl",hashes = {sha256 = "3749498bfa91e8cde2ddc2c1db92c79981f40e66434c20133b39e5928ac9bcc5"}}, + {name = "huggingface_hub-0.35.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl",hashes = {sha256 = "0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba"}}, ] marker = "\"default\" in dependency_groups" @@ -1101,7 +1360,7 @@ dependencies = [ "requests", "tqdm>=4.42.1", "typing-extensions>=3.7.4.3", - "hf-xet<2.0.0,>=1.1.2; platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\"", + "hf-xet<2.0.0,>=1.1.3; platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\"", ] [[packages]] @@ -1147,11 +1406,11 @@ dependencies = [] [[packages]] name = "requests" -version = "2.32.4" -requires-python = ">=3.8" -sdist = {name = "requests-2.32.4.tar.gz", url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hashes = {sha256 = "27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}} +version = "2.32.5" +requires-python = ">=3.9" +sdist = {name = "requests-2.32.5.tar.gz", url = 
"https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hashes = {sha256 = "dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}} wheels = [ - {name = "requests-2.32.4-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl",hashes = {sha256 = "27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}}, + {name = "requests-2.32.5-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl",hashes = {sha256 = "2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" @@ -1206,37 +1465,66 @@ dependencies = [] [[packages]] name = "charset-normalizer" -version = "3.4.2" +version = "3.4.3" requires-python = ">=3.7" -sdist = {name = "charset_normalizer-3.4.2.tar.gz", url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hashes = {sha256 = "5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}} -wheels = [ - {name = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl",hashes = {sha256 = "aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}}, - {name = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}}, - {name = "charset_normalizer-3.4.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl",hashes = {sha256 = "db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}}, - {name = 
"charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}}, - {name = "charset_normalizer-3.4.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl",hashes = {sha256 = "7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}}, +sdist = {name = "charset_normalizer-3.4.3.tar.gz", url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hashes = {sha256 = "6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"}} +wheels = [ + {name = "charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl",hashes = {sha256 = "c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce"}}, + {name = "charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl",hashes = {sha256 = "73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = 
{sha256 = "416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl",hashes = {sha256 = "6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce"}}, + {name = "charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = 
"https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl",hashes = {sha256 = "fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37"}}, + {name = "charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc"}}, + {name = "charset_normalizer-3.4.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl",hashes = {sha256 = "ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl",hashes = {sha256 = "6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849"}}, + {name = "charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = 
"31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a"}}, + {name = 
"charset_normalizer-3.4.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl",hashes = {sha256 = "d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f"}}, + {name = "charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" @@ -1245,11 +1533,11 @@ dependencies = [] [[packages]] name = "dill" -version = "0.3.8" +version = "0.4.0" requires-python = ">=3.8" -sdist = {name = "dill-0.3.8.tar.gz", url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hashes = {sha256 = "3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}} +sdist = {name = "dill-0.4.0.tar.gz", url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz", hashes = {sha256 = "0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0"}} wheels = [ - {name = "dill-0.3.8-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl",hashes = {sha256 = "c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}}, + {name = "dill-0.4.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl",hashes = {sha256 = "44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049"}}, ] marker = "\"default\" in dependency_groups" @@ -1271,11 +1559,11 @@ dependencies = [] [[packages]] name = "filelock" -version = "3.18.0" -requires-python = ">=3.9" -sdist = {name = "filelock-3.18.0.tar.gz", url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hashes = {sha256 = "adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}} +version = "3.20.0" +requires-python = ">=3.10" +sdist = {name = "filelock-3.20.0.tar.gz", url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hashes = {sha256 = "711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4"}} wheels = [ - {name = "filelock-3.18.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl",hashes = {sha256 = "c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}}, + {name = "filelock-3.20.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl",hashes = {sha256 = "339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" @@ -1284,11 +1572,11 @@ dependencies = [] [[packages]] name = "fsspec" -version = "2025.3.0" -requires-python = ">=3.8" -sdist = {name = "fsspec-2025.3.0.tar.gz", url = 
"https://files.pythonhosted.org/packages/34/f4/5721faf47b8c499e776bc34c6a8fc17efdf7fdef0b00f398128bc5dcb4ac/fsspec-2025.3.0.tar.gz", hashes = {sha256 = "a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972"}} +version = "2025.9.0" +requires-python = ">=3.9" +sdist = {name = "fsspec-2025.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/de/e0/bab50af11c2d75c9c4a2a26a5254573c0bd97cea152254401510950486fa/fsspec-2025.9.0.tar.gz", hashes = {sha256 = "19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19"}} wheels = [ - {name = "fsspec-2025.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/56/53/eb690efa8513166adef3e0669afd31e95ffde69fb3c52ec2ac7223ed6018/fsspec-2025.3.0-py3-none-any.whl",hashes = {sha256 = "efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3"}}, + {name = "fsspec-2025.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/47/71/70db47e4f6ce3e5c37a607355f80da8860a33226be640226ac52cb05ef2e/fsspec-2025.9.0-py3-none-any.whl",hashes = {sha256 = "530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7"}}, ] marker = "\"default\" in dependency_groups" @@ -1297,44 +1585,112 @@ dependencies = [] [[packages]] name = "aiohttp" -version = "3.12.14" +version = "3.13.0" requires-python = ">=3.9" -sdist = {name = "aiohttp-3.12.14.tar.gz", url = "https://files.pythonhosted.org/packages/e6/0b/e39ad954107ebf213a2325038a3e7a506be3d98e1435e1f82086eec4cde2/aiohttp-3.12.14.tar.gz", hashes = {sha256 = "6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2"}} -wheels = [ - {name = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/06/48/e0d2fa8ac778008071e7b79b93ab31ef14ab88804d7ba71b5c964a7c844e/aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767"}}, - {name = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/e7/f73206afa33100804f790b71092888f47df65fd9a4cd0e6800d7c6826441/aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e"}}, - {name = "aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/df/e2/4dd00180be551a6e7ee979c20fc7c32727f4889ee3fd5b0586e0d47f30e1/aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63"}}, - {name = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/de/dd/525ed198a0bb674a323e93e4d928443a680860802c44fa7922d39436b48b/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d"}}, - {name = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/d8/b1/01e542aed560a968f692ab4fc4323286e8bc4daae83348cd63588e4f33e3/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab"}}, - {name = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/b3/06/93669694dc5fdabdc01338791e70452d60ce21ea0946a878715688d5a191/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4"}}, - {name = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/a5/3a/18991048ffc1407ca51efb49ba8bcc1645961f97f563a6c480cdf0286310/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026"}}, - {name = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/30/a8/81e237f89a32029f9b4a805af6dffc378f8459c7b9942712c809ff9e76e5/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd"}}, - {name = "aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/8c/e3/bd67a11b0fe7fc12c6030473afd9e44223d456f500f7cf526dbaa259ae46/aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88"}}, - {name = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/83/ba/e0cc8e0f0d9ce0904e3cf2d6fa41904e379e718a013c721b781d53dcbcca/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086"}}, - {name = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/d8/b3/1e6c960520bda094c48b56de29a3d978254637ace7168dd97ddc273d0d6c/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933"}}, - {name = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/0a/19/929a3eb8c35b7f9f076a462eaa9830b32c7f27d3395397665caa5e975614/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151"}}, - {name = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/22/e5/81682a6f20dd1b18ce3d747de8eba11cbef9b270f567426ff7880b096b48/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8"}}, - {name = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8c/17/884938dffaa4048302985483f77dfce5ac18339aad9b04ad4aaa5e32b028/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3"}}, - {name = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/95/78/53b081980f50b5cf874359bde707a6eacd6c4be3f5f5c93937e48c9d0025/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758"}}, - {name = "aiohttp-3.12.14-cp313-cp313-win32.whl",url = 
"https://files.pythonhosted.org/packages/ed/91/228eeddb008ecbe3ffa6c77b440597fdf640307162f0c6488e72c5a2d112/aiohttp-3.12.14-cp313-cp313-win32.whl",hashes = {sha256 = "a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5"}}, - {name = "aiohttp-3.12.14-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/66/5f/8427618903343402fdafe2850738f735fd1d9409d2a8f9bcaae5e630d3ba/aiohttp-3.12.14-cp313-cp313-win_amd64.whl",hashes = {sha256 = "3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa"}}, - {name = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/c3/0d/29026524e9336e33d9767a1e593ae2b24c2b8b09af7c2bd8193762f76b3e/aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22"}}, - {name = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/0a/b8/a5e8e583e6c8c1056f4b012b50a03c77a669c2e9bf012b7cf33d6bc4b141/aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a"}}, - {name = "aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/29/e8/5202890c9e81a4ec2c2808dd90ffe024952e72c061729e1d49917677952f/aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff"}}, - {name = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/23/e5/d11db8c23d8923d3484a27468a40737d50f05b05eebbb6288bafcb467356/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d"}}, - {name = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/53/44/af6879ca0eff7a16b1b650b7ea4a827301737a350a464239e58aa7c387ef/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869"}}, - {name = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/bb/94/18457f043399e1ec0e59ad8674c0372f925363059c276a45a1459e17f423/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c"}}, - {name = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/26/d9/1d3744dc588fafb50ff8a6226d58f484a2242b5dd93d8038882f55474d41/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7"}}, - {name = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/73/12/2530fb2b08773f717ab2d249ca7a982ac66e32187c62d49e2c86c9bba9b4/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660"}}, - {name = "aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/b9/34/8d6015a729f6571341a311061b578e8b8072ea3656b3d72329fa0faa2c7c/aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088"}}, - {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ff/4b/08b83ea02595a582447aeb0c1986792d0de35fe7a22fb2125d65091cbaf3/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7"}}, - {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/b5/66/9c7c31037a063eec13ecf1976185c65d1394ded4a5120dd5965e3473cb21/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9"}}, - {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ba/02/84406e0ad1acb0fb61fd617651ab6de760b2d6a31700904bc0b33bd0894d/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3"}}, - {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/07/53/da018f4013a7a179017b9a274b46b9a12cbeb387570f116964f498a6f211/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb"}}, - {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/49/e8/ca01c5ccfeaafb026d85fa4f43ceb23eb80ea9c1385688db0ef322c751e9/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425"}}, - {name = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/22/32/5501ab525a47ba23c20613e568174d6c63aa09e2caa22cded5c6ea8e3ada/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0"}}, - {name = "aiohttp-3.12.14-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/06/af/28e24574801fcf1657945347ee10df3892311c2829b41232be6089e461e7/aiohttp-3.12.14-cp312-cp312-win32.whl",hashes = {sha256 = "15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729"}}, - {name = "aiohttp-3.12.14-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/98/d5/7ac2464aebd2eecac38dbe96148c9eb487679c512449ba5215d233755582/aiohttp-3.12.14-cp312-cp312-win_amd64.whl",hashes = {sha256 = "3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338"}}, +sdist = {name = "aiohttp-3.13.0.tar.gz", url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hashes = {sha256 = "378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67"}} +wheels = [ + {name = "aiohttp-3.13.0-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/fe/c1/93bb1e35cd0c4665bb422b1ca3d87b588f4bca2656bbe9292b963d5b76a9/aiohttp-3.13.0-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "c417f8c2e1137775569297c584a8a7144e5d1237789eae56af4faf1894a0b861"}}, + {name = "aiohttp-3.13.0-cp314-cp314-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/5e/36/2d50eba91992d3fe7a6452506ccdab45d03685ee8d8acaa5b289384a7d4c/aiohttp-3.13.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "f84b53326abf8e56ebc28a35cebf4a0f396a13a76300f500ab11fe0573bf0b52"}}, + {name = "aiohttp-3.13.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/82/93/fa4b1d5ecdc7805bdf0815ef00257db4632ccf0a8bffd44f9fc4657b1677/aiohttp-3.13.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "990a53b9d6a30b2878789e490758e568b12b4a7fb2527d0c89deb9650b0e5813"}}, + {name = "aiohttp-3.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/05/0f/85241f0d158da5e24e8ac9d50c0849ed24f882cafc53dc95749ef85eef09/aiohttp-3.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c811612711e01b901e18964b3e5dec0d35525150f5f3f85d0aee2935f059910a"}}, + {name = "aiohttp-3.13.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/ab/fc/c755590d6f6d2b5d1565c72d6ee658d3c30ec61acb18964d1e9bf991d9b5/aiohttp-3.13.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "ee433e594d7948e760b5c2a78cc06ac219df33b0848793cf9513d486a9f90a52"}}, + {name = "aiohttp-3.13.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3a/de/caa61e213ff546b8815aef5e931d7eae1dbe8c840a3f11ec5aa41c5ae462/aiohttp-3.13.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "19bb08e56f57c215e9572cd65cb6f8097804412c54081d933997ddde3e5ac579"}}, + {name = "aiohttp-3.13.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/fb/b7/40c3219dd2691aa35cf889b4fbb0c00e48a19092928707044bfe92068e01/aiohttp-3.13.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "f27b7488144eb5dd9151cf839b195edd1569629d90ace4c5b6b18e4e75d1e63a"}}, + {name = "aiohttp-3.13.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/57/e8/66e3c32841fc0e26a09539c377aa0f3bbf6deac1957ac5182cf276c5719c/aiohttp-3.13.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d812838c109757a11354a161c95708ae4199c4fd4d82b90959b20914c1d097f6"}}, + {name = "aiohttp-3.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/6b/a5/c68e5b46ff0410fe3abfa508651b09372428f27036138beacf4ff6b7cb8c/aiohttp-3.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "7c20db99da682f9180fa5195c90b80b159632fb611e8dbccdd99ba0be0970620"}}, + {name = "aiohttp-3.13.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7a/a6/4c97dc27f9935c0c0aa6e3e10e5b4548823ab5d056636bde374fcd297256/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cf8b0870047900eb1f17f453b4b3953b8ffbf203ef56c2f346780ff930a4d430"}}, + {name = "aiohttp-3.13.0-cp314-cp314-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/8e/1b/11f9c52fd72b786a47e796e6794883417280cdca8eb1032d8d0939928dfa/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_armv7l.whl",hashes = {sha256 = 
"5b8a5557d5af3f4e3add52a58c4cf2b8e6e59fc56b261768866f5337872d596d"}}, + {name = "aiohttp-3.13.0-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ea/eb/948903d40505f3a25e53e051488d2714ded3afac1f961df135f2936680f9/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "052bcdd80c1c54b8a18a9ea0cd5e36f473dc8e38d51b804cea34841f677a9971"}}, + {name = "aiohttp-3.13.0-cp314-cp314-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/44/14/c8ced38c7dfe80804dec17a671963ccf3cb282f12700ec70b1f689d8de7d/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_riscv64.whl",hashes = {sha256 = "76484ba17b2832776581b7ab466d094e48eba74cb65a60aea20154dae485e8bd"}}, + {name = "aiohttp-3.13.0-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/a4/6e/f2e6bff550a51fd7c45fdab116a1dab7cc502e5d942956f10fc5c626bb15/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "62d8a0adcdaf62ee56bfb37737153251ac8e4b27845b3ca065862fb01d99e247"}}, + {name = "aiohttp-3.13.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/00/8f057300d9b598a706348abb375b3de9a253195fb615f17c0b2be2a72836/aiohttp-3.13.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5004d727499ecb95f7c9147dd0bfc5b5670f71d355f0bd26d7af2d3af8e07d2f"}}, + {name = "aiohttp-3.13.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/8a/ab/6919d584d8f053a14b15f0bfa3f315b3f548435c2142145459da2efa8673/aiohttp-3.13.0-cp314-cp314-win32.whl",hashes = {sha256 = "a1c20c26af48aea984f63f96e5d7af7567c32cb527e33b60a0ef0a6313cf8b03"}}, + {name = "aiohttp-3.13.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c5/59/5d9e78de6132079066f5077d9687bf524f764a2f8207e04d8d68790060c6/aiohttp-3.13.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "56f7d230ec66e799fbfd8350e9544f8a45a4353f1cf40c1fea74c1780f555b8f"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/7c/ea/7d98da03d1e9798bb99c3ca4963229150d45c9b7a3a16210c5b4a5f89e07/aiohttp-3.13.0-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "2fd35177dc483ae702f07b86c782f4f4b100a8ce4e7c5778cea016979023d9fd"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/5c/02/37f29beced8213bb467c52ad509a5e3b41e6e967de2f6eaf7f8db63bea54/aiohttp-3.13.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "4df1984c8804ed336089e88ac81a9417b1fd0db7c6f867c50a9264488797e778"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e7/22/b0afcafcfe3637bc8d7992abf08ee9452018366c0801e4e7d4efda2ed839/aiohttp-3.13.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "e68c0076052dd911a81d3acc4ef2911cc4ef65bf7cadbfbc8ae762da24da858f"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/4c/046c847b7a1993b49f3855cc3b97872d5df193d9240de835d0dc6a97b164/aiohttp-3.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "bc95c49853cd29613e4fe4ff96d73068ff89b89d61e53988442e127e8da8e7ba"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/1a/25/1449a59e3c6405da5e47b0138ee0855414dc12a8c306685d7fc3dd300e1f/aiohttp-3.13.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "3b3bdc89413117b40cc39baae08fd09cbdeb839d421c4e7dce6a34f6b54b3ac1"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/23/8f/50cc34ad267b38608f21c6a74327015dd08a66f1dd8e7ceac954d0953191/aiohttp-3.13.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "3e77a729df23be2116acc4e9de2767d8e92445fbca68886dd991dc912f473755"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/df/b9/b3ab1278faa0d1b8f434c85f9cf34eeb0a25016ffe1ee6bc361d09fef0ec/aiohttp-3.13.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e88ab34826d6eeb6c67e6e92400b9ec653faf5092a35f07465f44c9f1c429f82"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/88/e2/86050aaa3bd7021b115cdfc88477b754e8cf93ef0079867840eee22d3c34/aiohttp-3.13.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "019dbef24fe28ce2301419dd63a2b97250d9760ca63ee2976c2da2e3f182f82e"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/78/8d/9af903324c2ba24a0c4778e9bcc738b773c98dded3a4fcf8041d5211769f/aiohttp-3.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "2c4aeaedd20771b7b4bcdf0ae791904445df6d856c02fc51d809d12d17cffdc7"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/84/97/5174971ba4986d913554ceb248b0401eb5358cb60672ea0166f9f596cd08/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b3a8e6a2058a0240cfde542b641d0e78b594311bc1a710cbcb2e1841417d5cb3"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/dd/ae/8b397e980ac613ef3ddd8e996aa7a40a1828df958257800d4bb325657db3/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "f8e38d55ca36c15f36d814ea414ecb2401d860de177c49f84a327a25b3ee752b"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c7/54/0e8e2111dd92051c787e934b6bbf30c213daaa5e7ee5f51bca8913607492/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a921edbe971aade1bf45bcbb3494e30ba6863a5c78f28be992c42de980fd9108"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/fa/dd/c9283dbfd9325ed6fa6c91f009db6344d8d370a7bcf09f36e7b2fcbfae02/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "474cade59a447cb4019c0dce9f0434bf835fb558ea932f62c686fe07fe6db6a1"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8c/f6/da76230679bd9ef175d876093f89e7fd6d6476c18505e115e3026fe5ef95/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "99a303ad960747c33b65b1cb65d01a62ac73fa39b72f08a2e1efa832529b01ed"}}, + {name = 
"aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/78/394003ac738703822616f4f922705b54e5b3d8e7185831ecc1c97904174d/aiohttp-3.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "bb34001fc1f05f6b323e02c278090c07a47645caae3aa77ed7ed8a3ce6abcce9"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/bd/b0/4bad0a9dd5910bd01c3119f8bd3d71887cd412d4105e4acddcdacf3cfa76/aiohttp-3.13.0-cp314-cp314t-win32.whl",hashes = {sha256 = "dea698b64235d053def7d2f08af9302a69fcd760d1c7bd9988fd5d3b6157e657"}}, + {name = "aiohttp-3.13.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bd/af/ad12d592f623aae2bd1d3463201dc39c201ea362f9ddee0d03efd9e83720/aiohttp-3.13.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "1f164699a060c0b3616459d13c1464a981fddf36f892f0a5027cbd45121fb14b"}}, + {name = "aiohttp-3.13.0-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/86/2c/ac53efdc9c10e41399acc2395af98f835b86d0141d5c3820857eb9f6a14a/aiohttp-3.13.0-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "00243e51f16f6ec0fb021659d4af92f675f3cf9f9b39efd142aa3ad641d8d1e6"}}, + {name = "aiohttp-3.13.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/18/1ac95683e1c1d48ef4503965c96f5401618a04c139edae12e200392daae8/aiohttp-3.13.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "059978d2fddc462e9211362cbc8446747ecd930537fa559d3d25c256f032ff54"}}, + {name = "aiohttp-3.13.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fd/79/ef0d477c771a642d1a881b92d226314c43d3c74bc674c93e12e679397a97/aiohttp-3.13.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "564b36512a7da3b386143c611867e3f7cfb249300a1bf60889bd9985da67ab77"}}, + {name = "aiohttp-3.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/37/b4/0e440481a0e77a551d6c5dcab5d11f1ff6b2b2ddb8dedc24f54f5caad732/aiohttp-3.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4aa995b9156ae499393d949a456a7ab0b994a8241a96db73a3b73c7a090eff6a"}}, + {name = "aiohttp-3.13.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/e6/59/76c421cc4a75bb1aceadb92f20ee6f05a990aa6960c64b59e8e0d340e3f5/aiohttp-3.13.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "55ca0e95a3905f62f00900255ed807c580775174252999286f283e646d675a49"}}, + {name = "aiohttp-3.13.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ec/ac/5095f12a79c7775f402cfc3e83651b6e0a92ade10ddf7f2c78c4fed79f71/aiohttp-3.13.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "49ce7525853a981fc35d380aa2353536a01a9ec1b30979ea4e35966316cace7e"}}, + {name = "aiohttp-3.13.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/05/d7/a48e4989bd76cc70600c505bbdd0d90ca1ad7f9053eceeb9dbcf9345a9ec/aiohttp-3.13.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "2117be9883501eaf95503bd313eb4c7a23d567edd44014ba15835a1e9ec6d852"}}, + {name = 
"aiohttp-3.13.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/1e/02/45b388b49e37933f316e1fb39c0de6fb1d77384b0c8f4cf6af5f2cbe3ea6/aiohttp-3.13.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d169c47e40c911f728439da853b6fd06da83761012e6e76f11cb62cddae7282b"}}, + {name = "aiohttp-3.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/6c/a7/4fde058f1605c34a219348a83a99f14724cc64e68a42480fc03cf40f9ea3/aiohttp-3.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "703ad3f742fc81e543638a7bebddd35acadaa0004a5e00535e795f4b6f2c25ca"}}, + {name = "aiohttp-3.13.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/d1/12/0bac4d29231981e3aa234e88d1931f6ba38135ff4c2cf3afbb7895527630/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5bf635c3476f4119b940cc8d94ad454cbe0c377e61b4527f0192aabeac1e9370"}}, + {name = "aiohttp-3.13.0-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/71/95/b829eb5f8ac1ca1d8085bb8df614c8acf3ff32e23ad5ad1173c7c9761daa/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "cfe6285ef99e7ee51cef20609be2bc1dd0e8446462b71c9db8bb296ba632810a"}}, + {name = "aiohttp-3.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/47/6d/15ccf4ef3c254d899f62580e0c7fc717014f4d14a3ac31771e505d2c736c/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "34d8af6391c5f2e69749d7f037b614b8c5c42093c251f336bdbfa4b03c57d6c4"}}, + {name = "aiohttp-3.13.0-cp313-cp313-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/46/6a/8acf6c57e03b6fdcc8b4c06392e66abaff3213ea275e41db3edb20738d91/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_riscv64.whl",hashes = {sha256 = "12f5d820fadc5848d4559ea838aef733cf37ed2a1103bba148ac2f5547c14c29"}}, + {name = "aiohttp-3.13.0-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/75/7d/fbfd59ab2a83fe2578ce79ac3db49727b81e9f4c3376217ad09c03c6d279/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "0f1338b61ea66f4757a0544ed8a02ccbf60e38d9cfb3225888888dd4475ebb96"}}, + {name = "aiohttp-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/99/e7/cc9f0fdf06cab3ca61e6b62bff9a4b978b8ca736e9d76ddf54365673ab19/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "582770f82513419512da096e8df21ca44f86a2e56e25dc93c5ab4df0fe065bf0"}}, + {name = "aiohttp-3.13.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/db/43/7abbe1de94748a58a71881163ee280fd3217db36e8344d109f63638fe16a/aiohttp-3.13.0-cp313-cp313-win32.whl",hashes = {sha256 = "3194b8cab8dbc882f37c13ef1262e0a3d62064fa97533d3aa124771f7bf1ecee"}}, + {name = "aiohttp-3.13.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c9/58/afab7f2b9e7df88c995995172eb78cae8a3d5a62d5681abaade86b3f0089/aiohttp-3.13.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "7897298b3eedc790257fef8a6ec582ca04e9dbe568ba4a9a890913b925b8ea21"}}, + {name = "aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = 
"1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a"}}, + {name = "aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985"}}, + {name = "aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae"}}, + {name = "aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/33/59/752640c2b86ca987fe5703a01733b00d375e6cd2392bc7574489934e64e5/aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c32ada0abb4bc94c30be2b681c42f058ab104d048da6f0148280a51ce98add8c"}}, + {name = "aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/3d/c6/dd6b86ddb852a7fdbcdc7a45b6bdc80178aef713c08279afcaee7a5a9f07/aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "4af1f8877ca46ecdd0bc0d4a6b66d4b2bddc84a79e2e8366bc0d5308e76bceb8"}}, + {name = "aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/33/e2/27c92d205b9e8cee7661670e8e9f187931b71e26d42796b153d2a0ba6949/aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "e04ab827ec4f775817736b20cdc8350f40327f9b598dec4e18c9ffdcbea88a93"}}, + {name = "aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/df/6a/1fc1ad71d130a30f7a207d8d958a41224c29b834463b5185efb2dbff6ad4/aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "a6d9487b9471ec36b0faedf52228cd732e89be0a2bbd649af890b5e2ce422353"}}, + {name = "aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/14/51/d0c1701a79fcb0109cff5304da16226581569b89a282d8e7f1549a7e3ec0/aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2e66c57416352f36bf98f6641ddadd47c93740a22af7150d3e9a1ef6e983f9a8"}}, + {name = "aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/ae/3d/2ec4b934f85856de1c0c18e90adc8902adadbfac2b3c0b831bfeb7214fc8/aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "469167d5372f5bb3aedff4fc53035d593884fff2617a75317740e885acd48b04"}}, + {name = "aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/38/56/e23d9c3e13006e599fdce3851517c70279e177871e3e567d22cf3baf5d6c/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a9f3546b503975a69b547c9fd1582cad10ede1ce6f3e313a2f547c73a3d7814f"}}, + {name = "aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/56/cb/caa32c2ccaeca0a3dc39129079fd2ad02f9406c3a5f7924340435b87d4cd/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6b4174fcec98601f0cfdf308ee29a6ae53c55f14359e848dab4e94009112ee7d"}}, + {name = "aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/fb/c0/5911856fef9e40fd1ccbb8c54a90116875d5753a92c1cac66ce2059b390d/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a533873a7a4ec2270fb362ee5a0d3b98752e4e1dc9042b257cd54545a96bd8ed"}}, + {name = "aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/0e/48/8d6f4757a24c02f0a454c043556593a00645d10583859f7156db44d8b7d3/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl",hashes = {sha256 = "ce887c5e54411d607ee0959cac15bb31d506d86a9bcaddf0b7e9d63325a7a802"}}, + {name = "aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/39/fa/e82c9445e40b50e46770702b5b6ca2f767966d53e1a5eef03583ceac6df6/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "d871f6a30d43e32fc9252dc7b9febe1a042b3ff3908aa83868d7cf7c9579a59b"}}, + {name = "aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/e6/9d30554e7f1e700bfeae4ab6b153d5dc7441606a9ec5e929288fa93a1477/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "222c828243b4789d79a706a876910f656fad4381661691220ba57b2ab4547865"}}, + {name = "aiohttp-3.13.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/1f/e5/29cca547990a59ea54f0674fc01de98519fc628cfceeab6175711750eca7/aiohttp-3.13.0-cp312-cp312-win32.whl",hashes = {sha256 = "682d2e434ff2f1108314ff7f056ce44e457f12dbed0249b24e106e385cf154b9"}}, + {name = "aiohttp-3.13.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8b/68/46dd042d7bc62eab30bafdb8569f55ef125c3a88bb174270324224f8df56/aiohttp-3.13.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "0a2be20eb23888df130214b91c262a90e2de1553d6fb7de9e9010cec994c0ff2"}}, + {name = "aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20"}}, + {name = "aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca"}}, + {name = "aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1"}}, + {name = "aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409"}}, + {name = 
"aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1"}}, + {name = "aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390"}}, + {name = "aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504"}}, + {name = "aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3"}}, + {name = "aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8"}}, + {name = "aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad"}}, + {name = "aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe"}}, + {name = "aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114"}}, + {name = "aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl",hashes = {sha256 = "00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a"}}, + {name = "aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = 
"6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80"}}, + {name = "aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6"}}, + {name = "aiohttp-3.13.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl",hashes = {sha256 = "2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820"}}, + {name = "aiohttp-3.13.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469"}}, + {name = "aiohttp-3.13.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/25/18/a3a9c9b7c8d400f71d1ff93c3e1520a5d53dba170f829ca9c6b2b070677b/aiohttp-3.13.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "ca69ec38adf5cadcc21d0b25e2144f6a25b7db7bea7e730bac25075bc305eff0"}}, + {name = "aiohttp-3.13.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/aa/02/f1eac06d78997e015030130ccf1c7cf864a919f97d77ff27e89c82fc3186/aiohttp-3.13.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "240f99f88a9a6beb53ebadac79a2e3417247aa756202ed234b1dbae13d248092"}}, + {name = "aiohttp-3.13.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e1/db/5d65af7cbe5f302e23b1ea5cfc156cd0c7738a0d2db531a3837d2754de94/aiohttp-3.13.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "a4676b978a9711531e7cea499d4cdc0794c617a1c0579310ab46c9fdf5877702"}}, + {name = "aiohttp-3.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/d3/d5/56c622ad3bd57ff4adc2b701f298dcc0408735a8af998cec1c66a9ce224e/aiohttp-3.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "48fcdd5bc771cbbab8ccc9588b8b6447f6a30f9fe00898b1a5107098e00d6793"}}, + {name = "aiohttp-3.13.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/44/16/db236671ec3758e3a6be6977009e74016470368012a58fea4b3799546549/aiohttp-3.13.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "eeea0cdd2f687e210c8f605f322d7b0300ba55145014a5dbe98bd4be6fff1f6c"}}, + {name = "aiohttp-3.13.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/19/ad/d96d7d7023e7f5215b8737cad21a7637f6d9d10fbfbfef0435d0277f71a2/aiohttp-3.13.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "10b3f01d5aeb632adaaf39c5e93f040a550464a768d54c514050c635adcbb9d0"}}, + {name = "aiohttp-3.13.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/88/d7/e8a5ba2bbd929ed587b2a8ea9390765daede2d8cd28dfae3a0773c6d3fbc/aiohttp-3.13.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = 
"a4dc0b83e25267f42ef065ea57653de4365b56d7bc4e4cfc94fabe56998f8ee6"}}, + {name = "aiohttp-3.13.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f9/ca/135c21e85ffeff66b80ecd8a647ca104f2e5a91c37dc86649244ddbf87ab/aiohttp-3.13.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "72714919ed9b90f030f761c20670e529c4af96c31bd000917dd0c9afd1afb731"}}, + {name = "aiohttp-3.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/f6/38/348c4343052a400968dbf2051ee3dc222bdefd95af5874cf0f04cc7a8c92/aiohttp-3.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "564be41e85318403fdb176e9e5b3e852d528392f42f2c1d1efcbeeed481126d7"}}, + {name = "aiohttp-3.13.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/47/89/71cbda30f0900ab16084769960c467a355d6b1db51668fbb821c4a4ad5ed/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "84912962071087286333f70569362e10793f73f45c48854e6859df11001eb2d3"}}, + {name = "aiohttp-3.13.0-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bf/b1/5ff5fcaecccdcd5be7ff717cbde6e630760a8130e89167c3aa05b6b57707/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "90b570f1a146181c3d6ae8f755de66227ded49d30d050479b5ae07710f7894c5"}}, + {name = "aiohttp-3.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/87/e2/1d1f202f43c8be1956f05196159064cc05dc6842a33c1397cbb1b99610af/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "2d71ca30257ce756e37a6078b1dff2d9475fee13609ad831eac9a6531bea903b"}}, + {name = "aiohttp-3.13.0-cp310-cp310-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/a4/b9/53c1df2991686f947a9651265757ea12c4afc29b351a249b73a0fc81dd3c/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_riscv64.whl",hashes = {sha256 = "cd45eb70eca63f41bb156b7dffbe1a7760153b69892d923bdb79a74099e2ed90"}}, + {name = "aiohttp-3.13.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/93/24/345166f9c4cd2f5cc1d2173131998ee4adab0db8729126db32a7f91ed400/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "5ae3a19949a27982c7425a7a5a963c1268fdbabf0be15ab59448cbcf0f992519"}}, + {name = "aiohttp-3.13.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/09/f1/e8f70462848b74d49b3115050623ecbd697889713c2c93c96616da56b2de/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ea6df292013c9f050cbf3f93eee9953d6e5acd9e64a0bf4ca16404bfd7aa9bcc"}}, + {name = "aiohttp-3.13.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/23/ba/47fd065510a8bfab5d5f6e1d97c0de672447c0a941c5021298bd7210afc3/aiohttp-3.13.0-cp310-cp310-win32.whl",hashes = {sha256 = "3b64f22fbb6dcd5663de5ef2d847a5638646ef99112503e6f7704bdecb0d1c4d"}}, + {name = "aiohttp-3.13.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c4/38/f5385cb79afa1f31bcaa3625a9e8d849b782edaeac09f894f46439e006a1/aiohttp-3.13.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "f8d877aa60d80715b2afc565f0f1aea66565824c229a2d065b31670e09fed6d7"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1352,65 +1708,137 @@ dependencies = [ [[packages]] name = "multidict" -version = "6.6.3" +version = "6.7.0" requires-python 
= ">=3.9" -sdist = {name = "multidict-6.6.3.tar.gz", url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hashes = {sha256 = "798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc"}} -wheels = [ - {name = "multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55"}}, - {name = "multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b"}}, - {name = "multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca"}}, - {name = "multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1"}}, - {name = "multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6"}}, - {name = "multidict-6.6.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl",hashes = {sha256 = "5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e"}}, - {name = "multidict-6.6.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9"}}, - {name = "multidict-6.6.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600"}}, - {name = "multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134"}}, - {name = "multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37"}}, - {name = "multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0"}}, - {name = "multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d"}}, - {name = "multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c"}}, - {name = "multidict-6.6.3-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl",hashes = {sha256 = "639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e"}}, - {name = "multidict-6.6.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d"}}, - {name = "multidict-6.6.3-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb"}}, - {name = "multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6"}}, - {name = "multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f"}}, - {name = "multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",url = 
"https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a"}}, - {name = "multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75"}}, - {name = "multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10"}}, - {name = "multidict-6.6.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl",hashes = {sha256 = "73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5"}}, - {name = "multidict-6.6.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17"}}, - {name = "multidict-6.6.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b"}}, - {name = "multidict-6.6.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl",hashes = {sha256 = "8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a"}}, +sdist = {name = "multidict-6.7.0.tar.gz", url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hashes = {sha256 = "c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5"}} +wheels = [ + {name = "multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842"}}, + {name = "multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b"}}, + {name = "multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38"}}, + {name = "multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = 
"https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128"}}, + {name = "multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34"}}, + {name = "multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99"}}, + {name = "multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202"}}, + {name = "multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1"}}, + {name = "multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3"}}, + {name = "multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d"}}, + {name = "multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl",hashes = {sha256 = "d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6"}}, + {name = "multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7"}}, + {name = "multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = 
{sha256 = "b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb"}}, + {name = "multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f"}}, + {name = "multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f"}}, + {name = "multidict-6.7.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl",hashes = {sha256 = "fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885"}}, + {name = "multidict-6.7.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c"}}, + {name = "multidict-6.7.0-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl",hashes = {sha256 = "ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000"}}, + {name = "multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63"}}, + {name = "multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718"}}, + {name = "multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2"}}, + {name = "multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e"}}, + {name = "multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064"}}, + {name = "multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e"}}, + {name = "multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd"}}, + {name = "multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a"}}, + {name = "multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96"}}, + {name = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e"}}, + {name = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599"}}, + {name = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394"}}, + {name = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38"}}, + {name = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9"}}, + {name = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0"}}, + {name = "multidict-6.7.0-cp314-cp314t-win32.whl",url = 
"https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl",hashes = {sha256 = "b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13"}}, + {name = "multidict-6.7.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd"}}, + {name = "multidict-6.7.0-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827"}}, + {name = "multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6"}}, + {name = "multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159"}}, + {name = "multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca"}}, + {name = "multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8"}}, + {name = "multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60"}}, + {name = "multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4"}}, + {name = "multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f"}}, + {name = "multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = 
"https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf"}}, + {name = "multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32"}}, + {name = "multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036"}}, + {name = "multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec"}}, + {name = "multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e"}}, + {name = "multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64"}}, + {name = "multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd"}}, + {name = "multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288"}}, + {name = "multidict-6.7.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl",hashes = {sha256 = "a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17"}}, + {name = "multidict-6.7.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390"}}, + {name = "multidict-6.7.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e"}}, + {name = 
"multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00"}}, + {name = "multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb"}}, + {name = "multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b"}}, + {name = "multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c"}}, + {name = "multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1"}}, + {name = "multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b"}}, + {name = "multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5"}}, + {name = "multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad"}}, + {name = "multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c"}}, + {name = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5"}}, + {name = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10"}}, + {name = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754"}}, + {name = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c"}}, + {name = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762"}}, + {name = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6"}}, + {name = "multidict-6.7.0-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl",hashes = {sha256 = "19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d"}}, + {name = "multidict-6.7.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6"}}, + {name = "multidict-6.7.0-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792"}}, + {name = "multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184"}}, + {name = "multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45"}}, + {name = "multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa"}}, + {name = "multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7"}}, + {name = "multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e"}}, + {name = "multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546"}}, + {name = "multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4"}}, + {name = "multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1"}}, + {name = "multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d"}}, + {name = "multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304"}}, + {name = "multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12"}}, + {name = "multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62"}}, + {name = "multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0"}}, + {name = "multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a"}}, + {name = "multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8"}}, + {name = "multidict-6.7.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl",hashes = {sha256 = "dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4"}}, + {name = "multidict-6.7.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b"}}, + {name = "multidict-6.7.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec"}}, + {name = "multidict-6.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl",hashes = {sha256 = "394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3"}}, + {name = "multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc"}}, + {name = "multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721"}}, + {name = "multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6"}}, + {name = "multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = 
"https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c"}}, + {name = "multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7"}}, + {name = "multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7"}}, + {name = "multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9"}}, + {name = "multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8"}}, + {name = "multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd"}}, + {name = "multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb"}}, + {name = "multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6"}}, + {name = "multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2"}}, + {name = "multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = 
{sha256 = "0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff"}}, + {name = "multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b"}}, + {name = "multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34"}}, + {name = "multidict-6.7.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl",hashes = {sha256 = "a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff"}}, + {name = "multidict-6.7.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81"}}, + {name = "multidict-6.7.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912"}}, + {name = "multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/a9/63/7bdd4adc330abcca54c85728db2327130e49e52e8c3ce685cec44e0f2e9f/multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349"}}, + {name = "multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/3f/bb/b6c35ff175ed1a3142222b78455ee31be71a8396ed3ab5280fbe3ebe4e85/multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e"}}, + {name = "multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e0/1f/064c77877c5fa6df6d346e68075c0f6998547afe952d6471b4c5f6a7345d/multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3"}}, + {name = "multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/04/7a/bf6aa92065dd47f287690000b3d7d332edfccb2277634cadf6a810463c6a/multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046"}}, + {name = "multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/94/39/297a8de920f76eda343e4ce05f3b489f0ab3f9504f2576dfb37b7c08ca08/multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32"}}, + {name = "multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/39/3a/d0eee2898cfd9d654aea6cb8c4addc2f9756e9a7e09391cfe55541f917f7/multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73"}}, + {name = "multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/05/48/3b328851193c7a4240815b71eea165b49248867bbb6153a0aee227a0bb47/multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc"}}, + {name = "multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/b1/ca/0706a98c8d126a89245413225ca4a3fefc8435014de309cf8b30acb68841/multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62"}}, + {name = "multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/4f/9c7992f245554d8b173f6f0a048ad24b3e645d883f096857ec2c0822b8bd/multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84"}}, + {name = "multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/79/26a85991ae67efd1c0b1fc2e0c275b8a6aceeb155a68861f63f87a798f16/multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0"}}, + {name = "multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/14/1e/75fa96394478930b79d0302eaf9a6c69f34005a1a5251ac8b9c336486ec9/multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e"}}, + {name = "multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/b2/5e/085544cb9f9c4ad2b5d97467c15f856df8d9bac410cffd5c43991a5d878b/multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4"}}, + {name = "multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b9/c3/e9d9e2f20c9474e7a8fcef28f863c5cbd29bb5adce6b70cebe8bdad0039d/multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648"}}, + {name = "multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/b5/3f/df171b6efa3239ae33b97b887e42671cd1d94d460614bfb2c30ffdab3b95/multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111"}}, + {name = "multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/2f/9b5564888c4e14b9af64c54acf149263721a283aaf4aa0ae89b091d5d8c1/multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36"}}, + {name = "multidict-6.7.0-cp310-cp310-win32.whl",url = 
"https://files.pythonhosted.org/packages/6c/3a/0bd6ca0f7d96d790542d591c8c3354c1e1b6bfd2024d4d92dc3d87485ec7/multidict-6.7.0-cp310-cp310-win32.whl",hashes = {sha256 = "afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85"}}, + {name = "multidict-6.7.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/00/35/f6a637ea2c75f0d3b7c7d41b1189189acff0d9deeb8b8f35536bb30f5e33/multidict-6.7.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7"}}, + {name = "multidict-6.7.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/e7/b8/f7bf8329b39893d02d9d95cf610c75885d12fc0f402b1c894e1c8e01c916/multidict-6.7.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1421,11 +1849,11 @@ dependencies = [ [[packages]] name = "h2" -version = "4.2.0" +version = "4.3.0" requires-python = ">=3.9" -sdist = {name = "h2-4.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hashes = {sha256 = "c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"}} +sdist = {name = "h2-4.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hashes = {sha256 = "6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1"}} wheels = [ - {name = "h2-4.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl",hashes = {sha256 = "479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"}}, + {name = "h2-4.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl",hashes = {sha256 = "c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd"}}, ] marker = "\"default\" in dependency_groups" @@ -1437,17 +1865,17 @@ dependencies = [ [[packages]] name = "hf-xet" -version = "1.1.5" +version = "1.1.10" requires-python = ">=3.8" -sdist = {name = "hf_xet-1.1.5.tar.gz", url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hashes = {sha256 = "69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694"}} +sdist = {name = "hf_xet-1.1.10.tar.gz", url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hashes = {sha256 = "408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97"}} wheels = [ - {name = "hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23"}}, - {name = "hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/de/5f/2c78e28f309396e71ec8e4e9304a6483dcbc36172b5cea8f291994163425/hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8"}}, - {name = "hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1"}}, - {name = "hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/d0/54/0fcf2b619720a26fbb6cc941e89f2472a522cd963a776c089b189559447f/hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl",hashes = {sha256 = "dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18"}}, - {name = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f3/92/1d351ac6cef7c4ba8c85744d37ffbfac2d53d0a6c04d2cabeba614640a78/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14"}}, - {name = "hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/65/4b2ddb0e3e983f2508528eb4501288ae2f84963586fbdfae596836d5e57a/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a"}}, - {name = "hf_xet-1.1.5-cp37-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl",hashes = {sha256 = "73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245"}}, + {name = "hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d"}}, + {name = "hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b"}}, + {name = "hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435"}}, + {name = "hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl",hashes = {sha256 = "eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c"}}, + {name = "hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06"}}, + {name = "hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f"}}, + {name = "hf_xet-1.1.10-cp37-abi3-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl",hashes = {sha256 = "5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045"}}, ] marker = "(platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"arm64\" or platform_machine == \"aarch64\") and \"default\" in dependency_groups" @@ -1495,55 +1923,124 @@ dependencies = [] [[packages]] name = "mdit-py-plugins" -version = "0.4.2" -requires-python = ">=3.8" -sdist = {name = "mdit_py_plugins-0.4.2.tar.gz", url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hashes = {sha256 = "5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}} +version = "0.5.0" +requires-python = ">=3.10" +sdist = {name = "mdit_py_plugins-0.5.0.tar.gz", url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hashes = {sha256 = "f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6"}} wheels = [ - {name = "mdit_py_plugins-0.4.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl",hashes = {sha256 = "0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}}, + {name = "mdit_py_plugins-0.5.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl",hashes = {sha256 = "07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f"}}, ] marker = "\"dev\" in extras" [packages.tool.pdm] dependencies = [ - "markdown-it-py<4.0.0,>=1.0.0", + "markdown-it-py<5.0.0,>=2.0.0", ] [[packages]] name = "regex" -version = "2024.11.6" -requires-python = ">=3.8" -sdist = {name = "regex-2024.11.6.tar.gz", url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hashes = {sha256 = "7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}} -wheels = [ - {name = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/90/73/bcb0e36614601016552fa9344544a3a2ae1809dc1401b100eab02e772e1f/regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}}, - {name = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/3f/f1a082a46b31e25291d830b369b6b0c5576a6f7fb89d3053a354c24b8a83/regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}}, - {name = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/09/c9/4e68181a4a652fb3ef5099e077faf4fd2a694ea6e0f806a7737aff9e758a/regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}}, - {name = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/fc/fd/37868b75eaf63843165f1d2122ca6cb94bfc0271e4428cf58c0616786dce/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = 
"bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}}, - {name = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c4/7c/d4cd9c528502a3dedb5c13c146e7a7a539a3853dc20209c8e75d9ba9d1b2/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}}, - {name = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/4f/db/46f563a08f969159c5a0f0e722260568425363bea43bb7ae370becb66a67/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}}, - {name = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/db/60/1eeca2074f5b87df394fccaa432ae3fc06c9c9bfa97c5051aed70e6e00c2/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}}, - {name = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/10/db/ac718a08fcee981554d2f7bb8402f1faa7e868c1345c16ab1ebec54b0d7b/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}}, - {name = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c2/41/7da3fe70216cea93144bf12da2b87367590bcf07db97604edeea55dac9ad/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}}, - {name = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/a7/d5/880921ee4eec393a4752e6ab9f0fe28009435417c3102fc413f3fe81c4e5/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}}, - {name = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/dc/96/53770115e507081122beca8899ab7f5ae28ae790bfcc82b5e38976df6a77/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}}, - {name = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/31/d3/1372add5251cc2d44b451bd94f43b2ec78e15a6e82bff6a290ef9fd8f00a/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}}, - {name = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}}, - {name = "regex-2024.11.6-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/2b/f1/e40c8373e3480e4f29f2692bd21b3e05f296d3afebc7e5dcf21b9756ca1c/regex-2024.11.6-cp313-cp313-win32.whl",hashes = {sha256 = "63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}}, - {name = "regex-2024.11.6-cp313-cp313-win_amd64.whl",url 
= "https://files.pythonhosted.org/packages/45/94/bc295babb3062a731f52621cdc992d123111282e291abaf23faa413443ea/regex-2024.11.6-cp313-cp313-win_amd64.whl",hashes = {sha256 = "2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}}, - {name = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}}, - {name = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}}, - {name = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}}, - {name = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}}, - {name = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}}, - {name = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}}, - {name = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}}, - {name = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}}, - {name = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}}, - {name = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}}, - {name = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}}, - {name = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}}, - {name = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}}, - {name = "regex-2024.11.6-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl",hashes = {sha256 = "32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}}, - {name = "regex-2024.11.6-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl",hashes = {sha256 = "a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}}, +version = "2025.9.18" +requires-python = ">=3.9" +sdist = {name = "regex-2025.9.18.tar.gz", url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hashes = {sha256 = "c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4"}} +wheels = [ + {name = "regex-2025.9.18-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/44/b7/3b4663aa3b4af16819f2ab6a78c4111c7e9b066725d8107753c2257448a5/regex-2025.9.18-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "c6db75b51acf277997f3adcd0ad89045d856190d13359f15ab5dda21581d9129"}}, + {name = "regex-2025.9.18-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/80/5b/4533f5d7ac9c6a02a4725fe8883de2aebc713e67e842c04cf02626afb747/regex-2025.9.18-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "8f9698b6f6895d6db810e0bda5364f9ceb9e5b11328700a90cae573574f61eea"}}, + {name = "regex-2025.9.18-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b8/8d/5ab6797c2750985f79e9995fad3254caa4520846580f266ae3b56d1cae58/regex-2025.9.18-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "29cd86aa7cb13a37d0f0d7c21d8d949fe402ffa0ea697e635afedd97ab4b69f1"}}, + {name = "regex-2025.9.18-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/cb/1e/95afcb02ba8d3a64e6ffeb801718ce73471ad6440c55d993f65a4a5e7a92/regex-2025.9.18-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "7c9f285a071ee55cd9583ba24dde006e53e17780bb309baa8e4289cd472bcc47"}}, + {name = 
"regex-2025.9.18-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c8/fb/720b1f49cec1f3b5a9fea5b34cd22b88b5ebccc8c1b5de9cc6f65eed165a/regex-2025.9.18-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "5adf266f730431e3be9021d3e5b8d5ee65e563fec2883ea8093944d21863b379"}}, + {name = "regex-2025.9.18-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/a9/ca/e0d07ecf701e1616f015a720dc13b84c582024cbfbb3fc5394ae204adbd7/regex-2025.9.18-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "1137cabc0f38807de79e28d3f6e3e3f2cc8cfb26bead754d02e6d1de5f679203"}}, + {name = "regex-2025.9.18-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b6/45/bba86413b910b708eca705a5af62163d5d396d5f647ed9485580c7025209/regex-2025.9.18-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7cc9e5525cada99699ca9223cce2d52e88c52a3d2a0e842bd53de5497c604164"}}, + {name = "regex-2025.9.18-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b8/a6/740fbd9fcac31a1305a8eed30b44bf0f7f1e042342be0a4722c0365ecfca/regex-2025.9.18-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bbb9246568f72dce29bcd433517c2be22c7791784b223a810225af3b50d1aafb"}}, + {name = "regex-2025.9.18-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/80/a7/0579e8560682645906da640c9055506465d809cb0f5415d9976f417209a6/regex-2025.9.18-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "6a52219a93dd3d92c675383efff6ae18c982e2d7651c792b1e6d121055808743"}}, + {name = "regex-2025.9.18-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8d/9b/4dc96b6c17b38900cc9fee254fc9271d0dde044e82c78c0811b58754fde5/regex-2025.9.18-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "ae9b3840c5bd456780e3ddf2f737ab55a79b790f6409182012718a35c6d43282"}}, + {name = "regex-2025.9.18-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/6a/6f659f99bebb1775e5ac81a3fb837b85897c1a4ef5acffd0ff8ffe7e67fb/regex-2025.9.18-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d488c236ac497c46a5ac2005a952c1a0e22a07be9f10c3e735bc7d1209a34773"}}, + {name = "regex-2025.9.18-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/61/35/9e35665f097c07cf384a6b90a1ac11b0b1693084a0b7a675b06f760496c6/regex-2025.9.18-cp314-cp314-win32.whl",hashes = {sha256 = "0c3506682ea19beefe627a38872d8da65cc01ffa25ed3f2e422dffa1474f0788"}}, + {name = "regex-2025.9.18-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/af/64/27594dbe0f1590b82de2821ebfe9a359b44dcb9b65524876cd12fabc447b/regex-2025.9.18-cp314-cp314-win_amd64.whl",hashes = {sha256 = "57929d0f92bebb2d1a83af372cd0ffba2263f13f376e19b1e4fa32aec4efddc3"}}, + {name = "regex-2025.9.18-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/30/a3/0cd8d0d342886bd7d7f252d701b20ae1a3c72dc7f34ef4b2d17790280a09/regex-2025.9.18-cp314-cp314-win_arm64.whl",hashes = {sha256 = "6a4b44df31d34fa51aa5c995d3aa3c999cec4d69b9bd414a8be51984d859f06d"}}, + {name = "regex-2025.9.18-cp314-cp314t-macosx_10_13_universal2.whl",url = 
"https://files.pythonhosted.org/packages/99/cb/8a1ab05ecf404e18b54348e293d9b7a60ec2bd7aa59e637020c5eea852e8/regex-2025.9.18-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "b176326bcd544b5e9b17d6943f807697c0cb7351f6cfb45bf5637c95ff7e6306"}}, + {name = "regex-2025.9.18-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/3b/6543c9b7f7e734d2404fa2863d0d710c907bef99d4598760ed4563d634c3/regex-2025.9.18-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "0ffd9e230b826b15b369391bec167baed57c7ce39efc35835448618860995946"}}, + {name = "regex-2025.9.18-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/cd/91/e9fdee6ad6bf708d98c5d17fded423dcb0661795a49cba1b4ffb8358377a/regex-2025.9.18-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "ec46332c41add73f2b57e2f5b642f991f6b15e50e9f86285e08ffe3a512ac39f"}}, + {name = "regex-2025.9.18-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/94/a6/bc3e8a918abe4741dadeaeb6c508e3a4ea847ff36030d820d89858f96a6c/regex-2025.9.18-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b80fa342ed1ea095168a3f116637bd1030d39c9ff38dc04e54ef7c521e01fc95"}}, + {name = "regex-2025.9.18-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/2b/71/ea62dbeb55d9e6905c7b5a49f75615ea1373afcad95830047e4e310db979/regex-2025.9.18-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "f4d97071c0ba40f0cf2a93ed76e660654c399a0a04ab7d85472239460f3da84b"}}, + {name = "regex-2025.9.18-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/6a/90/fbe9dedb7dad24a3a4399c0bae64bfa932ec8922a0a9acf7bc88db30b161/regex-2025.9.18-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "0ac936537ad87cef9e0e66c5144484206c1354224ee811ab1519a32373e411f3"}}, + {name = "regex-2025.9.18-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f0/1c/47e4a8c0e73d41eb9eb9fdeba3b1b810110a5139a2526e82fd29c2d9f867/regex-2025.9.18-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "dec57f96d4def58c422d212d414efe28218d58537b5445cf0c33afb1b4768571"}}, + {name = "regex-2025.9.18-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/2a/da/435f29fddfd015111523671e36d30af3342e8136a889159b05c1d9110480/regex-2025.9.18-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "48317233294648bf7cd068857f248e3a57222259a5304d32c7552e2284a1b2ad"}}, + {name = "regex-2025.9.18-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/23/66/df5e6dcca25c8bc57ce404eebc7342310a0d218db739d7882c9a2b5974a3/regex-2025.9.18-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "274687e62ea3cf54846a9b25fc48a04459de50af30a7bd0b61a9e38015983494"}}, + {name = "regex-2025.9.18-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/82/42/94392b39b531f2e469b2daa40acf454863733b674481fda17462a5ffadac/regex-2025.9.18-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "a78722c86a3e7e6aadf9579e3b0ad78d955f2d1f1a8ca4f67d7ca258e8719d4b"}}, + {name = 
"regex-2025.9.18-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a8/f8/dcc64c7f7bbe58842a8f89622b50c58c3598fbbf4aad0a488d6df2c699f1/regex-2025.9.18-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "06104cd203cdef3ade989a1c45b6215bf42f8b9dd705ecc220c173233f7cba41"}}, + {name = "regex-2025.9.18-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/20/8d/edf1c5d5aa98f99a692313db813ec487732946784f8f93145e0153d910e5/regex-2025.9.18-cp314-cp314t-win32.whl",hashes = {sha256 = "2e1eddc06eeaffd249c0adb6fafc19e2118e6308c60df9db27919e96b5656096"}}, + {name = "regex-2025.9.18-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a7/24/02d4e4f88466f17b145f7ea2b2c11af3a942db6222429c2c146accf16054/regex-2025.9.18-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "8620d247fb8c0683ade51217b459cb4a1081c0405a3072235ba43a40d355c09a"}}, + {name = "regex-2025.9.18-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/1f/a3/c64894858aaaa454caa7cc47e2f225b04d3ed08ad649eacf58d45817fad2/regex-2025.9.18-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "b7531a8ef61de2c647cdf68b3229b071e46ec326b3138b2180acb4275f470b01"}}, + {name = "regex-2025.9.18-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/d2/c7/5c48206a60ce33711cf7dcaeaed10dd737733a3569dc7e1dce324dd48f30/regex-2025.9.18-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "2a40f929cd907c7e8ac7566ac76225a77701a6221bca937bdb70d56cb61f57b2"}}, + {name = "regex-2025.9.18-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/be/74fc6bb19a3c491ec1ace943e622b5a8539068771e8705e469b2da2306a7/regex-2025.9.18-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "c90471671c2cdf914e58b6af62420ea9ecd06d1554d7474d50133ff26ae88feb"}}, + {name = "regex-2025.9.18-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/25/c4/9ceaa433cb5dc515765560f22a19578b95b92ff12526e5a259321c4fc1a0/regex-2025.9.18-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "1a351aff9e07a2dabb5022ead6380cff17a4f10e4feb15f9100ee56c4d6d06af"}}, + {name = "regex-2025.9.18-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/7d/e6/68bc9393cb4dc68018456568c048ac035854b042bc7c33cb9b99b0680afa/regex-2025.9.18-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "bc4b8e9d16e20ddfe16430c23468a8707ccad3365b06d4536142e71823f3ca29"}}, + {name = "regex-2025.9.18-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/6a/1c/ebae9032d34b78ecfe9bd4b5e6575b55351dc8513485bb92326613732b8c/regex-2025.9.18-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "4b8cdbddf2db1c5e80338ba2daa3cfa3dec73a46fff2a7dda087c8efbf12d62f"}}, + {name = "regex-2025.9.18-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/3b/74/12332c54b3882557a4bcd2b99f8be581f5c6a43cf1660a85b460dd8ff468/regex-2025.9.18-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "a276937d9d75085b2c91fb48244349c6954f05ee97bba0963ce24a9d915b8b68"}}, + {name = "regex-2025.9.18-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/86/70/ba42d5ed606ee275f2465bfc0e2208755b06cdabd0f4c7c4b614d51b57ab/regex-2025.9.18-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "92a8e375ccdc1256401c90e9dc02b8642894443d549ff5e25e36d7cf8a80c783"}}, + {name = "regex-2025.9.18-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/da/c5/fcb017e56396a7f2f8357412638d7e2963440b131a3ca549be25774b3641/regex-2025.9.18-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0dc6893b1f502d73037cf807a321cdc9be29ef3d6219f7970f842475873712ac"}}, + {name = "regex-2025.9.18-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c6/ee/21c4278b973f630adfb3bcb23d09d83625f3ab1ca6e40ebdffe69901c7a1/regex-2025.9.18-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "a61e85bfc63d232ac14b015af1261f826260c8deb19401c0597dbb87a864361e"}}, + {name = "regex-2025.9.18-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/87/0b/de51550dc7274324435c8f1539373ac63019b0525ad720132866fff4a16a/regex-2025.9.18-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "1ef86a9ebc53f379d921fb9a7e42b92059ad3ee800fcd9e0fe6181090e9f6c23"}}, + {name = "regex-2025.9.18-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/60/52/383d3044fc5154d9ffe4321696ee5b2ee4833a28c29b137c22c33f41885b/regex-2025.9.18-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d3bc882119764ba3a119fbf2bd4f1b47bc56c1da5d42df4ed54ae1e8e66fdf8f"}}, + {name = "regex-2025.9.18-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/20/bd/2614fc302671b7359972ea212f0e3a92df4414aaeacab054a8ce80a86073/regex-2025.9.18-cp313-cp313-win32.whl",hashes = {sha256 = "3810a65675845c3bdfa58c3c7d88624356dd6ee2fc186628295e0969005f928d"}}, + {name = "regex-2025.9.18-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/07/0f/ab5c1581e6563a7bffdc1974fb2d25f05689b88e2d416525271f232b1946/regex-2025.9.18-cp313-cp313-win_amd64.whl",hashes = {sha256 = "16eaf74b3c4180ede88f620f299e474913ab6924d5c4b89b3833bc2345d83b3d"}}, + {name = "regex-2025.9.18-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/49/22/ee47672bc7958f8c5667a587c2600a4fba8b6bab6e86bd6d3e2b5f7cac42/regex-2025.9.18-cp313-cp313-win_arm64.whl",hashes = {sha256 = "4dc98ba7dd66bd1261927a9f49bd5ee2bcb3660f7962f1ec02617280fc00f5eb"}}, + {name = "regex-2025.9.18-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/e8/83/6887e16a187c6226cb85d8301e47d3b73ecc4505a3a13d8da2096b44fd76/regex-2025.9.18-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "fe5d50572bc885a0a799410a717c42b1a6b50e2f45872e2b40f4f288f9bce8a2"}}, + {name = "regex-2025.9.18-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/51/c5/e2f7325301ea2916ff301c8d963ba66b1b2c1b06694191df80a9c4fea5d0/regex-2025.9.18-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "1b9d9a2d6cda6621551ca8cf7a06f103adf72831153f3c0d982386110870c4d3"}}, + {name = "regex-2025.9.18-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/91/60/7d229d2bc6961289e864a3a3cfebf7d0d250e2e65323a8952cbb7e22d824/regex-2025.9.18-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "13202e4c4ac0ef9a317fff817674b293c8f7e8c68d3190377d8d8b749f566e12"}}, + {name = 
"regex-2025.9.18-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3c/d7/b4f06868ee2958ff6430df89857fbf3d43014bbf35538b6ec96c2704e15d/regex-2025.9.18-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "874ff523b0fecffb090f80ae53dc93538f8db954c8bb5505f05b7787ab3402a0"}}, + {name = "regex-2025.9.18-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d6/e4/bca99034a8f1b9b62ccf337402a8e5b959dd5ba0e5e5b2ead70273df3277/regex-2025.9.18-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "d13ab0490128f2bb45d596f754148cd750411afc97e813e4b3a61cf278a23bb6"}}, + {name = "regex-2025.9.18-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/6d/df/e06ffaf078a162f6dd6b101a5ea9b44696dca860a48136b3ae4a9caf25e2/regex-2025.9.18-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "05440bc172bc4b4b37fb9667e796597419404dbba62e171e1f826d7d2a9ebcef"}}, + {name = "regex-2025.9.18-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/05/25b05480b63292fd8e84800b1648e160ca778127b8d2367a0a258fa2e225/regex-2025.9.18-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "5514b8e4031fdfaa3d27e92c75719cbe7f379e28cacd939807289bce76d0e35a"}}, + {name = "regex-2025.9.18-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/70/97/7bc7574655eb651ba3a916ed4b1be6798ae97af30104f655d8efd0cab24b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "65d3c38c39efce73e0d9dc019697b39903ba25b1ad45ebbd730d2cf32741f40d"}}, + {name = "regex-2025.9.18-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b4/c2/d5da49166a52dda879855ecdba0117f073583db2b39bb47ce9a3378a8e9e/regex-2025.9.18-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "ae77e447ebc144d5a26d50055c6ddba1d6ad4a865a560ec7200b8b06bc529368"}}, + {name = "regex-2025.9.18-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bd/2d/0a5c4e6ec417de56b89ff4418ecc72f7e3feca806824c75ad0bbdae0516b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "e3ef8cf53dc8df49d7e28a356cf824e3623764e9833348b655cfed4524ab8a90"}}, + {name = "regex-2025.9.18-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/f4/8e/d656af63e31a86572ec829665d6fa06eae7e144771e0330650a8bb865635/regex-2025.9.18-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9feb29817df349c976da9a0debf775c5c33fc1c8ad7b9f025825da99374770b7"}}, + {name = "regex-2025.9.18-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/db/ce/06edc89df8f7b83ffd321b6071be4c54dc7332c0f77860edc40ce57d757b/regex-2025.9.18-cp313-cp313t-win32.whl",hashes = {sha256 = "168be0d2f9b9d13076940b1ed774f98595b4e3c7fc54584bba81b3cc4181742e"}}, + {name = "regex-2025.9.18-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/83/9a/2b5d9c8b307a451fd17068719d971d3634ca29864b89ed5c18e499446d4a/regex-2025.9.18-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "d59ecf3bb549e491c8104fea7313f3563c7b048e01287db0a90485734a70a730"}}, + {name = 
"regex-2025.9.18-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/3d/70/177d31e8089a278a764f8ec9a3faac8d14a312d622a47385d4b43905806f/regex-2025.9.18-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "dbef80defe9fb21310948a2595420b36c6d641d9bea4c991175829b2cc4bc06a"}}, + {name = "regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e"}}, + {name = "regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a"}}, + {name = "regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab"}}, + {name = "regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5"}}, + {name = "regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742"}}, + {name = "regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425"}}, + {name = "regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352"}}, + {name = "regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d"}}, + {name = "regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56"}}, + 
{name = "regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e"}}, + {name = "regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282"}}, + {name = "regex-2025.9.18-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl",hashes = {sha256 = "e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459"}}, + {name = "regex-2025.9.18-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl",hashes = {sha256 = "3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77"}}, + {name = "regex-2025.9.18-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl",hashes = {sha256 = "032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5"}}, + {name = "regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a"}}, + {name = "regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8"}}, + {name = "regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414"}}, + {name = "regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a"}}, + {name = "regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4"}}, + {name = "regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = 
"https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a"}}, + {name = "regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f"}}, + {name = "regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a"}}, + {name = "regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9"}}, + {name = "regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2"}}, + {name = "regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95"}}, + {name = "regex-2025.9.18-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl",hashes = {sha256 = "895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07"}}, + {name = "regex-2025.9.18-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl",hashes = {sha256 = "7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9"}}, + {name = "regex-2025.9.18-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl",hashes = {sha256 = "fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df"}}, + {name = "regex-2025.9.18-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/7e/d8/7e06171db8e55f917c5b8e89319cea2d86982e3fc46b677f40358223dece/regex-2025.9.18-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "12296202480c201c98a84aecc4d210592b2f55e200a1d193235c4db92b9f6788"}}, + {name = "regex-2025.9.18-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/70/bf91bb39e5bedf75ce730ffbaa82ca585584d13335306d637458946b8b9f/regex-2025.9.18-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "220381f1464a581f2ea988f2220cf2a67927adcef107d47d6897ba5a2f6d51a4"}}, + {name = 
"regex-2025.9.18-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fe/89/69f79b28365eda2c46e64c39d617d5f65a2aa451a4c94de7d9b34c2dc80f/regex-2025.9.18-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "87f681bfca84ebd265278b5daa1dcb57f4db315da3b5d044add7c30c10442e61"}}, + {name = "regex-2025.9.18-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/44/31/81e62955726c3a14fcc1049a80bc716765af6c055706869de5e880ddc783/regex-2025.9.18-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "34d674cbba70c9398074c8a1fcc1a79739d65d1105de2a3c695e2b05ea728251"}}, + {name = "regex-2025.9.18-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/fb/23/07072b7e191fbb6e213dc03b2f5b96f06d3c12d7deaded84679482926fc7/regex-2025.9.18-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "385c9b769655cb65ea40b6eea6ff763cbb6d69b3ffef0b0db8208e1833d4e746"}}, + {name = "regex-2025.9.18-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/b3/f0/aec7f6a01f2a112210424d77c6401b9015675fb887ced7e18926df4ae51e/regex-2025.9.18-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "8900b3208e022570ae34328712bef6696de0804c122933414014bae791437ab2"}}, + {name = "regex-2025.9.18-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/cc/90/2e5f9da89d260de7d0417ead91a1bc897f19f0af05f4f9323313b76c47f2/regex-2025.9.18-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "c204e93bf32cd7a77151d44b05eb36f469d0898e3fba141c026a26b79d9914a0"}}, + {name = "regex-2025.9.18-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/2b/d5/1c712c7362f2563d389be66bae131c8bab121a3fabfa04b0b5bfc9e73c51/regex-2025.9.18-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "3acc471d1dd7e5ff82e6cacb3b286750decd949ecd4ae258696d04f019817ef8"}}, + {name = "regex-2025.9.18-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/4f/92/c54cdb4aa41009632e69817a5aa452673507f07e341076735a2f6c46a37c/regex-2025.9.18-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6479d5555122433728760e5f29edb4c2b79655a8deb681a141beb5c8a025baea"}}, + {name = "regex-2025.9.18-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/db/99/75c996dc6a2231a8652d7ad0bfbeaf8a8c77612d335580f520f3ec40e30b/regex-2025.9.18-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "431bd2a8726b000eb6f12429c9b438a24062a535d06783a93d2bcbad3698f8a8"}}, + {name = "regex-2025.9.18-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/1c/f7/25aba34cc130cb6844047dbfe9716c9b8f9629fee8b8bec331aa9241b97b/regex-2025.9.18-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "0cc3521060162d02bd36927e20690129200e5ac9d2c6d32b70368870b122db25"}}, + {name = "regex-2025.9.18-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/51/eb/64e671beafa0ae29712268421597596d781704973551312b2425831d4037/regex-2025.9.18-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"a021217b01be2d51632ce056d7a837d3fa37c543ede36e39d14063176a26ae29"}}, + {name = "regex-2025.9.18-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/26/33/c0ebc0b07bd0bf88f716cca240546b26235a07710ea58e271cfe390ae273/regex-2025.9.18-cp310-cp310-win32.whl",hashes = {sha256 = "4a12a06c268a629cb67cc1d009b7bb0be43e289d00d5111f86a2efd3b1949444"}}, + {name = "regex-2025.9.18-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/59/39/aeb11a4ae68faaec2498512cadae09f2d8a91f1f65730fe62b9bffeea150/regex-2025.9.18-cp310-cp310-win_amd64.whl",hashes = {sha256 = "47acd811589301298c49db2c56bde4f9308d6396da92daf99cba781fa74aa450"}}, + {name = "regex-2025.9.18-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/29/04/37f2d3fc334a1031fc2767c9d89cec13c2e72207c7e7f6feae8a47f4e149/regex-2025.9.18-cp310-cp310-win_arm64.whl",hashes = {sha256 = "16bd2944e77522275e5ee36f867e19995bcaa533dcb516753a26726ac7285442"}}, ] marker = "\"default\" in dependency_groups or \"recommended\" in extras" @@ -1552,39 +2049,39 @@ dependencies = [] [[packages]] name = "tokenizers" -version = "0.21.2" +version = "0.22.1" requires-python = ">=3.9" -sdist = {name = "tokenizers-0.21.2.tar.gz", url = "https://files.pythonhosted.org/packages/ab/2d/b0fce2b8201635f60e8c95990080f58461cc9ca3d5026de2e900f38a7f21/tokenizers-0.21.2.tar.gz", hashes = {sha256 = "fdc7cffde3e2113ba0e6cc7318c40e3438a4d74bbc62bf04bcc63bdfb082ac77"}} -wheels = [ - {name = "tokenizers-0.21.2-cp39-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/1d/cc/2936e2d45ceb130a21d929743f1e9897514691bec123203e10837972296f/tokenizers-0.21.2-cp39-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "342b5dfb75009f2255ab8dec0041287260fed5ce00c323eb6bab639066fef8ec"}}, - {name = "tokenizers-0.21.2-cp39-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6c/e6/33f41f2cc7861faeba8988e7a77601407bf1d9d28fc79c5903f8f77df587/tokenizers-0.21.2-cp39-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "126df3205d6f3a93fea80c7a8a266a78c1bd8dd2fe043386bafdd7736a23e45f"}}, - {name = "tokenizers-0.21.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/33/2b/1791eb329c07122a75b01035b1a3aa22ad139f3ce0ece1b059b506d9d9de/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "4a32cd81be21168bd0d6a0f0962d60177c447a1aa1b1e48fa6ec9fc728ee0b12"}}, - {name = "tokenizers-0.21.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/05/15/fd2d8104faa9f86ac68748e6f7ece0b5eb7983c7efc3a2c197cb98c99030/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "8bd8999538c405133c2ab999b83b17c08b7fc1b48c1ada2469964605a709ef91"}}, - {name = "tokenizers-0.21.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/a5/2e/53e8fd053e1f3ffbe579ca5f9546f35ac67cf0039ed357ad7ec57f5f5af0/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "5e9944e61239b083a41cf8fc42802f855e1dca0f499196df37a8ce219abac6eb"}}, - {name = "tokenizers-0.21.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/00/15/79713359f4037aa8f4d1f06ffca35312ac83629da062670e8830917e2153/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = 
"514cd43045c5d546f01142ff9c79a96ea69e4b5cda09e3027708cb2e6d5762ab"}}, - {name = "tokenizers-0.21.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/38/5f/959f3a8756fc9396aeb704292777b84f02a5c6f25c3fc3ba7530db5feb2c/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "b1b9405822527ec1e0f7d8d2fdb287a5730c3a6518189c968254a8441b21faae"}}, - {name = "tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/c5/74/f41a432a0733f61f3d21b288de6dfa78f7acff309c6f0f323b2833e9189f/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fed9a4d51c395103ad24f8e7eb976811c57fbec2af9f133df471afcd922e5020"}}, - {name = "tokenizers-0.21.2-cp39-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/3c/6a/bc220a11a17e5d07b0dfb3b5c628621d4dcc084bccd27cfaead659963016/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2c41862df3d873665ec78b6be36fcc30a26e3d4902e9dd8608ed61d49a48bc19"}}, - {name = "tokenizers-0.21.2-cp39-abi3-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/6c/bd/ac386d79c4ef20dc6f39c4706640c24823dca7ebb6f703bfe6b5f0292d88/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_armv7l.whl",hashes = {sha256 = "ed21dc7e624e4220e21758b2e62893be7101453525e3d23264081c9ef9a6d00d"}}, - {name = "tokenizers-0.21.2-cp39-abi3-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/63/7b/5440bf203b2a5358f074408f7f9c42884849cd9972879e10ee6b7a8c3b3d/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_i686.whl",hashes = {sha256 = "0e73770507e65a0e0e2a1affd6b03c36e3bc4377bd10c9ccf51a82c77c0fe365"}}, - {name = "tokenizers-0.21.2-cp39-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/d2/faa1acac3f96a7427866e94ed4289949b2524f0c1878512516567d80563c/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "106746e8aa9014a12109e58d540ad5465b4c183768ea96c03cbc24c44d329958"}}, - {name = "tokenizers-0.21.2-cp39-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/d8/a5/896e1ef0707212745ae9f37e84c7d50269411aef2e9ccd0de63623feecdf/tokenizers-0.21.2-cp39-abi3-win32.whl",hashes = {sha256 = "cabda5a6d15d620b6dfe711e1af52205266d05b379ea85a8a301b3593c60e962"}}, - {name = "tokenizers-0.21.2-cp39-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/13/c3/cc2755ee10be859c4338c962a35b9a663788c0c0b50c0bdd8078fb6870cf/tokenizers-0.21.2-cp39-abi3-win_amd64.whl",hashes = {sha256 = "58747bb898acdb1007f37a7bbe614346e98dc28708ffb66a3fd50ce169ac6c98"}}, +sdist = {name = "tokenizers-0.22.1.tar.gz", url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hashes = {sha256 = "61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9"}} +wheels = [ + {name = "tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73"}}, + {name = "tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = 
"8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc"}}, + {name = "tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a"}}, + {name = "tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7"}}, + {name = "tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21"}}, + {name = "tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214"}}, + {name = "tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f"}}, + {name = "tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4"}}, + {name = "tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879"}}, + {name = "tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl",hashes = {sha256 = "331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446"}}, + {name = "tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl",hashes = {sha256 = "607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a"}}, + {name = "tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390"}}, + 
{name = "tokenizers-0.22.1-cp39-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl",hashes = {sha256 = "b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82"}}, + {name = "tokenizers-0.22.1-cp39-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl",hashes = {sha256 = "65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138"}}, ] marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [ - "huggingface-hub<1.0,>=0.16.4", + "huggingface-hub<2.0,>=0.16.4", ] [[packages]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" requires-python = ">=3.9" -sdist = {name = "typing_inspection-0.4.1.tar.gz", url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hashes = {sha256 = "6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}} +sdist = {name = "typing_inspection-0.4.2.tar.gz", url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hashes = {sha256 = "ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}} wheels = [ - {name = "typing_inspection-0.4.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl",hashes = {sha256 = "389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}}, + {name = "typing_inspection-0.4.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl",hashes = {sha256 = "4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}}, ] marker = "\"default\" in dependency_groups" @@ -1595,11 +2092,11 @@ dependencies = [ [[packages]] name = "virtualenv" -version = "20.31.2" +version = "20.34.0" requires-python = ">=3.8" -sdist = {name = "virtualenv-20.31.2.tar.gz", url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hashes = {sha256 = "e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}} +sdist = {name = "virtualenv-20.34.0.tar.gz", url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hashes = {sha256 = "44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a"}} wheels = [ - {name = "virtualenv-20.31.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl",hashes = {sha256 = "36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}}, + {name = "virtualenv-20.34.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl",hashes = {sha256 = "341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026"}}, ] marker = "\"dev\" in extras" @@ -1609,15 +2106,16 @@ dependencies = [ "filelock<4,>=3.12.2", "importlib-metadata>=6.6; python_version < \"3.8\"", "platformdirs<5,>=3.9.1", + 
"typing-extensions>=4.13.2; python_version < \"3.11\"", ] [[packages]] name = "platformdirs" -version = "4.3.8" -requires-python = ">=3.9" -sdist = {name = "platformdirs-4.3.8.tar.gz", url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hashes = {sha256 = "3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}} +version = "4.5.0" +requires-python = ">=3.10" +sdist = {name = "platformdirs-4.5.0.tar.gz", url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hashes = {sha256 = "70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312"}} wheels = [ - {name = "platformdirs-4.3.8-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl",hashes = {sha256 = "ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}}, + {name = "platformdirs-4.5.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl",hashes = {sha256 = "e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3"}}, ] marker = "\"dev\" in extras" @@ -1626,10 +2124,10 @@ dependencies = [] [[packages]] name = "distlib" -version = "0.3.9" -sdist = {name = "distlib-0.3.9.tar.gz", url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hashes = {sha256 = "a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}} +version = "0.4.0" +sdist = {name = "distlib-0.4.0.tar.gz", url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hashes = {sha256 = "feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}} wheels = [ - {name = "distlib-0.3.9-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl",hashes = {sha256 = "47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}}, + {name = "distlib-0.4.0-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl",hashes = {sha256 = "9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"}}, ] marker = "\"dev\" in extras" @@ -1638,62 +2136,123 @@ dependencies = [] [[packages]] name = "yarl" -version = "1.20.1" +version = "1.22.0" requires-python = ">=3.9" -sdist = {name = "yarl-1.20.1.tar.gz", url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hashes = {sha256 = "d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac"}} -wheels = [ - {name = "yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a"}}, - {name = "yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3"}}, - {name = "yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7"}}, - {name = "yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691"}}, - {name = "yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31"}}, - {name = "yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28"}}, - {name = "yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653"}}, - {name = "yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5"}}, - {name = "yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02"}}, - {name = "yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53"}}, - {name = "yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc"}}, - {name = "yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04"}}, - {name = "yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4"}}, - {name = "yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b"}}, - {name = "yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1"}}, - {name = "yarl-1.20.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl",hashes = {sha256 = "468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7"}}, - {name = "yarl-1.20.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c"}}, - {name = "yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d"}}, - {name = "yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf"}}, - {name = "yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3"}}, - {name = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d"}}, - {name = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c"}}, - {name = 
"yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1"}}, - {name = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce"}}, - {name = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3"}}, - {name = "yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be"}}, - {name = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16"}}, - {name = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513"}}, - {name = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f"}}, - {name = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390"}}, - {name = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458"}}, - {name = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e"}}, - {name = "yarl-1.20.1-cp313-cp313t-win32.whl",url = 
"https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl",hashes = {sha256 = "b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d"}}, - {name = "yarl-1.20.1-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f"}}, - {name = "yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9"}}, - {name = "yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a"}}, - {name = "yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2"}}, - {name = "yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee"}}, - {name = "yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819"}}, - {name = "yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16"}}, - {name = "yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6"}}, - {name = "yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd"}}, - {name = "yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a"}}, - {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38"}}, - {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef"}}, - {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f"}}, - {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8"}}, - {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a"}}, - {name = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"}}, - {name = "yarl-1.20.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl",hashes = {sha256 = "daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"}}, - {name = "yarl-1.20.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"}}, - {name = "yarl-1.20.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl",hashes = {sha256 = "83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}}, +sdist = {name = "yarl-1.22.0.tar.gz", url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hashes = {sha256 = "bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71"}} +wheels = [ + {name = "yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 
= "34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4"}}, + {name = "yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683"}}, + {name = "yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b"}}, + {name = "yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e"}}, + {name = "yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590"}}, + {name = "yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2"}}, + {name = "yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da"}}, + {name = "yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784"}}, + {name = "yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b"}}, + {name = "yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl",hashes = {sha256 = "4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694"}}, + {name = "yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d"}}, + {name = "yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd"}}, + {name = "yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da"}}, + {name = "yarl-1.22.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl",hashes = {sha256 = "6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2"}}, + {name = "yarl-1.22.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79"}}, + {name = "yarl-1.22.0-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl",hashes = {sha256 = "8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33"}}, + {name = "yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1"}}, + {name = "yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca"}}, + {name = "yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53"}}, + {name = "yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c"}}, + {name = "yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf"}}, + {name = 
"yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face"}}, + {name = "yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b"}}, + {name = "yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486"}}, + {name = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138"}}, + {name = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a"}}, + {name = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529"}}, + {name = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093"}}, + {name = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c"}}, + {name = "yarl-1.22.0-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl",hashes = {sha256 = "8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e"}}, + {name = "yarl-1.22.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27"}}, + {name = "yarl-1.22.0-cp314-cp314t-win_arm64.whl",url = 
"https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1"}}, + {name = "yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53"}}, + {name = "yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a"}}, + {name = "yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c"}}, + {name = "yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601"}}, + {name = "yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a"}}, + {name = "yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df"}}, + {name = "yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2"}}, + {name = "yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b"}}, + {name = "yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273"}}, + {name = 
"yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a"}}, + {name = "yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d"}}, + {name = "yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02"}}, + {name = "yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67"}}, + {name = "yarl-1.22.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl",hashes = {sha256 = "d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95"}}, + {name = "yarl-1.22.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d"}}, + {name = "yarl-1.22.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b"}}, + {name = "yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10"}}, + {name = "yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3"}}, + {name = "yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9"}}, + {name = "yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f"}}, + {name = 
"yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0"}}, + {name = "yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e"}}, + {name = "yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708"}}, + {name = "yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f"}}, + {name = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d"}}, + {name = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8"}}, + {name = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5"}}, + {name = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f"}}, + {name = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62"}}, + {name = "yarl-1.22.0-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl",hashes = {sha256 = "1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03"}}, + {name = "yarl-1.22.0-cp313-cp313t-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249"}}, + {name = "yarl-1.22.0-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b"}}, + {name = "yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f"}}, + {name = "yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2"}}, + {name = "yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74"}}, + {name = "yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df"}}, + {name = "yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb"}}, + {name = "yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2"}}, + {name = "yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82"}}, + {name = "yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a"}}, + {name = "yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124"}}, + {name = "yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa"}}, + {name = "yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7"}}, + {name = "yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d"}}, + {name = "yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520"}}, + {name = "yarl-1.22.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl",hashes = {sha256 = "70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8"}}, + {name = "yarl-1.22.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c"}}, + {name = "yarl-1.22.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74"}}, + {name = "yarl-1.22.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl",hashes = {sha256 = "1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff"}}, + {name = "yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511"}}, + {name = "yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6"}}, + {name = "yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = 
"792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028"}}, + {name = "yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d"}}, + {name = "yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503"}}, + {name = "yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65"}}, + {name = "yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e"}}, + {name = "yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d"}}, + {name = "yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7"}}, + {name = "yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967"}}, + {name = "yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed"}}, + {name = "yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6"}}, + {name = "yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e"}}, + {name = "yarl-1.22.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl",hashes = {sha256 = "a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca"}}, + {name = "yarl-1.22.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b"}}, + {name = "yarl-1.22.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376"}}, + {name = "yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/d1/43/a2204825342f37c337f5edb6637040fa14e365b2fcc2346960201d457579/yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e"}}, + {name = "yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/44/6f/674f3e6f02266428c56f704cd2501c22f78e8b2eeb23f153117cc86fb28a/yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f"}}, + {name = "yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b8/12/5b274d8a0f30c07b91b2f02cba69152600b47830fcfb465c108880fcee9c/yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf"}}, + {name = "yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e2/7f/df1b6949b1fa1aa9ff6de6e2631876ad4b73c4437822026e85d8acb56bb1/yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a"}}, + {name = "yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/84/09/f92ed93bd6cd77872ab6c3462df45ca45cd058d8f1d0c9b4f54c1704429f/yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c"}}, + {name = "yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c3/97/ac3f3feae7d522cf7ccec3d340bb0b2b61c56cb9767923df62a135092c6b/yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147"}}, + {name = "yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = 
"https://files.pythonhosted.org/packages/06/49/f3219097403b9c84a4d079b1d7bda62dd9b86d0d6e4428c02d46ab2c77fc/yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb"}}, + {name = "yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/9f/06b765d45c0e44e8ecf0fe15c9eacbbde342bb5b7561c46944f107bfb6c3/yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6"}}, + {name = "yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c5/69/599e7cea8d0fcb1694323b0db0dda317fa3162f7b90166faddecf532166f/yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0"}}, + {name = "yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/95/6f/9dfd12c8bc90fea9eab39832ee32ea48f8e53d1256252a77b710c065c89f/yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda"}}, + {name = "yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/57/2e/34c5b4eb9b07e16e873db5b182c71e5f06f9b5af388cdaa97736d79dd9a6/yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc"}}, + {name = "yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/31/71/fa7e10fb772d273aa1f096ecb8ab8594117822f683bab7d2c5a89914c92a/yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737"}}, + {name = "yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/26/da/11374c04e8e1184a6a03cf9c8f5688d3e5cec83ed6f31ad3481b3207f709/yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467"}}, + {name = "yarl-1.22.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/82/8f/e2d01f161b0c034a30410e375e191a5d27608c1f8693bab1a08b089ca096/yarl-1.22.0-cp310-cp310-win32.whl",hashes = {sha256 = "595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea"}}, + {name = "yarl-1.22.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/62/46/94c76196642dbeae634c7a61ba3da88cd77bed875bf6e4a8bed037505aa6/yarl-1.22.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca"}}, + {name = "yarl-1.22.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/af/af/7df4f179d3b1a6dcb9a4bd2ffbc67642746fcafdb62580e66876ce83fff4/yarl-1.22.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1706,59 +2265,116 @@ dependencies = [ [[packages]] name = "propcache" -version = "0.3.2" +version = "0.4.1" requires-python = ">=3.9" -sdist = {name = "propcache-0.3.2.tar.gz", url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hashes = {sha256 = 
"20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168"}} -wheels = [ - {name = "propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945"}}, - {name = "propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252"}}, - {name = "propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f"}}, - {name = "propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33"}}, - {name = "propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e"}}, - {name = "propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1"}}, - {name = "propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3"}}, - {name = "propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1"}}, - {name = "propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6"}}, - {name = "propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = 
"92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387"}}, - {name = "propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4"}}, - {name = "propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88"}}, - {name = "propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206"}}, - {name = "propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43"}}, - {name = "propcache-0.3.2-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl",hashes = {sha256 = "8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02"}}, - {name = "propcache-0.3.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05"}}, - {name = "propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b"}}, - {name = "propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0"}}, - {name = "propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e"}}, - {name = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28"}}, - {name = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a"}}, - {name = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c"}}, - {name = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725"}}, - {name = "propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892"}}, - {name = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44"}}, - {name = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe"}}, - {name = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81"}}, - {name = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba"}}, - {name = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770"}}, - {name = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330"}}, - {name = "propcache-0.3.2-cp313-cp313t-win32.whl",url = 
"https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl",hashes = {sha256 = "f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394"}}, - {name = "propcache-0.3.2-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198"}}, - {name = "propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10"}}, - {name = "propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154"}}, - {name = "propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615"}}, - {name = "propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db"}}, - {name = "propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1"}}, - {name = "propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c"}}, - {name = "propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67"}}, - {name = "propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b"}}, - {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8"}}, - {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251"}}, - {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474"}}, - {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535"}}, - {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06"}}, - {name = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"}}, - {name = "propcache-0.3.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl",hashes = {sha256 = "df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"}}, - {name = "propcache-0.3.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"}}, - {name = "propcache-0.3.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl",hashes = {sha256 = "98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"}}, +sdist = {name = "propcache-0.4.1.tar.gz", url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hashes = {sha256 = "f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d"}} +wheels = [ + {name = "propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12"}}, + {name = "propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c"}}, + {name = "propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded"}}, + {name = "propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641"}}, + {name = "propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4"}}, + {name = "propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44"}}, + {name = "propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d"}}, + {name = "propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b"}}, + {name = "propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl",hashes = {sha256 = "89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e"}}, + {name = "propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f"}}, + {name = "propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49"}}, + {name = 
"propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144"}}, + {name = "propcache-0.4.1-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl",hashes = {sha256 = "ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f"}}, + {name = "propcache-0.4.1-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl",hashes = {sha256 = "5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153"}}, + {name = "propcache-0.4.1-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl",hashes = {sha256 = "74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992"}}, + {name = "propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f"}}, + {name = "propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393"}}, + {name = "propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0"}}, + {name = "propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a"}}, + {name = "propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be"}}, + {name = "propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc"}}, + {name = "propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a"}}, + {name = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89"}}, + {name = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726"}}, + {name = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367"}}, + {name = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36"}}, + {name = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455"}}, + {name = "propcache-0.4.1-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl",hashes = {sha256 = "05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85"}}, + {name = "propcache-0.4.1-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1"}}, + {name = "propcache-0.4.1-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9"}}, + {name = "propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf"}}, + {name = "propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311"}}, + {name = "propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74"}}, + {name = "propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe"}}, + {name = "propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af"}}, + {name = "propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c"}}, + {name = "propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f"}}, + {name = "propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1"}}, + {name = "propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24"}}, + {name = "propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa"}}, + {name = "propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61"}}, + {name = "propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66"}}, + {name = 
"propcache-0.4.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl",hashes = {sha256 = "bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81"}}, + {name = "propcache-0.4.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e"}}, + {name = "propcache-0.4.1-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl",hashes = {sha256 = "8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1"}}, + {name = "propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b"}}, + {name = "propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566"}}, + {name = "propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835"}}, + {name = "propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e"}}, + {name = "propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859"}}, + {name = "propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b"}}, + {name = "propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0"}}, + {name = 
"propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af"}}, + {name = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393"}}, + {name = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874"}}, + {name = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7"}}, + {name = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1"}}, + {name = "propcache-0.4.1-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl",hashes = {sha256 = "2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717"}}, + {name = "propcache-0.4.1-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37"}}, + {name = "propcache-0.4.1-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a"}}, + {name = "propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2"}}, + {name = "propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403"}}, + {name = "propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207"}}, + {name = 
"propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72"}}, + {name = "propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367"}}, + {name = "propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4"}}, + {name = "propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf"}}, + {name = "propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3"}}, + {name = "propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778"}}, + {name = "propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6"}}, + {name = "propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9"}}, + {name = "propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75"}}, + {name = "propcache-0.4.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl",hashes = {sha256 = "671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8"}}, + {name = 
"propcache-0.4.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db"}}, + {name = "propcache-0.4.1-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl",hashes = {sha256 = "204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1"}}, + {name = "propcache-0.4.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl",hashes = {sha256 = "af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237"}}, + {name = "propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf"}}, + {name = "propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5"}}, + {name = "propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e"}}, + {name = "propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566"}}, + {name = "propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165"}}, + {name = "propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc"}}, + {name = "propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48"}}, + {name = "propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570"}}, + {name = "propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85"}}, + {name = "propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e"}}, + {name = "propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757"}}, + {name = "propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f"}}, + {name = "propcache-0.4.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl",hashes = {sha256 = "f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1"}}, + {name = "propcache-0.4.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6"}}, + {name = "propcache-0.4.1-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl",hashes = {sha256 = "e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239"}}, + {name = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3c/0e/934b541323035566a9af292dba85a195f7b78179114f2c6ebb24551118a9/propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}}, + {name = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}}, + {name = "propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925"}}, + {name = "propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/df/1b/39313ddad2bf9187a1432654c38249bab4562ef535ef07f5eb6eb04d0b1b/propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21"}}, + {name = "propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/5b/01/f1d0b57d136f294a142acf97f4ed58c8e5b974c21e543000968357115011/propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5"}}, + {name = "propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/a1/c8/038d909c61c5bb039070b3fb02ad5cccdb1dde0d714792e251cdb17c9c05/propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db"}}, + {name = "propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7"}}, + {name = "propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/42/df/5615fec76aa561987a534759b3686008a288e73107faa49a8ae5795a9f7a/propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4"}}, + {name = "propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/d5/21/62949eb3a7a54afe8327011c90aca7e03547787a88fb8bd9726806482fea/propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60"}}, + {name = "propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f"}}, + {name = "propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900"}}, + {name = "propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c"}}, + {name = "propcache-0.4.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl",hashes = {sha256 = "a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb"}}, + {name = "propcache-0.4.1-cp310-cp310-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37"}}, + {name = "propcache-0.4.1-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl",hashes = {sha256 = "d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1824,62 +2440,123 @@ dependencies = [ [[packages]] name = "frozenlist" -version = "1.7.0" +version = "1.8.0" requires-python = ">=3.9" -sdist = {name = "frozenlist-1.7.0.tar.gz", url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hashes = {sha256 = "2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"}} -wheels = [ - {name = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee"}}, - {name = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d"}}, - {name = "frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43"}}, - {name = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d"}}, - {name = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee"}}, - {name = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb"}}, - {name = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f"}}, - {name = 
"frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60"}}, - {name = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00"}}, - {name = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b"}}, - {name = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c"}}, - {name = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949"}}, - {name = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca"}}, - {name = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b"}}, - {name = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e"}}, - {name = "frozenlist-1.7.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl",hashes = {sha256 = "5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1"}}, - {name = "frozenlist-1.7.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl",url = 
"https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes 
= {sha256 = "32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl",hashes = {sha256 = "3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81"}}, - {name = "frozenlist-1.7.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e"}}, - {name = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2"}}, - {name = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb"}}, - {name = "frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478"}}, - {name = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8"}}, - {name = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08"}}, - {name = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4"}}, - {name = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b"}}, - {name = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e"}}, - {name = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca"}}, - {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df"}}, - {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5"}}, - {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025"}}, - {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = 
"73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01"}}, - {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08"}}, - {name = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"}}, - {name = "frozenlist-1.7.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl",hashes = {sha256 = "426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"}}, - {name = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"}}, - {name = "frozenlist-1.7.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl",hashes = {sha256 = "9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}}, +sdist = {name = "frozenlist-1.8.0.tar.gz", url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hashes = {sha256 = "3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad"}} +wheels = [ + {name = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0"}}, + {name = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f"}}, + {name = "frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c"}}, + {name = "frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2"}}, + {name = "frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8"}}, + {name = "frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686"}}, + {name = "frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e"}}, + {name = "frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a"}}, + {name = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128"}}, + {name = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl",hashes = {sha256 = "33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f"}}, + {name = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7"}}, + {name = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30"}}, + {name = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7"}}, + {name = "frozenlist-1.8.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl",hashes = {sha256 = "bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806"}}, + {name = "frozenlist-1.8.0-cp314-cp314-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0"}}, + {name = "frozenlist-1.8.0-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl",hashes = {sha256 = "d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = 
"33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl",hashes = {sha256 = "342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd"}}, + {name = "frozenlist-1.8.0-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79"}}, + {name = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a"}}, + {name = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7"}}, + {name = "frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = 
"f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40"}}, + {name = "frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027"}}, + {name = "frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822"}}, + {name = "frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121"}}, + {name = "frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5"}}, + {name = "frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e"}}, + {name = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11"}}, + {name = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1"}}, + {name = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1"}}, + {name = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8"}}, + {name = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed"}}, + {name = "frozenlist-1.8.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl",hashes = {sha256 = "8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496"}}, + {name = "frozenlist-1.8.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231"}}, + {name = "frozenlist-1.8.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl",hashes = {sha256 = "0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888"}}, + {name = "frozenlist-1.8.0-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042"}}, + {name = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = 
"78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1"}}, + {name = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b"}}, + {name = "frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4"}}, + {name = "frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383"}}, + {name = "frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4"}}, + {name = "frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8"}}, + {name = "frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b"}}, + {name = "frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52"}}, + {name = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29"}}, + {name = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3"}}, + {name = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143"}}, + {name = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608"}}, + {name = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa"}}, + {name = "frozenlist-1.8.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl",hashes = {sha256 = "433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf"}}, + {name = "frozenlist-1.8.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746"}}, + {name = "frozenlist-1.8.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd"}}, + {name = "frozenlist-1.8.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl",hashes = {sha256 = "0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d"}}, + {name = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84"}}, + {name = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9"}}, + {name = "frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93"}}, + {name = "frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f"}}, + {name = 
"frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695"}}, + {name = "frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52"}}, + {name = "frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581"}}, + {name = "frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567"}}, + {name = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b"}}, + {name = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92"}}, + {name = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d"}}, + {name = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd"}}, + {name = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967"}}, + {name = "frozenlist-1.8.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl",hashes = {sha256 = 
"27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25"}}, + {name = "frozenlist-1.8.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b"}}, + {name = "frozenlist-1.8.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a"}}, + {name = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/83/4a/557715d5047da48d54e659203b9335be7bfaafda2c3f627b7c47e0b3aaf3/frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011"}}, + {name = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/fb/c85f9fed3ea8fe8740e5b46a59cc141c23b842eca617da8876cfce5f760e/frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565"}}, + {name = "frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/63/70/26ca3f06aace16f2352796b08704338d74b6d1a24ca38f2771afbb7ed915/frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad"}}, + {name = "frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/ed/c7895fd2fde7f3ee70d248175f9b6cdf792fb741ab92dc59cd9ef3bd241b/frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2"}}, + {name = "frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/6b/83/4d587dccbfca74cb8b810472392ad62bfa100bf8108c7223eb4c4fa2f7b3/frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186"}}, + {name = "frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/6a/c6/fd3b9cd046ec5fff9dab66831083bc2077006a874a2d3d9247dea93ddf7e/frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl",hashes = {sha256 = "c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e"}}, + {name = "frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ce/80/6693f55eb2e085fc8afb28cf611448fb5b90e98e068fa1d1b8d8e66e5c7d/frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450"}}, + {name = "frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = 
"https://files.pythonhosted.org/packages/97/d6/e9459f7c5183854abd989ba384fe0cc1a0fb795a83c033f0571ec5933ca4/frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef"}}, + {name = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/97/92/24e97474b65c0262e9ecd076e826bfd1d3074adcc165a256e42e7b8a7249/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4"}}, + {name = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/ee/bf/dc394a097508f15abff383c5108cb8ad880d1f64a725ed3b90d5c2fbf0bb/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff"}}, + {name = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/40/90/25b201b9c015dbc999a5baf475a257010471a1fa8c200c843fd4abbee725/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c"}}, + {name = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/84/f4/b5bc148df03082f05d2dd30c089e269acdbe251ac9a9cf4e727b2dbb8a3d/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f"}}, + {name = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/db/4b/87e95b5d15097c302430e647136b7d7ab2398a702390cf4c8601975709e7/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7"}}, + {name = "frozenlist-1.8.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/e5/70/78a0315d1fea97120591a83e0acd644da638c872f142fd72a6cebee825f3/frozenlist-1.8.0-cp310-cp310-win32.whl",hashes = {sha256 = "adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a"}}, + {name = "frozenlist-1.8.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/66/aa/3f04523fb189a00e147e60c5b2205126118f216b0aa908035c45336e27e4/frozenlist-1.8.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6"}}, + {name = "frozenlist-1.8.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/39/75/1135feecdd7c336938bd55b4dc3b0dfc46d85b9be12ef2628574b28de776/frozenlist-1.8.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1903,11 +2580,11 @@ dependencies = [ [[packages]] name = "attrs" -version = "25.3.0" -requires-python = ">=3.8" -sdist = {name = "attrs-25.3.0.tar.gz", url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hashes = {sha256 = "75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}} +version = "25.4.0" +requires-python = ">=3.9" +sdist = {name = "attrs-25.4.0.tar.gz", url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hashes = {sha256 = 
"16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}} wheels = [ - {name = "attrs-25.3.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl",hashes = {sha256 = "427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}}, + {name = "attrs-25.4.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl",hashes = {sha256 = "adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -1931,11 +2608,11 @@ dependencies = [ [[packages]] name = "cachetools" -version = "6.1.0" +version = "6.2.0" requires-python = ">=3.9" -sdist = {name = "cachetools-6.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/8a/89/817ad5d0411f136c484d535952aef74af9b25e0d99e90cdffbe121e6d628/cachetools-6.1.0.tar.gz", hashes = {sha256 = "b4c4f404392848db3ce7aac34950d17be4d864da4b8b66911008e430bc544587"}} +sdist = {name = "cachetools-6.2.0.tar.gz", url = "https://files.pythonhosted.org/packages/9d/61/e4fad8155db4a04bfb4734c7c8ff0882f078f24294d42798b3568eb63bff/cachetools-6.2.0.tar.gz", hashes = {sha256 = "38b328c0889450f05f5e120f56ab68c8abaf424e1275522b138ffc93253f7e32"}} wheels = [ - {name = "cachetools-6.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/00/f0/2ef431fe4141f5e334759d73e81120492b23b2824336883a91ac04ba710b/cachetools-6.1.0-py3-none-any.whl",hashes = {sha256 = "1c7bb3cf9193deaf3508b7c5f2a79986c13ea38965c5adcff1f84519cf39163e"}}, + {name = "cachetools-6.2.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/6c/56/3124f61d37a7a4e7cc96afc5492c78ba0cb551151e530b54669ddd1436ef/cachetools-6.2.0-py3-none-any.whl",hashes = {sha256 = "1c76a8960c0041fcc21097e357f882197c79da0dbff766e7317890a65d7d8ba6"}}, ] marker = "\"dev\" in extras" @@ -1944,11 +2621,11 @@ dependencies = [] [[packages]] name = "certifi" -version = "2025.7.9" +version = "2025.10.5" requires-python = ">=3.7" -sdist = {name = "certifi-2025.7.9.tar.gz", url = "https://files.pythonhosted.org/packages/de/8a/c729b6b60c66a38f590c4e774decc4b2ec7b0576be8f1aa984a53ffa812a/certifi-2025.7.9.tar.gz", hashes = {sha256 = "c1d2ec05395148ee10cf672ffc28cd37ea0ab0d99f9cc74c43e588cbd111b079"}} +sdist = {name = "certifi-2025.10.5.tar.gz", url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hashes = {sha256 = "47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}} wheels = [ - {name = "certifi-2025.7.9-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/66/f3/80a3f974c8b535d394ff960a11ac20368e06b736da395b551a49ce950cce/certifi-2025.7.9-py3-none-any.whl",hashes = {sha256 = "d842783a14f8fdd646895ac26f719a061408834473cfc10203f6a575beb15d39"}}, + {name = "certifi-2025.10.5-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl",hashes = {sha256 = "0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras or \"recommended\" in extras" @@ -2053,6 +2730,31 @@ wheels = [ {name = "coverage-7.10.7-cp312-cp312-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl",hashes = {sha256 = "f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}}, {name = "coverage-7.10.7-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl",hashes = {sha256 = "bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}}, {name = "coverage-7.10.7-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl",hashes = {sha256 = "f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}}, + {name = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}}, + {name = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}}, + {name = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl",hashes = {sha256 = "88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}}, + {name = "coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}}, + {name = "coverage-7.10.7-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl",hashes = {sha256 = "972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}}, + {name = "coverage-7.10.7-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl",hashes = {sha256 = "a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}}, + {name = "coverage-7.10.7-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl",hashes = {sha256 = "736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}}, + {name = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}}, + {name = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = 
{sha256 = "6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}}, + {name = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl",hashes = {sha256 = "606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}}, + {name = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}}, + {name = "coverage-7.10.7-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl",hashes = {sha256 = "b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}}, + {name = "coverage-7.10.7-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl",hashes = {sha256 = "3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}}, ] marker = "\"dev\" in extras" @@ -2105,6 +2807,20 @@ wheels = [ {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}}, {name = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}}, {name = "httptools-0.6.4-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl",hashes = {sha256 = "db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}}, + {name = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl",url = 
"https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}}, + {name = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}}, + {name = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}}, + {name = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}}, + {name = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}}, + {name = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}}, + {name = "httptools-0.6.4-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl",hashes = {sha256 = "288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}}, + {name = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}}, + {name = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6a/b0/17c672b4bc5c7ba7f201eada4e96c71d0a59fbc185e60e42580093a86f21/httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}}, + {name = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/92/5e/b4a826fe91971a0b68e8c2bd4e7db3e7519882f5a8ccdb1194be2b3ab98f/httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}}, + {name = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b0/51/ce61e531e40289a681a463e1258fa1e05e0be54540e40d91d065a264cd8f/httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}}, + {name = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ea/9e/270b7d767849b0c96f275c695d27ca76c30671f8eb8cc1bab6ced5c5e1d0/httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}}, + {name = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/81/86/ced96e3179c48c6f656354e106934e65c8963d48b69be78f355797f0e1b3/httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}}, + {name = "httptools-0.6.4-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/75/73/187a3f620ed3175364ddb56847d7a608a6fc42d551e133197098c0143eca/httptools-0.6.4-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}}, ] marker = "\"default\" in dependency_groups" @@ -2113,11 +2829,11 @@ dependencies = [] [[packages]] name = "identify" -version = "2.6.12" +version = "2.6.15" requires-python = ">=3.9" -sdist = {name = "identify-2.6.12.tar.gz", url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hashes = {sha256 = "d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"}} +sdist = {name = "identify-2.6.15.tar.gz", url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hashes = {sha256 = "e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf"}} wheels = [ - {name = "identify-2.6.12-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl",hashes = {sha256 = "ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"}}, + {name = "identify-2.6.15-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl",hashes = {sha256 = "1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757"}}, ] marker = "\"dev\" in extras" @@ -2154,64 +2870,126 @@ dependencies = [ [[packages]] name = "lxml" -version = "6.0.1" +version = "6.0.2" requires-python = ">=3.8" -sdist = {name = "lxml-6.0.1.tar.gz", url = "https://files.pythonhosted.org/packages/8f/bd/f9d01fd4132d81c6f43ab01983caea69ec9614b913c290a26738431a015d/lxml-6.0.1.tar.gz", hashes = {sha256 = "2b3a882ebf27dd026df3801a87cf49ff791336e0f94b0fad195db77e01240690"}} -wheels = [ - {name = "lxml-6.0.1-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/38/e3/b7eb612ce07abe766918a7e581ec6a0e5212352194001fd287c3ace945f0/lxml-6.0.1-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "29b0e849ec7030e3ecb6112564c9f7ad6881e3b2375dd4a0c486c5c1f3a33859"}}, - {name = "lxml-6.0.1-cp314-cp314-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/35/8f/ab3639a33595cf284fe733c6526da2ca3afbc5fd7f244ae67f3303cec654/lxml-6.0.1-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "02a0f7e629f73cc0be598c8b0611bf28ec3b948c549578a26111b01307fd4051"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/2c/65/819d54f2e94d5c4458c1db8c1ccac9d05230b27c1038937d3d788eb406f9/lxml-6.0.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "beab5e54de016e730875f612ba51e54c331e2fa6dc78ecf9a5415fc90d619348"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/5b/4a/d4a74ce942e60025cdaa883c5a4478921a99ce8607fc3130f1e349a83b28/lxml-6.0.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "92a08aefecd19ecc4ebf053c27789dd92c87821df2583a4337131cf181a1dffa"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/cb/48/67f15461884074edd58af17b1827b983644d1fae83b3d909e9045a08b61e/lxml-6.0.1-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "36c8fa7e177649470bc3dcf7eae6bee1e4984aaee496b9ccbf30e97ac4127fa2"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/b6/d4/ec1bf1614828a5492f4af0b6a9ee2eb3e92440aea3ac4fa158e5228b772b/lxml-6.0.1-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "5d08e0f1af6916267bb7eff21c09fa105620f07712424aaae09e8cb5dd4164d1"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/65/2b/c85929dacac08821f2100cea3eb258ce5c8804a4e32b774f50ebd7592850/lxml-6.0.1-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "9705cdfc05142f8c38c97a61bd3a29581ceceb973a014e302ee4a73cc6632476"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/36/cf544d75c269b9aad16752fd9f02d8e171c5a493ca225cb46bb7ba72868c/lxml-6.0.1-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "74555e2da7c1636e30bff4e6e38d862a634cf020ffa591f1f63da96bf8b34772"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/c2/e8/83dbc946ee598fd75fdeae6151a725ddeaab39bb321354a9468d4c9f44f3/lxml-6.0.1-cp314-cp314-manylinux_2_31_armv7l.whl",hashes = {sha256 = "e38b5f94c5a2a5dadaddd50084098dfd005e5a2a56cd200aaf5e0a20e8941782"}}, - {name = "lxml-6.0.1-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/f4/72/889c633b47c06205743ba935f4d1f5aa4eb7f0325d701ed2b0540df1b004/lxml-6.0.1-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "a5ec101a92ddacb4791977acfc86c1afd624c032974bfb6a21269d1083c9bc49"}}, - {name = "lxml-6.0.1-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b0/b6/f42a21a1428479b66ea0da7bd13e370436aecaff0cfe93270c7e165bd2a4/lxml-6.0.1-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5c17e70c82fd777df586c12114bbe56e4e6f823a971814fd40dec9c0de518772"}}, - {name = "lxml-6.0.1-cp314-cp314-musllinux_1_2_armv7l.whl",url = 
"https://files.pythonhosted.org/packages/51/b0/5f8c1e8890e2ee1c2053c2eadd1cb0e4b79e2304e2912385f6ca666f48b1/lxml-6.0.1-cp314-cp314-musllinux_1_2_armv7l.whl",hashes = {sha256 = "45fdd0415a0c3d91640b5d7a650a8f37410966a2e9afebb35979d06166fd010e"}}, - {name = "lxml-6.0.1-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/eb/f9/820b5125660dae489ca3a21a36d9da2e75dd6b5ffe922088f94bbff3b8a0/lxml-6.0.1-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "d417eba28981e720a14fcb98f95e44e7a772fe25982e584db38e5d3b6ee02e79"}}, - {name = "lxml-6.0.1-cp314-cp314-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/23/8e/a557fae9eec236618aecf9ff35fec18df41b6556d825f3ad6017d9f6e878/lxml-6.0.1-cp314-cp314-musllinux_1_2_riscv64.whl",hashes = {sha256 = "8e5d116b9e59be7934febb12c41cce2038491ec8fdb743aeacaaf36d6e7597e4"}}, - {name = "lxml-6.0.1-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/fa/fd/b266cfaab81d93a539040be699b5854dd24c84e523a1711ee5f615aa7000/lxml-6.0.1-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "c238f0d0d40fdcb695c439fe5787fa69d40f45789326b3bb6ef0d61c4b588d6e"}}, - {name = "lxml-6.0.1-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/25/6c/6f9610fbf1de002048e80585ea4719591921a0316a8565968737d9f125ca/lxml-6.0.1-cp314-cp314-win32.whl",hashes = {sha256 = "537b6cf1c5ab88cfd159195d412edb3e434fee880f206cbe68dff9c40e17a68a"}}, - {name = "lxml-6.0.1-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/72/a5/506775e3988677db24dc75a7b03e04038e0b3d114ccd4bccea4ce0116c15/lxml-6.0.1-cp314-cp314-win_amd64.whl",hashes = {sha256 = "911d0a2bb3ef3df55b3d97ab325a9ca7e438d5112c102b8495321105d25a441b"}}, - {name = "lxml-6.0.1-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0a/44/9613f300201b8700215856e5edd056d4e58dd23368699196b58877d4408b/lxml-6.0.1-cp314-cp314-win_arm64.whl",hashes = {sha256 = "2834377b0145a471a654d699bdb3a2155312de492142ef5a1d426af2c60a0a31"}}, - {name = "lxml-6.0.1-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/43/c4/cd757eeec4548e6652eff50b944079d18ce5f8182d2b2cf514e125e8fbcb/lxml-6.0.1-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "485eda5d81bb7358db96a83546949c5fe7474bec6c68ef3fa1fb61a584b00eea"}}, - {name = "lxml-6.0.1-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/99/0290bb86a7403893f5e9658490c705fcea103b9191f2039752b071b4ef07/lxml-6.0.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "d12160adea318ce3d118f0b4fbdff7d1225c75fb7749429541b4d217b85c3f76"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/88/a7/4bb54dd1e626342a0f7df6ec6ca44fdd5d0e100ace53acc00e9a689ead04/lxml-6.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "48c8d335d8ab72f9265e7ba598ae5105a8272437403f4032107dbcb96d3f0b29"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/71/8d/20f51cd07a7cbef6214675a8a5c62b2559a36d9303fe511645108887c458/lxml-6.0.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "405e7cf9dbdbb52722c231e0f1257214202dfa192327fab3de45fd62e0554082"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/5a/63/efceeee7245d45f97d548e48132258a36244d3c13c6e3ddbd04db95ff496/lxml-6.0.1-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "299a790d403335a6a057ade46f92612ebab87b223e4e8c5308059f2dc36f45ed"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/57/5d/92cb3d3499f5caba17f7933e6be3b6c7de767b715081863337ced42eb5f2/lxml-6.0.1-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "48da704672f6f9c461e9a73250440c647638cc6ff9567ead4c3b1f189a604ee8"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/69/f8/606fa16a05d7ef5e916c6481c634f40870db605caffed9d08b1a4fb6b989/lxml-6.0.1-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "21e364e1bb731489e3f4d51db416f991a5d5da5d88184728d80ecfb0904b1d68"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/01/15d5fc74ebb49eac4e5df031fbc50713dcc081f4e0068ed963a510b7d457/lxml-6.0.1-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "1bce45a2c32032afddbd84ed8ab092130649acb935536ef7a9559636ce7ffd4a"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/42/a5/1b85e2aaaf8deaa67e04c33bddb41f8e73d07a077bf9db677cec7128bfb4/lxml-6.0.1-cp313-cp313-manylinux_2_31_armv7l.whl",hashes = {sha256 = "fa164387ff20ab0e575fa909b11b92ff1481e6876835014e70280769920c4433"}}, - {name = "lxml-6.0.1-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/42/23/f3bb1292f55a725814317172eeb296615db3becac8f1a059b53c51fc1da8/lxml-6.0.1-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "7587ac5e000e1594e62278422c5783b34a82b22f27688b1074d71376424b73e8"}}, - {name = "lxml-6.0.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b4/be/4d768f581ccd0386d424bac615d9002d805df7cc8482ae07d529f60a3c1e/lxml-6.0.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "57478424ac4c9170eabf540237125e8d30fad1940648924c058e7bc9fb9cf6dd"}}, - {name = "lxml-6.0.1-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/40/07/ed61d1a3e77d1a9f856c4fab15ee5c09a2853fb7af13b866bb469a3a6d42/lxml-6.0.1-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "09c74afc7786c10dd6afaa0be2e4805866beadc18f1d843cf517a7851151b499"}}, - {name = "lxml-6.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/01/37/77e7971212e5c38a55431744f79dff27fd751771775165caea096d055ca4/lxml-6.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "7fd70681aeed83b196482d42a9b0dc5b13bab55668d09ad75ed26dff3be5a2f5"}}, - {name = "lxml-6.0.1-cp313-cp313-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/32/a3/e98806d483941cd9061cc838b1169626acef7b2807261fbe5e382fcef881/lxml-6.0.1-cp313-cp313-musllinux_1_2_riscv64.whl",hashes = {sha256 = "10a72e456319b030b3dd900df6b1f19d89adf06ebb688821636dc406788cf6ac"}}, - {name = "lxml-6.0.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/de/9bb5a05e42e8623bf06b4638931ea8c8f5eb5a020fe31703abdbd2e83547/lxml-6.0.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"b0fa45fb5f55111ce75b56c703843b36baaf65908f8b8d2fbbc0e249dbc127ed"}}, - {name = "lxml-6.0.1-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/f2/43/c1cb2a7c67226266c463ef8a53b82d42607228beb763b5fbf4867e88a21f/lxml-6.0.1-cp313-cp313-win32.whl",hashes = {sha256 = "01dab65641201e00c69338c9c2b8a0f2f484b6b3a22d10779bb417599fae32b5"}}, - {name = "lxml-6.0.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/34/96/6a6c3b8aa480639c1a0b9b6faf2a63fb73ab79ffcd2a91cf28745faa22de/lxml-6.0.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "bdf8f7c8502552d7bff9e4c98971910a0a59f60f88b5048f608d0a1a75e94d1c"}}, - {name = "lxml-6.0.1-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/66/622e8515121e1fd773e3738dae71b8df14b12006d9fb554ce90886689fd0/lxml-6.0.1-cp313-cp313-win_arm64.whl",hashes = {sha256 = "a6aeca75959426b9fd8d4782c28723ba224fe07cfa9f26a141004210528dcbe2"}}, - {name = "lxml-6.0.1-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/b0/a9/82b244c8198fcdf709532e39a1751943a36b3e800b420adc739d751e0299/lxml-6.0.1-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "c03ac546adaabbe0b8e4a15d9ad815a281afc8d36249c246aecf1aaad7d6f200"}}, - {name = "lxml-6.0.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/8d/1ed2bc20281b0e7ed3e6c12b0a16e64ae2065d99be075be119ba88486e6d/lxml-6.0.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "33b862c7e3bbeb4ba2c96f3a039f925c640eeba9087a4dc7a572ec0f19d89392"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/76/53/d7fd3af95b72a3493bf7fbe842a01e339d8f41567805cecfecd5c71aa5ee/lxml-6.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "7a3ec1373f7d3f519de595032d4dcafae396c29407cfd5073f42d267ba32440d"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/9d/51/4e57cba4d55273c400fb63aefa2f0d08d15eac021432571a7eeefee67bed/lxml-6.0.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "03b12214fb1608f4cffa181ec3d046c72f7e77c345d06222144744c122ded870"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f6/6e/5f290bc26fcc642bc32942e903e833472271614e24d64ad28aaec09d5dae/lxml-6.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "207ae0d5f0f03b30f95e649a6fa22aa73f5825667fee9c7ec6854d30e19f2ed8"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/13/d4/2e7551a86992ece4f9a0f6eebd4fb7e312d30f1e372760e2109e721d4ce6/lxml-6.0.1-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "32297b09ed4b17f7b3f448de87a92fb31bb8747496623483788e9f27c98c0f00"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/8a/5f/cb49d727fc388bf5fd37247209bab0da11697ddc5e976ccac4826599939e/lxml-6.0.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "7e18224ea241b657a157c85e9cac82c2b113ec90876e01e1f127312006233756"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/ca/b8/66c1ef8c87ad0f958b0a23998851e610607c74849e75e83955d5641272e6/lxml-6.0.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "a07a994d3c46cd4020c1ea566345cf6815af205b1e948213a4f0f1d392182072"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/1a/ef/131d3d6b9590e64fdbb932fbc576b81fcc686289da19c7cb796257310e82/lxml-6.0.1-cp312-cp312-manylinux_2_31_armv7l.whl",hashes = {sha256 = "2287fadaa12418a813b05095485c286c47ea58155930cfbd98c590d25770e225"}}, - {name = "lxml-6.0.1-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/bc/3f/07f48ae422dce44902309aa7ed386c35310929dc592439c403ec16ef9137/lxml-6.0.1-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "b4e597efca032ed99f418bd21314745522ab9fa95af33370dcee5533f7f70136"}}, - {name = "lxml-6.0.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/11/c7/125315d7b14ab20d9155e8316f7d287a4956098f787c22d47560b74886c4/lxml-6.0.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9696d491f156226decdd95d9651c6786d43701e49f32bf23715c975539aa2b3b"}}, - {name = "lxml-6.0.1-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/8b/c3/51143c3a5fc5168a7c3ee626418468ff20d30f5a59597e7b156c1e61fba8/lxml-6.0.1-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "e4e3cd3585f3c6f87cdea44cda68e692cc42a012f0131d25957ba4ce755241a7"}}, - {name = "lxml-6.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/11/86/73102370a420ec4529647b31c4a8ce8c740c77af3a5fae7a7643212d6f6e/lxml-6.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "45cbc92f9d22c28cd3b97f8d07fcefa42e569fbd587dfdac76852b16a4924277"}}, - {name = "lxml-6.0.1-cp312-cp312-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/d7/2d/aad90afaec51029aef26ef773b8fd74a9e8706e5e2f46a57acd11a421c02/lxml-6.0.1-cp312-cp312-musllinux_1_2_riscv64.whl",hashes = {sha256 = "f8c9bcfd2e12299a442fba94459adf0b0d001dbc68f1594439bfa10ad1ecb74b"}}, - {name = "lxml-6.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/63/01/c9e42c8c2d8b41f4bdefa42ab05448852e439045f112903dd901b8fbea4d/lxml-6.0.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "1e9dc2b9f1586e7cd77753eae81f8d76220eed9b768f337dc83a3f675f2f0cf9"}}, - {name = "lxml-6.0.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/bc/1f/962ea2696759abe331c3b0e838bb17e92224f39c638c2068bf0d8345e913/lxml-6.0.1-cp312-cp312-win32.whl",hashes = {sha256 = "987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb"}}, - {name = "lxml-6.0.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/41/e2/22c86a990b51b44442b75c43ecb2f77b8daba8c4ba63696921966eac7022/lxml-6.0.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc"}}, - {name = "lxml-6.0.1-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b2/21/dc0c73325e5eb94ef9c9d60dbb5dcdcb2e7114901ea9509735614a74e75a/lxml-6.0.1-cp312-cp312-win_arm64.whl",hashes = {sha256 = "c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299"}}, +sdist = {name = "lxml-6.0.2.tar.gz", url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hashes = 
{sha256 = "cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62"}} +wheels = [ + {name = "lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl",hashes = {sha256 = "b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe"}}, + {name = "lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/00/ce/74903904339decdf7da7847bb5741fc98a5451b42fc419a86c0c13d26fe2/lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/1f/d3/131dec79ce61c5567fecf82515bd9bc36395df42501b50f7f7f3bd065df0/lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3a/ea/a43ba9bb750d4ffdd885f2cd333572f5bb900cd2408b67fdda07e85978a0/lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/60/23/6885b451636ae286c34628f70a7ed1fcc759f8d9ad382d132e1c8d3d9bfd/lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/48/5b/fc2ddfc94ddbe3eebb8e9af6e3fd65e2feba4967f6a4e9683875c394c2d8/lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/9b/da/ba6eceb830c762b48e711ded880d7e3e89fc6c7323e587c36540b6b23c6b/lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl",hashes = {sha256 = "dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37"}}, + {name = "lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/a5/24/7be3f82cb7990b89118d944b619e53c656c97dc89c28cfb143fdb7cd6f4d/lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9"}}, + {name = 
"lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/1b/bd/dcfb9ea1e16c665efd7538fc5d5c34071276ce9220e234217682e7d2c4a5/lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917"}}, + {name = "lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/21/04/a60b0ff9314736316f28316b694bccbbabe100f8483ad83852d77fc7468e/lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl",hashes = {sha256 = "65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f"}}, + {name = "lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d6/bd/7d54bd1846e5a310d9c715921c5faa71cf5c0853372adf78aee70c8d7aa2/lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8"}}, + {name = "lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/fd/32/5643d6ab947bc371da21323acb2a6e603cedbe71cb4c99c8254289ab6f4e/lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl",hashes = {sha256 = "d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a"}}, + {name = "lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/33/da/34c1ec4cff1eea7d0b4cd44af8411806ed943141804ac9c5d565302afb78/lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c"}}, + {name = "lxml-6.0.2-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/82/57/4eca3e31e54dc89e2c3507e1cd411074a17565fa5ffc437c4ae0a00d439e/lxml-6.0.2-cp314-cp314-win32.whl",hashes = {sha256 = "be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b"}}, + {name = "lxml-6.0.2-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e3/e0/c96cf13eccd20c9421ba910304dae0f619724dcf1702864fd59dd386404d/lxml-6.0.2-cp314-cp314-win_amd64.whl",hashes = {sha256 = "fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed"}}, + {name = "lxml-6.0.2-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/d5/5d/b3f03e22b3d38d6f188ef044900a9b29b2fe0aebb94625ce9fe244011d34/lxml-6.0.2-cp314-cp314-win_arm64.whl",hashes = {sha256 = "063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8"}}, + {name = "lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/5e/5c/42c2c4c03554580708fc738d13414801f340c04c3eff90d8d2d227145275/lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl",hashes = {sha256 = "6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d"}}, + {name = "lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/bf/4f/12df843e3e10d18d468a7557058f8d3733e8b6e12401f30b1ef29360740f/lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/e4/0c/9dc31e6c2d0d418483cbcb469d1f5a582a1cd00a1f4081953d44051f3c50/lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/e7/2b/9b870c6ca24c841bdd887504808f0417aa9d8d564114689266f19ddf29c8/lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/bf/0c/4f5f2a4dd319a178912751564471355d9019e220c20d7db3fb8307ed8582/lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/12/64/554eed290365267671fe001a20d72d14f468ae4e6acef1e179b039436967/lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/7a/31/1d748aa275e71802ad9722df32a7a35034246b42c0ecdd8235412c3396ef/lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/8f/41/2c11916bcac09ed561adccacceaedd2bf0e0b25b297ea92aab99fd03d0fa/lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/99/05/4e5c2873d8f17aa018e6afde417c80cc5d0c33be4854cce3ef5670c49367/lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl",hashes = {sha256 = "957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d"}}, + {name = "lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/0f/c9/dcc2da1bebd6275cdc723b515f93edf548b82f36a5458cca3578bc899332/lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9"}}, + {name = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9c/e2/5172e4e7468afca64a37b81dba152fc5d90e30f9c83c7c3213d6a02a5ce4/lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e"}}, + {name = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/a5/b3/15461fd3e5cd4ddcb7938b87fc20b14ab113b92312fc97afe65cd7c85de1/lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl",hashes = {sha256 = "1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d"}}, + {name = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/05/33/f310b987c8bf9e61c4dd8e8035c416bd3230098f5e3cfa69fc4232de7059/lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec"}}, + {name = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/70/ff/51c80e75e0bc9382158133bdcf4e339b5886c6ee2418b5199b3f1a61ed6d/lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272"}}, + {name = "lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/56/4d/4856e897df0d588789dd844dbed9d91782c4ef0b327f96ce53c807e13128/lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f"}}, + {name = "lxml-6.0.2-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl",hashes = {sha256 = "da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312"}}, + {name = "lxml-6.0.2-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca"}}, + {name = "lxml-6.0.2-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c"}}, + {name = "lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/53/fd/4e8f0540608977aea078bf6d79f128e0e2c2bba8af1acf775c30baa70460/lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77"}}, + {name = "lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/f4/2a94a3d3dfd6c6b433501b8d470a1960a20ecce93245cf2db1706adf6c19/lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f"}}, + {name = "lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/2e/4efa677fa6b322013035d38016f6ae859d06cac67437ca7dc708a6af7028/lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452"}}, + {name = "lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/ce/0f/526e78a6d38d109fdbaa5049c62e1d32fdd70c75fb61c4eadf3045d3d124/lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048"}}, + {name = "lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/81/76/99de58d81fa702cc0ea7edae4f4640416c2062813a00ff24bd70ac1d9c9b/lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df"}}, + {name = "lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/b5/35/9e57d25482bc9a9882cb0037fdb9cc18f4b79d85df94fa9d2a89562f1d25/lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1"}}, + {name = 
"lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/a6/8e/cb99bd0b83ccc3e8f0f528e9aa1f7a9965dfec08c617070c5db8d63a87ce/lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916"}}, + {name = "lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/d0/34/9e591954939276bb679b73773836c6684c22e56d05980e31d52a9a8deb18/lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd"}}, + {name = "lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/8d/27/b29ff065f9aaca443ee377aff699714fcbffb371b4fce5ac4ca759e436d5/lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl",hashes = {sha256 = "4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6"}}, + {name = "lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/2b/9f/f756f9c2cd27caa1a6ef8c32ae47aadea697f5c2c6d07b0dae133c244fbe/lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a"}}, + {name = "lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/61/46/bb85ea42d2cb1bd8395484fd72f38e3389611aa496ac7772da9205bbda0e/lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679"}}, + {name = "lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/95/0c/443fc476dcc8e41577f0af70458c50fe299a97bb6b7505bb1ae09aa7f9ac/lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl",hashes = {sha256 = "2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659"}}, + {name = "lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/48/78/6ef0b359d45bb9697bc5a626e1992fa5d27aa3f8004b137b2314793b50a0/lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484"}}, + {name = "lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/ff/ea/e1d33808f386bc1339d08c0dcada6e4712d4ed8e93fcad5f057070b7988a/lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl",hashes = {sha256 = "fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2"}}, + {name = "lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/4f/47/eba75dfd8183673725255247a603b4ad606f4ae657b60c6c145b381697da/lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314"}}, + {name = "lxml-6.0.2-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl",hashes = {sha256 = "e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2"}}, + {name = "lxml-6.0.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7"}}, + {name = 
"lxml-6.0.2-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl",hashes = {sha256 = "13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf"}}, + {name = "lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456"}}, + {name = "lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl",hashes = {sha256 = "064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0"}}, + {name = "lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092"}}, + {name = "lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f"}}, + {name = "lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl",hashes = {sha256 = "6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8"}}, + {name = "lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f"}}, + {name = "lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl",hashes = {sha256 = "2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6"}}, + {name = "lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322"}}, + {name = "lxml-6.0.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl",hashes = {sha256 = "3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849"}}, + {name = "lxml-6.0.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl",hashes = {sha256 = "72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f"}}, + {name = "lxml-6.0.2-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl",hashes = {sha256 = "61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6"}}, + {name = "lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607"}}, + {name = "lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938"}}, + {name = "lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d"}}, + {name = "lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438"}}, + {name = "lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964"}}, + {name = "lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d"}}, + {name = "lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7"}}, + {name = "lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl",hashes = {sha256 = "b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178"}}, + {name = "lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553"}}, + {name = "lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb"}}, + {name = "lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl",hashes = {sha256 = "3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a"}}, + {name = "lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl",hashes = {sha256 = "2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c"}}, + {name = "lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = 
"b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7"}}, + {name = "lxml-6.0.2-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl",hashes = {sha256 = "6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46"}}, + {name = "lxml-6.0.2-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078"}}, + {name = "lxml-6.0.2-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl",hashes = {sha256 = "4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285"}}, + {name = "lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700"}}, + {name = "lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee"}}, + {name = "lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f"}}, + {name = "lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9"}}, + {name = "lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a"}}, + {name = "lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e"}}, + {name = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/db/8a/f8192a08237ef2fb1b19733f709db88a4c43bc8ab8357f01cb41a27e7f6a/lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"}}, + {name = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/12/64/27bcd07ae17ff5e5536e8d88f4c7d581b48963817a13de11f3ac3329bfa2/lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"}}, + {name = "lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/02/5a/a7d53b3291c324e0b6e48f3c797be63836cc52156ddf8f33cd72aac78866/lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31"}}, + {name = "lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/f5/55/d465e9b89df1761674d8672bb3e4ae2c47033b01ec243964b6e334c6743f/lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9"}}, + {name = "lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/62/38/3073cd7e3e8dfc3ba3c3a139e33bee3a82de2bfb0925714351ad3d255c13/lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8"}}, + {name = "lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/4a/d3/1e001588c5e2205637b08985597827d3827dbaaece16348c8822bfe61c29/lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba"}}, + {name = "lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/20/cf/cab09478699b003857ed6ebfe95e9fb9fa3d3c25f1353b905c9b73cfb624/lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c"}}, + {name = "lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl",url = "https://files.pythonhosted.org/packages/a3/84/02a2d0c38ac9a8b9f9e5e1bbd3f24b3f426044ad618b552e9549ee91bd63/lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl",hashes = {sha256 = "f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c"}}, + {name = "lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/56/87/e1ceadcc031ec4aa605fe95476892d0b0ba3b7f8c7dcdf88fdeff59a9c86/lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321"}}, + {name = "lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/fe/13/5bb6cf42bb228353fd4ac5f162c6a84fd68a4d6f67c1031c8cf97e131fc6/lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1"}}, + {name = "lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/e4/e2/ea0498552102e59834e297c5c6dff8d8ded3db72ed5e8aad77871476f073/lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl",hashes = {sha256 = "3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34"}}, + {name = "lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/6a/9e/8de42b52a73abb8af86c66c969b3b4c2a96567b6ac74637c037d2e3baa60/lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl",hashes = {sha256 = "59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a"}}, + {name = "lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/28/a2/de776a573dfb15114509a37351937c367530865edb10a90189d0b4b9b70a/lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c"}}, + {name = "lxml-6.0.2-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/50/a0/3ae1b1f8964c271b5eec91db2043cf8c6c0bce101ebb2a633b51b044db6c/lxml-6.0.2-cp310-cp310-win32.whl",hashes = {sha256 = "1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b"}}, + {name = "lxml-6.0.2-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d1/70/bd42491f0634aad41bdfc1e46f5cff98825fb6185688dc82baa35d509f1a/lxml-6.0.2-cp310-cp310-win_amd64.whl",hashes = {sha256 = "dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0"}}, + {name = "lxml-6.0.2-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/d2/d0/05c6a72299f54c2c561a6c6cbb2f512e047fca20ea97a05e57931f194ac4/lxml-6.0.2-cp310-cp310-win_arm64.whl",hashes = {sha256 = "45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5"}}, + {name = "lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/e7/9c/780c9a8fce3f04690b374f72f41306866b0400b9d0fdf3e17aaa37887eed/lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6"}}, + {name = "lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",url = "https://files.pythonhosted.org/packages/f5/5a/1ab260c00adf645d8bf7dec7f920f744b032f69130c681302821d5debea6/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl",hashes = {sha256 = "4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba"}}, + {name = "lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",url = "https://files.pythonhosted.org/packages/f2/37/565f3b3d7ffede22874b6d86be1a1763d00f4ea9fc5b9b6ccb11e4ec8612/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",hashes = {sha256 = "cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5"}}, + {name = "lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/22/ec/f3a1b169b2fb9d03467e2e3c0c752ea30e993be440a068b125fc7dd248b0/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4"}}, + {name = "lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/77/a2/585a28fe3e67daa1cf2f06f34490d556d121c25d500b10082a7db96e3bcd/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d"}}, + {name = "lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7b/d9/a57dd8bcebd7c69386c20263830d4fa72d27e6b72a229ef7a48e88952d9a/lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d"}}, ] marker = 
"\"recommended\" in extras" @@ -2220,40 +2998,87 @@ dependencies = [] [[packages]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" requires-python = ">=3.9" -sdist = {name = "markupsafe-3.0.2.tar.gz", url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hashes = {sha256 = "ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}} -wheels = [ - {name = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl",hashes = {sha256 = "bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}}, - {name = 
"MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl",hashes = {sha256 = "b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl",hashes = {sha256 = "ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}}, - {name = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl",hashes = {sha256 = "0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}}, - {name = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl",hashes = {sha256 
= "8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}}, +sdist = {name = "markupsafe-3.0.3.tar.gz", url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hashes = {sha256 = "722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}} +wheels = [ + {name = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}}, + {name = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}}, + {name = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}}, + {name = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}}, + {name = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}}, + {name = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}}, + {name = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl",hashes = {sha256 = "0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}}, + {name = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}}, + {name = "markupsafe-3.0.3-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl",hashes = {sha256 = "729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}}, + {name = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl",hashes = {sha256 = "bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}}, + {name = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl",hashes = {sha256 = "5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-win32.whl",url = 
"https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl",hashes = {sha256 = "915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}}, + {name = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}}, + {name = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}}, + {name = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}}, + {name = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}}, + {name = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}}, + {name = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}}, + {name = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}}, + {name = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl",hashes = {sha256 = "795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}}, + {name = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}}, + {name = "markupsafe-3.0.3-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl",hashes = {sha256 = "bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}}, + {name = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}}, + {name = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl",hashes = {sha256 = "7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl",hashes = {sha256 = "12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl",hashes = {sha256 = "69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}}, + {name = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}}, + {name = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}}, + {name = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}}, + {name = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}}, + {name = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}}, + {name = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}}, + {name = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}}, + {name = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl",hashes = {sha256 = "83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}}, + {name = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}}, + {name = "markupsafe-3.0.3-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl",hashes = {sha256 = "d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}}, + {name = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}}, + {name = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}}, + {name = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}}, + {name = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}}, + {name = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}}, + {name = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}}, + {name = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = 
"https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}}, + {name = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}}, + {name = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl",hashes = {sha256 = "7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}}, + {name = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}}, + {name = "markupsafe-3.0.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl",hashes = {sha256 = "0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}}, + {name = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}}, + {name = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl",hashes = {sha256 = "3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}}, + {name = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}}, + {name = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}}, + {name = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}}, + {name = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = 
"f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}}, + {name = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl",hashes = {sha256 = "c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}}, + {name = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}}, + {name = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl",url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl",hashes = {sha256 = "d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}}, + {name = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}}, + {name = "markupsafe-3.0.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl",hashes = {sha256 = "2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}}, + {name = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}}, + {name = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl",hashes = {sha256 = "e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}}, ] marker = "\"dev\" in extras" @@ -2296,6 +3121,10 @@ requires-python = ">=3.8" sdist = {name = "multiprocess-0.70.16.tar.gz", url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hashes = {sha256 = "161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}} wheels = [ {name = "multiprocess-0.70.16-py312-none-any.whl",url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl",hashes = {sha256 = "fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}}, + {name = "multiprocess-0.70.16-py311-none-any.whl",url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl",hashes = {sha256 = "af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}}, + {name = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/ef/76/6e712a2623d146d314f17598df5de7224c85c0060ef63fd95cc15a25b3fa/multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}}, + {name = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/0f/ab/1e6e8009e380e22254ff539ebe117861e5bdb3bff1fc977920972237c6c7/multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl",hashes = {sha256 = "d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}}, + {name = "multiprocess-0.70.16-py310-none-any.whl",url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl",hashes = {sha256 = "c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}}, ] marker = "\"default\" in dependency_groups" @@ -2332,37 +3161,45 @@ dependencies = [] [[packages]] name = "pyarrow" -version = "20.0.0" +version = "21.0.0" requires-python = ">=3.9" -sdist = {name = "pyarrow-20.0.0.tar.gz", url = "https://files.pythonhosted.org/packages/a2/ee/a7810cb9f3d6e9238e61d312076a9859bf3668fd21c69744de9532383912/pyarrow-20.0.0.tar.gz", hashes = {sha256 = "febc4a913592573c8d5805091a6c2b5064c8bd6e002131f01061797d91c783c1"}} -wheels = [ - {name = "pyarrow-20.0.0-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/9b/aa/daa413b81446d20d4dad2944110dcf4cf4f4179ef7f685dd5a6d7570dc8e/pyarrow-20.0.0-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = "a15532e77b94c61efadde86d10957950392999503b3616b2ffcef7621a002893"}}, - {name = "pyarrow-20.0.0-cp313-cp313-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/ff/75/2303d1caa410925de902d32ac215dc80a7ce7dd8dfe95358c165f2adf107/pyarrow-20.0.0-cp313-cp313-macosx_12_0_x86_64.whl",hashes = {sha256 = "dd43f58037443af715f34f1322c782ec463a3c8a94a85fdb2d987ceb5658e061"}}, - {name = "pyarrow-20.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/92/41/fe18c7c0b38b20811b73d1bdd54b1fccba0dab0e51d2048878042d84afa8/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "aa0d288143a8585806e3cc7c39566407aab646fb9ece164609dac1cfff45f6ae"}}, - {name = "pyarrow-20.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/ab/7dbf3d11db67c72dbf36ae63dcbc9f30b866c153b3a22ef728523943eee6/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b6953f0114f8d6f3d905d98e987d0924dabce59c3cda380bdfaa25a6201563b4"}}, - {name = "pyarrow-20.0.0-cp313-cp313-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/90/c3/0c7da7b6dac863af75b64e2f827e4742161128c350bfe7955b426484e226/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_aarch64.whl",hashes = {sha256 = "991f85b48a8a5e839b2128590ce07611fae48a904cae6cab1f089c5955b57eb5"}}, - {name = "pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/be/27/43a47fa0ff9053ab5203bb3faeec435d43c0d8bfa40179bfd076cdbd4e1c/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl",hashes = {sha256 = "97c8dc984ed09cb07d618d57d8d4b67a5100a30c3818c2fb0b04599f0da2de7b"}}, - {name = "pyarrow-20.0.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/bc/0b/d56c63b078876da81bbb9ba695a596eabee9b085555ed12bf6eb3b7cab0e/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "9b71daf534f4745818f96c214dbc1e6124d7daf059167330b610fc69b6f3d3e3"}}, - {name = 
"pyarrow-20.0.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/92/ac/7d4bd020ba9145f354012838692d48300c1b8fe5634bfda886abcada67ed/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e8b88758f9303fa5a83d6c90e176714b2fd3852e776fc2d7e42a22dd6c2fb368"}}, - {name = "pyarrow-20.0.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/9d/07/290f4abf9ca702c5df7b47739c1b2c83588641ddfa2cc75e34a301d42e55/pyarrow-20.0.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "30b3051b7975801c1e1d387e17c588d8ab05ced9b1e14eec57915f79869b5031"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/95/df/720bb17704b10bd69dde086e1400b8eefb8f58df3f8ac9cff6c425bf57f1/pyarrow-20.0.0-cp313-cp313t-macosx_12_0_arm64.whl",hashes = {sha256 = "ca151afa4f9b7bc45bcc791eb9a89e90a9eb2772767d0b1e5389609c7d03db63"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/d9/72/0d5f875efc31baef742ba55a00a25213a19ea64d7176e0fe001c5d8b6e9a/pyarrow-20.0.0-cp313-cp313t-macosx_12_0_x86_64.whl",hashes = {sha256 = "4680f01ecd86e0dd63e39eb5cd59ef9ff24a9d166db328679e36c108dc993d4c"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d5/bc/e48b4fa544d2eea72f7844180eb77f83f2030b84c8dad860f199f94307ed/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "7f4c8534e2ff059765647aa69b75d6543f9fef59e2cd4c6d18015192565d2b70"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/c3/01/974043a29874aa2cf4f87fb07fd108828fc7362300265a2a64a94965e35b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "3e1f8a47f4b4ae4c69c4d702cfbdfe4d41e18e5c7ef6f1bb1c50918c1e81c57b"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/68/95/cc0d3634cde9ca69b0e51cbe830d8915ea32dda2157560dda27ff3b3337b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl",hashes = {sha256 = "a1f60dc14658efaa927f8214734f6a01a806d7690be4b3232ba526836d216122"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/29/c2/3ad40e07e96a3e74e7ed7cc8285aadfa84eb848a798c98ec0ad009eb6bcc/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl",hashes = {sha256 = "204a846dca751428991346976b914d6d2a82ae5b8316a6ed99789ebf976551e6"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/eb/cb/65fa110b483339add6a9bc7b6373614166b14e20375d4daa73483755f830/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f3b117b922af5e4c6b9a9115825726cac7d8b1421c37c2b5e24fbacc8930612c"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/98/7b/f30b1954589243207d7a0fbc9997401044bf9a033eec78f6cb50da3f304a/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e724a3fd23ae5b9c010e7be857f4405ed5e679db5c93e66204db1a69f733936a"}}, - {name = "pyarrow-20.0.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/37/40/ad395740cd641869a13bcf60851296c89624662575621968dcfafabaa7f6/pyarrow-20.0.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = 
"82f1ee5133bd8f49d31be1299dc07f585136679666b502540db854968576faf9"}}, - {name = "pyarrow-20.0.0-cp312-cp312-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a1/d6/0c10e0d54f6c13eb464ee9b67a68b8c71bcf2f67760ef5b6fbcddd2ab05f/pyarrow-20.0.0-cp312-cp312-macosx_12_0_arm64.whl",hashes = {sha256 = "75a51a5b0eef32727a247707d4755322cb970be7e935172b6a3a9f9ae98404ba"}}, - {name = "pyarrow-20.0.0-cp312-cp312-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/e2/04e9874abe4094a06fd8b0cbb0f1312d8dd7d707f144c2ec1e5e8f452ffa/pyarrow-20.0.0-cp312-cp312-macosx_12_0_x86_64.whl",hashes = {sha256 = "211d5e84cecc640c7a3ab900f930aaff5cd2702177e0d562d426fb7c4f737781"}}, - {name = "pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/fd/c565e5dcc906a3b471a83273039cb75cb79aad4a2d4a12f76cc5ae90a4b8/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "4ba3cf4182828be7a896cbd232aa8dd6a31bd1f9e32776cc3796c012855e1199"}}, - {name = "pyarrow-20.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/a9/3bdd799e2c9b20c1ea6dc6fa8e83f29480a97711cf806e823f808c2316ac/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "2c3a01f313ffe27ac4126f4c2e5ea0f36a5fc6ab51f8726cf41fee4b256680bd"}}, - {name = "pyarrow-20.0.0-cp312-cp312-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/10/f7/da98ccd86354c332f593218101ae56568d5dcedb460e342000bd89c49cc1/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_aarch64.whl",hashes = {sha256 = "a2791f69ad72addd33510fec7bb14ee06c2a448e06b649e264c094c5b5f7ce28"}}, - {name = "pyarrow-20.0.0-cp312-cp312-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/bb/1b/2168d6050e52ff1e6cefc61d600723870bf569cbf41d13db939c8cf97a16/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_x86_64.whl",hashes = {sha256 = "4250e28a22302ce8692d3a0e8ec9d9dde54ec00d237cff4dfa9c1fbf79e472a8"}}, - {name = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b2/66/2d976c0c7158fd25591c8ca55aee026e6d5745a021915a1835578707feb3/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e"}}, - {name = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/a9/dfb999c2fc6911201dcbf348247f9cc382a8990f9ab45c12eabfd7243a38/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a"}}, - {name = "pyarrow-20.0.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a0/8e/9adee63dfa3911be2382fb4d92e4b2e7d82610f9d9f668493bebaa2af50f/pyarrow-20.0.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "96d6a0a37d9c98be08f5ed6a10831d88d52cac7b13f5287f1e0f625a0de8062b"}}, +sdist = {name = "pyarrow-21.0.0.tar.gz", url = "https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz", hashes = {sha256 = "5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc"}} +wheels = [ + {name = "pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/16/ca/c7eaa8e62db8fb37ce942b1ea0c6d7abfe3786ca193957afa25e71b81b66/pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl",hashes = {sha256 = 
"e99310a4ebd4479bcd1964dff9e14af33746300cb014aa4a3781738ac63baf4a"}}, + {name = "pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/ce/e8/e87d9e3b2489302b3a1aea709aaca4b781c5252fcb812a17ab6275a9a484/pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl",hashes = {sha256 = "d2fe8e7f3ce329a71b7ddd7498b3cfac0eeb200c2789bd840234f0dc271a8efe"}}, + {name = "pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/84/52/79095d73a742aa0aba370c7942b1b655f598069489ab387fe47261a849e1/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl",hashes = {sha256 = "f522e5709379d72fb3da7785aa489ff0bb87448a9dc5a75f45763a795a089ebd"}}, + {name = "pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/4b/7782438b551dbb0468892a276b8c789b8bbdb25ea5c5eb27faadd753e037/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl",hashes = {sha256 = "69cbbdf0631396e9925e048cfa5bce4e8c3d3b41562bbd70c685a8eb53a91e61"}}, + {name = "pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/b3/62/0f29de6e0a1e33518dec92c65be0351d32d7ca351e51ec5f4f837a9aab91/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "731c7022587006b755d0bdb27626a1a3bb004bb56b11fb30d98b6c1b4718579d"}}, + {name = "pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/90/c7/0fa1f3f29cf75f339768cc698c8ad4ddd2481c1742e9741459911c9ac477/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "dc56bc708f2d8ac71bd1dcb927e458c93cec10b98eb4120206a4091db7b67b99"}}, + {name = "pyarrow-21.0.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/01/63/581f2076465e67b23bc5a37d4a2abff8362d389d29d8105832e82c9c811c/pyarrow-21.0.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "186aa00bca62139f75b7de8420f745f2af12941595bbbfa7ed3870ff63e25636"}}, + {name = "pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c9/ab/357d0d9648bb8241ee7348e564f2479d206ebe6e1c47ac5027c2e31ecd39/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl",hashes = {sha256 = "a7a102574faa3f421141a64c10216e078df467ab9576684d5cd696952546e2da"}}, + {name = "pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/3f/8a/5685d62a990e4cac2043fc76b4661bf38d06efed55cf45a334b455bd2759/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl",hashes = {sha256 = "1e005378c4a2c6db3ada3ad4c217b381f6c886f0a80d6a316fe586b90f77efd7"}}, + {name = "pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/fc/de/c0828ee09525c2bafefd3e736a248ebe764d07d0fd762d4f0929dbc516c9/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl",hashes = {sha256 = "65f8e85f79031449ec8706b74504a316805217b35b6099155dd7e227eef0d4b6"}}, + {name = "pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/6e/26/a2865c420c50b7a3748320b614f3484bfcde8347b2639b2b903b21ce6a72/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl",hashes = {sha256 = "3a81486adc665c7eb1a2bde0224cfca6ceaba344a82a971ef059678417880eb8"}}, + {name = "pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0a/f9/4ee798dc902533159250fb4321267730bc0a107d8c6889e07c3add4fe3a5/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"fc0d2f88b81dcf3ccf9a6ae17f89183762c8a94a5bdcfa09e05cfe413acf0503"}}, + {name = "pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5a/da/e02544d6997037a4b0d22d8e5f66bc9315c3671371a8b18c79ade1cefe14/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6299449adf89df38537837487a4f8d3bd91ec94354fdd2a7d30bc11c48ef6e79"}}, + {name = "pyarrow-21.0.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/e5/4e/519c1bc1876625fe6b71e9a28287c43ec2f20f73c658b9ae1d485c0c206e/pyarrow-21.0.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "222c39e2c70113543982c6b34f3077962b44fca38c0bd9e68bb6781534425c10"}}, + {name = "pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ca/d4/d4f817b21aacc30195cf6a46ba041dd1be827efa4a623cc8bf39a1c2a0c0/pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl",hashes = {sha256 = "3a302f0e0963db37e0a24a70c56cf91a4faa0bca51c23812279ca2e23481fccd"}}, + {name = "pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/a2/9c/dcd38ce6e4b4d9a19e1d36914cb8e2b1da4e6003dd075474c4cfcdfe0601/pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl",hashes = {sha256 = "b6b27cf01e243871390474a211a7922bfbe3bda21e39bc9160daf0da3fe48876"}}, + {name = "pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/4f/74/2a2d9f8d7a59b639523454bec12dba35ae3d0a07d8ab529dc0809f74b23c/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl",hashes = {sha256 = "e72a8ec6b868e258a2cd2672d91f2860ad532d590ce94cdf7d5e7ec674ccf03d"}}, + {name = "pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/ad/90/2660332eeb31303c13b653ea566a9918484b6e4d6b9d2d46879a33ab0622/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl",hashes = {sha256 = "b7ae0bbdc8c6674259b25bef5d2a1d6af5d39d7200c819cf99e07f7dfef1c51e"}}, + {name = "pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/33/27/1a93a25c92717f6aa0fca06eb4700860577d016cd3ae51aad0e0488ac899/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "58c30a1729f82d201627c173d91bd431db88ea74dcaa3885855bc6203e433b82"}}, + {name = "pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/05/d9/4d09d919f35d599bc05c6950095e358c3e15148ead26292dfca1fb659b0c/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "072116f65604b822a7f22945a7a6e581cfa28e3454fdcc6939d4ff6090126623"}}, + {name = "pyarrow-21.0.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/71/30/f3795b6e192c3ab881325ffe172e526499eb3780e306a15103a2764916a2/pyarrow-21.0.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "cf56ec8b0a5c8c9d7021d6fd754e688104f9ebebf1bf4449613c9531f5346a18"}}, + {name = "pyarrow-21.0.0-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/94/dc/80564a3071a57c20b7c32575e4a0120e8a330ef487c319b122942d665960/pyarrow-21.0.0-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "c077f48aab61738c237802836fc3844f85409a46015635198761b0d6a688f87b"}}, + {name = "pyarrow-21.0.0-cp311-cp311-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/ea/cc/3b51cb2db26fe535d14f74cab4c79b191ed9a8cd4cbba45e2379b5ca2746/pyarrow-21.0.0-cp311-cp311-macosx_12_0_x86_64.whl",hashes = {sha256 = "689f448066781856237eca8d1975b98cace19b8dd2ab6145bf49475478bcaa10"}}, + {name = 
"pyarrow-21.0.0-cp311-cp311-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/24/11/a4431f36d5ad7d83b87146f515c063e4d07ef0b7240876ddb885e6b44f2e/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_aarch64.whl",hashes = {sha256 = "479ee41399fcddc46159a551705b89c05f11e8b8cb8e968f7fec64f62d91985e"}}, + {name = "pyarrow-21.0.0-cp311-cp311-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/74/dc/035d54638fc5d2971cbf1e987ccd45f1091c83bcf747281cf6cc25e72c88/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_x86_64.whl",hashes = {sha256 = "40ebfcb54a4f11bcde86bc586cbd0272bac0d516cfa539c799c2453768477569"}}, + {name = "pyarrow-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/2e/3b/89fced102448a9e3e0d4dded1f37fa3ce4700f02cdb8665457fcc8015f5b/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8d58d8497814274d3d20214fbb24abcad2f7e351474357d552a8d53bce70c70e"}}, + {name = "pyarrow-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/fb/bb/ea7f1bd08978d39debd3b23611c293f64a642557e8141c80635d501e6d53/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "585e7224f21124dd57836b1530ac8f2df2afc43c861d7bf3d58a4870c42ae36c"}}, + {name = "pyarrow-21.0.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/6e/0b/77ea0600009842b30ceebc3337639a7380cd946061b620ac1a2f3cb541e2/pyarrow-21.0.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "555ca6935b2cbca2c0e932bedd853e9bc523098c39636de9ad4693b5b1df86d6"}}, + {name = "pyarrow-21.0.0-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/17/d9/110de31880016e2afc52d8580b397dbe47615defbf09ca8cf55f56c62165/pyarrow-21.0.0-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "e563271e2c5ff4d4a4cbeb2c83d5cf0d4938b891518e676025f7268c6fe5fe26"}}, + {name = "pyarrow-21.0.0-cp310-cp310-macosx_12_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/df/5f/c1c1997613abf24fceb087e79432d24c19bc6f7259cab57c2c8e5e545fab/pyarrow-21.0.0-cp310-cp310-macosx_12_0_x86_64.whl",hashes = {sha256 = "fee33b0ca46f4c85443d6c450357101e47d53e6c3f008d658c27a2d020d44c79"}}, + {name = "pyarrow-21.0.0-cp310-cp310-manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/3e/ed/b1589a777816ee33ba123ba1e4f8f02243a844fed0deec97bde9fb21a5cf/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_aarch64.whl",hashes = {sha256 = "7be45519b830f7c24b21d630a31d48bcebfd5d4d7f9d3bdb49da9cdf6d764edb"}}, + {name = "pyarrow-21.0.0-cp310-cp310-manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/44/28/b6672962639e85dc0ac36f71ab3a8f5f38e01b51343d7aa372a6b56fa3f3/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_x86_64.whl",hashes = {sha256 = "26bfd95f6bff443ceae63c65dc7e048670b7e98bc892210acba7e4995d3d4b51"}}, + {name = "pyarrow-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f8/cc/de02c3614874b9089c94eac093f90ca5dfa6d5afe45de3ba847fd950fdf1/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bd04ec08f7f8bd113c55868bd3fc442a9db67c27af098c5f814a3091e71cc61a"}}, + {name = "pyarrow-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/a6/3e/99473332ac40278f196e105ce30b79ab8affab12f6194802f2593d6b0be2/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9b0b14b49ac10654332a805aedfc0147fb3469cbf8ea951b3d040dab12372594"}}, + {name = "pyarrow-21.0.0-cp310-cp310-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/7b/f5/c372ef60593d713e8bfbb7e0c743501605f0ad00719146dc075faf11172b/pyarrow-21.0.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "9d9f8bcb4c3be7738add259738abdeddc363de1b80e3310e04067aa1ca596634"}}, ] marker = "\"default\" in dependency_groups" @@ -2397,6 +3234,11 @@ wheels = [ {name = "pycryptodomex-3.23.0-cp37-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/8d/67/09ee8500dd22614af5fbaa51a4aee6e342b5fa8aecf0a6cb9cbf52fa6d45/pycryptodomex-3.23.0-cp37-abi3-win32.whl",hashes = {sha256 = "189afbc87f0b9f158386bf051f720e20fa6145975f1e76369303d0f31d1a8d7c"}}, {name = "pycryptodomex-3.23.0-cp37-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/69/96/11f36f71a865dd6df03716d33bd07a67e9d20f6b8d39820470b766af323c/pycryptodomex-3.23.0-cp37-abi3-win_amd64.whl",hashes = {sha256 = "52e5ca58c3a0b0bd5e100a9fbc8015059b05cffc6c66ce9d98b4b45e023443b9"}}, {name = "pycryptodomex-3.23.0-cp37-abi3-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f9/93/45c1cdcbeb182ccd2e144c693eaa097763b08b38cded279f0053ed53c553/pycryptodomex-3.23.0-cp37-abi3-win_arm64.whl",hashes = {sha256 = "02d87b80778c171445d67e23d1caef279bf4b25c3597050ccd2e13970b57fd51"}}, + {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/f3/b8/3e76d948c3c4ac71335bbe75dac53e154b40b0f8f1f022dfa295257a0c96/pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "ebfff755c360d674306e5891c564a274a47953562b42fb74a5c25b8fc1fb1cb5"}}, + {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6a/cf/80f4297a4820dfdfd1c88cf6c4666a200f204b3488103d027b5edd9176ec/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "eca54f4bb349d45afc17e3011ed4264ef1cc9e266699874cdd1349c504e64798"}}, + {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/d1/42/1e969ee0ad19fe3134b0e1b856c39bd0b70d47a4d0e81c2a8b05727394c9/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "4f2596e643d4365e14d0879dc5aafe6355616c61c2176009270f3048f6d9a61f"}}, + {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/6e/c3/1de4f7631fea8a992a44ba632aa40e0008764c0fb9bf2854b0acf78c2cf2/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "fdfac7cda115bca3a5abb2f9e43bc2fb66c2b65ab074913643803ca7083a79ea"}}, + {name = "pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/f2/5f/af7da8e6f1e42b52f44a24d08b8e4c726207434e2593732d39e7af5e7256/pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe"}}, ] marker = "\"recommended\" in extras" @@ -2434,24 +3276,24 @@ dependencies = [] [[packages]] name = "safetensors" -version = "0.5.3" -requires-python = ">=3.7" -sdist = {name = "safetensors-0.5.3.tar.gz", url = "https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hashes = {sha256 = 
"b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965"}} -wheels = [ - {name = "safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/18/ae/88f6c49dbd0cc4da0e08610019a3c78a7d390879a919411a410a1876d03a/safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073"}}, - {name = "safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b8/3b/11f1b4a2f5d2ab7da34ecc062b0bc301f2be024d110a6466726bec8c055c/safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7"}}, - {name = "safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467"}}, - {name = "safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e"}}, - {name = "safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d"}}, - {name = "safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9"}}, - {name = "safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a"}}, - {name = "safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d"}}, - {name = "safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b"}}, - {name = "safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl",hashes = {sha256 = "df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff"}}, - {name 
= "safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl",hashes = {sha256 = "32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135"}}, - {name = "safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04"}}, - {name = "safetensors-0.5.3-cp38-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/0a/0c/95aeb51d4246bd9a3242d3d8349c1112b4ee7611a4b40f0c5c93b05f001d/safetensors-0.5.3-cp38-abi3-win32.whl",hashes = {sha256 = "cfc0ec0846dcf6763b0ed3d1846ff36008c6e7290683b61616c4b040f6a54ace"}}, - {name = "safetensors-0.5.3-cp38-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl",hashes = {sha256 = "836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11"}}, +version = "0.6.2" +requires-python = ">=3.9" +sdist = {name = "safetensors-0.6.2.tar.gz", url = "https://files.pythonhosted.org/packages/ac/cc/738f3011628920e027a11754d9cae9abec1aed00f7ae860abbf843755233/safetensors-0.6.2.tar.gz", hashes = {sha256 = "43ff2aa0e6fa2dc3ea5524ac7ad93a9839256b8703761e76e2d0b2a3fa4f15d9"}} +wheels = [ + {name = "safetensors-0.6.2-cp38-abi3-macosx_10_12_x86_64.whl",url = "https://files.pythonhosted.org/packages/4d/b1/3f5fd73c039fc87dba3ff8b5d528bfc5a32b597fea8e7a6a4800343a17c7/safetensors-0.6.2-cp38-abi3-macosx_10_12_x86_64.whl",hashes = {sha256 = "9c85ede8ec58f120bad982ec47746981e210492a6db876882aa021446af8ffba"}}, + {name = "safetensors-0.6.2-cp38-abi3-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/8c/c9/bb114c158540ee17907ec470d01980957fdaf87b4aa07914c24eba87b9c6/safetensors-0.6.2-cp38-abi3-macosx_11_0_arm64.whl",hashes = {sha256 = "d6675cf4b39c98dbd7d940598028f3742e0375a6b4d4277e76beb0c35f4b843b"}}, + {name = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/d3/8e/f70c34e47df3110e8e0bb268d90db8d4be8958a54ab0336c9be4fe86dac8/safetensors-0.6.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "1d2d2b3ce1e2509c68932ca03ab8f20570920cd9754b05063d4368ee52833ecd"}}, + {name = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",url = "https://files.pythonhosted.org/packages/2a/f5/be9c6a7c7ef773e1996dc214e73485286df1836dbd063e8085ee1976f9cb/safetensors-0.6.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl",hashes = {sha256 = "93de35a18f46b0f5a6a1f9e26d91b442094f2df02e9fd7acf224cfec4238821a"}}, + {name = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/c9/55/23f2d0a2c96ed8665bf17a30ab4ce5270413f4d74b6d87dd663258b9af31/safetensors-0.6.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "89a89b505f335640f9120fac65ddeb83e40f1fd081cb8ed88b505bdccec8d0a1"}}, + {name = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = 
"https://files.pythonhosted.org/packages/98/c6/affb0bd9ce02aa46e7acddbe087912a04d953d7a4d74b708c91b5806ef3f/safetensors-0.6.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "fc4d0d0b937e04bdf2ae6f70cd3ad51328635fe0e6214aa1fc811f3b576b3bda"}}, + {name = "safetensors-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/fe/5d/5a514d7b88e310c8b146e2404e0dc161282e78634d9358975fd56dfd14be/safetensors-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8045db2c872db8f4cbe3faa0495932d89c38c899c603f21e9b6486951a5ecb8f"}}, + {name = "safetensors-0.6.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl",url = "https://files.pythonhosted.org/packages/7a/7b/4fc3b2ba62c352b2071bea9cfbad330fadda70579f617506ae1a2f129cab/safetensors-0.6.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl",hashes = {sha256 = "81e67e8bab9878bb568cffbc5f5e655adb38d2418351dc0859ccac158f753e19"}}, + {name = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/5a/50/0057e11fe1f3cead9254315a6c106a16dd4b1a19cd247f7cc6414f6b7866/safetensors-0.6.2-cp38-abi3-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b0e4d029ab0a0e0e4fdf142b194514695b1d7d3735503ba700cf36d0fc7136ce"}}, + {name = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_armv7l.whl",url = "https://files.pythonhosted.org/packages/e9/29/473f789e4ac242593ac1656fbece6e1ecd860bb289e635e963667807afe3/safetensors-0.6.2-cp38-abi3-musllinux_1_2_armv7l.whl",hashes = {sha256 = "fa48268185c52bfe8771e46325a1e21d317207bcabcb72e65c6e28e9ffeb29c7"}}, + {name = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/68/52/f7324aad7f2df99e05525c84d352dc217e0fa637a4f603e9f2eedfbe2c67/safetensors-0.6.2-cp38-abi3-musllinux_1_2_i686.whl",hashes = {sha256 = "d83c20c12c2d2f465997c51b7ecb00e407e5f94d7dec3ea0cc11d86f60d3fde5"}}, + {name = "safetensors-0.6.2-cp38-abi3-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ad/fe/cad1d9762868c7c5dc70c8620074df28ebb1a8e4c17d4c0cb031889c457e/safetensors-0.6.2-cp38-abi3-musllinux_1_2_x86_64.whl",hashes = {sha256 = "d944cea65fad0ead848b6ec2c37cc0b197194bec228f8020054742190e9312ac"}}, + {name = "safetensors-0.6.2-cp38-abi3-win32.whl",url = "https://files.pythonhosted.org/packages/59/a7/e2158e17bbe57d104f0abbd95dff60dda916cf277c9f9663b4bf9bad8b6e/safetensors-0.6.2-cp38-abi3-win32.whl",hashes = {sha256 = "cab75ca7c064d3911411461151cb69380c9225798a20e712b102edda2542ddb1"}}, + {name = "safetensors-0.6.2-cp38-abi3-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2c/c3/c0be1135726618dc1e28d181b8c442403d8dbb9e273fd791de2d4384bcdd/safetensors-0.6.2-cp38-abi3-win_amd64.whl",hashes = {sha256 = "c7b214870df923cbc1593c3faee16bec59ea462758699bd3fee399d00aac072c"}}, ] marker = "\"default\" in dependency_groups" @@ -2573,6 +3415,34 @@ wheels = [ {name = "ujson-5.11.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/7e/81/546042f0b23c9040d61d46ea5ca76f0cc5e0d399180ddfb2ae976ebff5b5/ujson-5.11.0-cp312-cp312-win32.whl",hashes = {sha256 = "be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88"}}, {name = "ujson-5.11.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/44/1b/27c05dc8c9728f44875d74b5bfa948ce91f6c33349232619279f35c6e817/ujson-5.11.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f"}}, {name = 
"ujson-5.11.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/22/2d/37b6557c97c3409c202c838aa9c960ca3896843b4295c4b7bb2bbd260664/ujson-5.11.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6"}}, + {name = "ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f"}}, + {name = "ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58"}}, + {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26"}}, + {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a"}}, + {name = "ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6"}}, + {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b"}}, + {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba"}}, + {name = "ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3"}}, + {name = "ujson-5.11.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/fe/d3/9ba310e07969bc9906eb7548731e33a0f448b122ad9705fed699c9b29345/ujson-5.11.0-cp311-cp311-win32.whl",hashes = {sha256 = "e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34"}}, + {name = "ujson-5.11.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/57/f7/da05b4a8819f1360be9e71fb20182f0bb3ec611a36c3f213f4d20709e099/ujson-5.11.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01"}}, + {name = 
"ujson-5.11.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9a/cc/f3f9ac0f24f00a623a48d97dc3814df5c2dc368cfb00031aa4141527a24b/ujson-5.11.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = "https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49"}}, + {name = "ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/52/5b/8c5e33228f7f83f05719964db59f3f9f276d272dc43752fa3bbf0df53e7b/ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04"}}, + {name = "ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/86/0c/8bf7a4fabfd01c7eed92d9b290930ce6d14910dec708e73538baa38885d1/ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "446e8c11c06048611c9d29ef1237065de0af07cabdd97e6b5b527b957692ec25"}}, + {name = "ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/7b/2e/eeab0b8b641817031ede4f790db4c4942df44a12f44d72b3954f39c6a115/ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "16ccb973b7ada0455201808ff11d48fe9c3f034a6ab5bd93b944443c88299f89"}}, + {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/21/1b/a4e7a41870797633423ea79618526747353fd7be9191f3acfbdee0bf264b/ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "3134b783ab314d2298d58cda7e47e7a0f7f71fc6ade6ac86d5dbeaf4b9770fa6"}}, + {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl",url = 
"https://files.pythonhosted.org/packages/94/ae/4e0d91b8f6db7c9b76423b3649612189506d5a06ddd3b6334b6d37f77a01/ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl",hashes = {sha256 = "185f93ebccffebc8baf8302c869fac70dd5dd78694f3b875d03a31b03b062cdb"}}, + {name = "ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/cc/46b124c2697ca2da7c65c4931ed3cb670646978157aa57a7a60f741c530f/ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "d06e87eded62ff0e5f5178c916337d2262fdbc03b31688142a3433eabb6511db"}}, + {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/39/eb/20dd1282bc85dede2f1c62c45b4040bc4c389c80a05983515ab99771bca7/ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "181fb5b15703a8b9370b25345d2a1fd1359f0f18776b3643d24e13ed9c036d4c"}}, + {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/64/a2/80072439065d493e3a4b1fbeec991724419a1b4c232e2d1147d257cac193/ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "a4df61a6df0a4a8eb5b9b1ffd673429811f50b235539dac586bb7e9e91994138"}}, + {name = "ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/7e/d77f9e9c039d58299c350c978e086a804d1fceae4fd4a1cc6e8d0133f838/ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6eff24e1abd79e0ec6d7eae651dd675ddbc41f9e43e29ef81e16b421da896915"}}, + {name = "ujson-5.11.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/ab/f1/697559d45acc849cada6b3571d53522951b1a64027400507aabc6a710178/ujson-5.11.0-cp310-cp310-win32.whl",hashes = {sha256 = "30f607c70091483550fbd669a0b37471e5165b317d6c16e75dba2aa967608723"}}, + {name = "ujson-5.11.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/86/a2/70b73a0f55abe0e6b8046d365d74230c20c5691373e6902a599b2dc79ba1/ujson-5.11.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "3d2720e9785f84312b8e2cb0c2b87f1a0b1c53aaab3b2af3ab817d54409012e0"}}, + {name = "ujson-5.11.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/1c/5f/b19104afa455630b43efcad3a24495b9c635d92aa8f2da4f30e375deb1a2/ujson-5.11.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "85e6796631165f719084a9af00c79195d3ebf108151452fefdcb1c8bb50f0105"}}, ] marker = "sys_platform != \"win32\" and implementation_name == \"cpython\" and \"default\" in dependency_groups" @@ -2581,17 +3451,16 @@ dependencies = [] [[packages]] name = "wcwidth" -version = "0.2.13" -sdist = {name = "wcwidth-0.2.13.tar.gz", url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hashes = {sha256 = "72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}} +version = "0.2.14" +requires-python = ">=3.6" +sdist = {name = "wcwidth-0.2.14.tar.gz", url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hashes = {sha256 = "4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}} wheels = [ - {name = "wcwidth-0.2.13-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl",hashes = {sha256 = "3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}}, + {name = 
"wcwidth-0.2.14-py2.py3-none-any.whl",url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl",hashes = {sha256 = "a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" [packages.tool.pdm] -dependencies = [ - "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", -] +dependencies = [] [[packages]] name = "websockets" @@ -2622,6 +3491,34 @@ wheels = [ {name = "websockets-15.0.1-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl",hashes = {sha256 = "c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}}, {name = "websockets-15.0.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}}, {name = "websockets-15.0.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl",hashes = {sha256 = "f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}}, + {name = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}}, + {name = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}}, + {name = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}}, + {name = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}}, + {name = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}}, + {name = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}}, + {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}}, + {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}}, + {name = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}}, + {name = "websockets-15.0.1-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl",hashes = {sha256 = "16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}}, + {name = "websockets-15.0.1-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl",hashes = {sha256 = "27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}}, + {name = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}}, + {name = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}}, + {name = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}}, + {name = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}}, + {name = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}}, + {name = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}}, + {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}}, + {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}}, + {name = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}}, + {name = "websockets-15.0.1-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl",hashes = {sha256 = "1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}}, + {name = "websockets-15.0.1-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl",hashes = {sha256 = "39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl",hashes = {sha256 = "1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = 
"https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}}, + {name = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}}, ] marker = "\"default\" in dependency_groups" @@ -2688,6 +3585,26 @@ wheels = [ {name = "wrapt-1.17.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = "e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"}}, {name = "wrapt-1.17.3-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl",hashes = {sha256 = "604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"}}, {name = "wrapt-1.17.3-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl",hashes = {sha256 = "7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"}}, + {name = "wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7"}}, + {name = "wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85"}}, + {name = "wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f"}}, + {name = "wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311"}}, + {name = "wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1"}}, + {name = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5"}}, + {name = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2"}}, + {name = "wrapt-1.17.3-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl",hashes = {sha256 = "c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89"}}, + {name = "wrapt-1.17.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77"}}, + {name = "wrapt-1.17.3-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl",hashes = {sha256 = "5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a"}}, + {name = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/3f/23/bb82321b86411eb51e5a5db3fb8f8032fd30bd7c2d74bfe936136b2fa1d6/wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}}, + {name = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}}, + {name = "wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c"}}, + {name = "wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",url = "https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl",hashes = {sha256 = "f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775"}}, + {name = "wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/15/dd576273491f9f43dd09fce517f6c2ce6eb4fe21681726068db0d0467096/wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd"}}, + 
{name = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/0c/c4/5eb4ce0d4814521fee7aa806264bf7a114e748ad05110441cd5b8a5c744b/wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05"}}, + {name = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/4b/819e9e0eb5c8dc86f60dfc42aa4e2c0d6c3db8732bce93cc752e604bb5f5/wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418"}}, + {name = "wrapt-1.17.3-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/f8/83/ed6baf89ba3a56694700139698cf703aac9f0f9eb03dab92f57551bd5385/wrapt-1.17.3-cp310-cp310-win32.whl",hashes = {sha256 = "a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390"}}, + {name = "wrapt-1.17.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/2f/90/ee61d36862340ad7e9d15a02529df6b948676b9a5829fd5e16640156627d/wrapt-1.17.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6"}}, + {name = "wrapt-1.17.3-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/bd/c3/cefe0bd330d389c9983ced15d326f45373f4073c9f4a8c2f99b50bfea329/wrapt-1.17.3-cp310-cp310-win_arm64.whl",hashes = {sha256 = "af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18"}}, ] marker = "\"default\" in dependency_groups" @@ -2696,11 +3613,11 @@ dependencies = [] [[packages]] name = "anyio" -version = "4.9.0" +version = "4.11.0" requires-python = ">=3.9" -sdist = {name = "anyio-4.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hashes = {sha256 = "673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}} +sdist = {name = "anyio-4.11.0.tar.gz", url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hashes = {sha256 = "82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}} wheels = [ - {name = "anyio-4.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl",hashes = {sha256 = "9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}}, + {name = "anyio-4.11.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl",hashes = {sha256 = "0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}}, ] marker = "\"default\" in dependency_groups or \"dev\" in extras" @@ -2755,30 +3672,57 @@ dependencies = [ [[packages]] name = "pandas" -version = "2.3.1" +version = "2.3.3" requires-python = ">=3.9" -sdist = {name = "pandas-2.3.1.tar.gz", url = "https://files.pythonhosted.org/packages/d1/6f/75aa71f8a14267117adeeed5d21b204770189c0a0025acbdc03c337b28fc/pandas-2.3.1.tar.gz", hashes = {sha256 = "0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2"}} -wheels = [ - {name = "pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/32/ed/ff0a67a2c5505e1854e6715586ac6693dd860fbf52ef9f81edee200266e7/pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = 
"9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22"}}, - {name = "pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/c7/db/d8f24a7cc9fb0972adab0cc80b6817e8bef888cfd0024eeb5a21c0bb5c4a/pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a"}}, - {name = "pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/0f/b0/80f6ec783313f1e2356b28b4fd8d2148c378370045da918c73145e6aab50/pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928"}}, - {name = "pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/e9/e2/20a317688435470872885e7fc8f95109ae9683dec7c50be29b56911515a5/pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9"}}, - {name = "pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/55/79/20d746b0a96c67203a5bee5fb4e00ac49c3e8009a39e1f78de264ecc5729/pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12"}}, - {name = "pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/7c/0f/145c8b41e48dbf03dd18fdd7f24f8ba95b8254a97a3379048378f33e7838/pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb"}}, - {name = "pandas-2.3.1-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b2/c0/54415af59db5cdd86a3d3bf79863e8cc3fa9ed265f0745254061ac09d5f2/pandas-2.3.1-cp313-cp313-win_amd64.whl",hashes = {sha256 = "1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956"}}, - {name = "pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/48/64/2fd2e400073a1230e13b8cd604c9bc95d9e3b962e5d44088ead2e8f0cfec/pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a"}}, - {name = "pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d8/0a/d84fd79b0293b7ef88c760d7dca69828d867c89b6d9bc52d6a27e4d87316/pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9"}}, - {name = "pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/ae/ff885d2b6e88f3c7520bb74ba319268b42f05d7e583b5dded9837da2723f/pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275"}}, - {name = "pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/85/86/1fa345fc17caf5d7780d2699985c03dbe186c68fee00b526813939062bb0/pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab"}}, - {name = "pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/81/aa/e58541a49b5e6310d89474333e994ee57fea97c8aaa8fc7f00b873059bbf/pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96"}}, - {name = "pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444"}}, - {name = "pandas-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/46/de/b8445e0f5d217a99fe0eeb2f4988070908979bec3587c0633e5428ab596c/pandas-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "689968e841136f9e542020698ee1c4fbe9caa2ed2213ae2388dc7b81721510d3"}}, - {name = "pandas-2.3.1-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/1e/e0/801cdb3564e65a5ac041ab99ea6f1d802a6c325bb6e58c79c06a3f1cd010/pandas-2.3.1-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "025e92411c16cbe5bb2a4abc99732a6b132f439b8aab23a59fa593eb00704232"}}, - {name = "pandas-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/51/a5/c76a8311833c24ae61a376dbf360eb1b1c9247a5d9c1e8b356563b31b80c/pandas-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "9b7ff55f31c4fcb3e316e8f7fa194566b286d6ac430afec0d461163312c5841e"}}, - {name = "pandas-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/da/01/e383018feba0a1ead6cf5fe8728e5d767fee02f06a3d800e82c489e5daaf/pandas-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "7dcb79bf373a47d2a40cf7232928eb7540155abbc460925c2c96d2d30b006eb4"}}, - {name = "pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/5b/14/cec7760d7c9507f11c97d64f29022e12a6cc4fc03ac694535e89f88ad2ec/pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8"}}, - {name = "pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/50/b9/6e2d2c6728ed29fb3d4d4d302504fb66f1a543e37eb2e43f352a86365cdf/pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679"}}, - {name = "pandas-2.3.1-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/80/a5/3a92893e7399a691bad7664d977cb5e7c81cf666c81f89ea76ba2bff483d/pandas-2.3.1-cp312-cp312-win_amd64.whl",hashes = {sha256 = "ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8"}}, +sdist = {name = "pandas-2.3.3.tar.gz", url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hashes = {sha256 = "e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b"}} +wheels = [ + {name = "pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0"}}, + {name = "pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593"}}, + {name = "pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c"}}, + {name = "pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b"}}, + {name = "pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6"}}, + {name = "pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3"}}, + {name = "pandas-2.3.3-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl",hashes = {sha256 = "1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5"}}, + {name = "pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec"}}, + {name = "pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7"}}, + {name = "pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450"}}, + {name = "pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5"}}, + {name = "pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = 
"d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788"}}, + {name = "pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87"}}, + {name = "pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713"}}, + {name = "pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8"}}, + {name = "pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d"}}, + {name = "pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac"}}, + {name = "pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c"}}, + {name = "pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493"}}, + {name = "pandas-2.3.3-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl",hashes = {sha256 = "f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee"}}, + {name = "pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5"}}, + {name = "pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21"}}, + {name = "pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78"}}, + {name = "pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110"}}, + {name = "pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86"}}, + {name = "pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc"}}, + {name = "pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53"}}, + {name = "pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35"}}, + {name = "pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908"}}, + {name = "pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89"}}, + {name = "pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98"}}, + {name = "pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084"}}, + {name = "pandas-2.3.3-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl",hashes = {sha256 = 
"a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b"}}, + {name = "pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523"}}, + {name = "pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45"}}, + {name = "pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66"}}, + {name = "pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b"}}, + {name = "pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791"}}, + {name = "pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151"}}, + {name = "pandas-2.3.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c"}}, + {name = "pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c"}}, + {name = "pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a"}}, + {name = "pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/1d/03/3fc4a529a7710f890a239cc496fc6d50ad4a0995657dccc1d64695adb9f4/pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1"}}, + {name = "pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838"}}, + {name = "pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/df/91/82cc5169b6b25440a7fc0ef3a694582418d875c8e3ebf796a6d6470aa578/pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250"}}, + {name = "pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/10/ae/89b3283800ab58f7af2952704078555fa60c807fff764395bb57ea0b0dbd/pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4"}}, + {name = "pandas-2.3.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/85/72/530900610650f54a35a19476eca5104f38555afccda1aa11a92ee14cb21d/pandas-2.3.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826"}}, ] marker = "\"default\" in dependency_groups" @@ -2860,11 +3804,11 @@ dependencies = [] [[packages]] name = "ruamel-yaml" -version = "0.18.14" +version = "0.18.15" requires-python = ">=3.8" -sdist = {name = "ruamel.yaml-0.18.14.tar.gz", url = "https://files.pythonhosted.org/packages/39/87/6da0df742a4684263261c253f00edd5829e6aca970fff69e75028cccc547/ruamel.yaml-0.18.14.tar.gz", hashes = {sha256 = "7227b76aaec364df15936730efbf7d72b30c0b79b1d578bbb8e3dcb2d81f52b7"}} +sdist = {name = "ruamel.yaml-0.18.15.tar.gz", url = "https://files.pythonhosted.org/packages/3e/db/f3950f5e5031b618aae9f423a39bf81a55c148aecd15a34527898e752cf4/ruamel.yaml-0.18.15.tar.gz", hashes = {sha256 = "dbfca74b018c4c3fba0b9cc9ee33e53c371194a9000e694995e620490fd40700"}} wheels = [ - {name = "ruamel.yaml-0.18.14-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/af/6d/6fe4805235e193aad4aaf979160dd1f3c487c57d48b810c816e6e842171b/ruamel.yaml-0.18.14-py3-none-any.whl",hashes = {sha256 = "710ff198bb53da66718c7db27eec4fbcc9aa6ca7204e4c1df2f282b6fe5eb6b2"}}, + {name = "ruamel.yaml-0.18.15-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/d1/e5/f2a0621f1781b76a38194acae72f01e37b1941470407345b6e8653ad7640/ruamel.yaml-0.18.15-py3-none-any.whl",hashes = {sha256 = "148f6488d698b7a5eded5ea793a025308b25eca97208181b6a026037f391f701"}}, ] marker = "\"dev\" in extras" @@ -2875,28 +3819,54 @@ dependencies = [ [[packages]] name = "ruamel-yaml-clib" -version = "0.2.12" +version = "0.2.14" requires-python = ">=3.9" -sdist = {name = "ruamel.yaml.clib-0.2.12.tar.gz", url = "https://files.pythonhosted.org/packages/20/84/80203abff8ea4993a87d823a5f632e4d92831ef75d404c9fc78d0176d2b5/ruamel.yaml.clib-0.2.12.tar.gz", hashes = {sha256 = "6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}} -wheels = [ - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/29/00/4864119668d71a5fa45678f380b5923ff410701565821925c69780356ffa/ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl",hashes = {sha256 = "4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/7f/5e/212f473a93ae78c669ffa0cb051e3fee1139cb2d385d2ae1653d64281507/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl",hashes = {sha256 = "e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/1f/8f/ecfbe2123ade605c49ef769788f79c38ddb1c8fa81e01f4dbf5cf1a44b16/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/e2/a9/28f60726d29dfc01b8decdb385de4ced2ced9faeb37a847bd5cf26836815/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl",url = "https://files.pythonhosted.org/packages/84/7e/8e7ec45920daa7f76046578e4f677a3215fe8f18ee30a9cb7627a19d9b4c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl",hashes = {sha256 = "3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/c5/b3/d650eaade4ca225f02a648321e1ab835b9d361c60d51150bac49063b83fa/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl",hashes = {sha256 = "e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/87/b8/01c29b924dcbbed75cc45b30c30d565d763b9c4d540545a0eeecffb8f09c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/30/8c/ed73f047a73638257aa9377ad356bea4d96125b305c34a28766f4445cc0f/ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl",hashes = {sha256 = "6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}}, - {name = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/b0/85/e8e751d8791564dd333d5d9a4eab0a7a115f7e349595417fd50ecae3395c/ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl",hashes = {sha256 = "e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/48/41/e7a405afbdc26af961678474a55373e1b323605a4f5e2ddd4a80ea80f628/ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/ec/b0/b850385604334c2ce90e3ee1013bd911aedf058a934905863a6ea95e9eb4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl",hashes = {sha256 = "943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/44/d0/3f68a86e006448fb6c005aee66565b9eb89014a70c491d70c08de597f8e4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/52/a9/d39f3c5ada0a3bb2870d7db41901125dbe2434fa4f12ca8c5b83a42d7c53/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl",url = "https://files.pythonhosted.org/packages/b0/fa/097e38135dadd9ac25aecf2a54be17ddf6e4c23e43d538492a90ab3d71c6/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl",hashes = {sha256 = "bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl",url = "https://files.pythonhosted.org/packages/ec/d5/a659ca6f503b9379b930f13bc6b130c9f176469b73b9834296822a83a132/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl",hashes = {sha256 = "32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl",hashes = {sha256 = "e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}}, - {name = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl",hashes = {sha256 = "0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}}, +sdist = {name = "ruamel.yaml.clib-0.2.14.tar.gz", url = "https://files.pythonhosted.org/packages/d8/e9/39ec4d4b3f91188fad1842748f67d4e749c77c37e353c4e545052ee8e893/ruamel.yaml.clib-0.2.14.tar.gz", hashes = {sha256 = "803f5044b13602d58ea378576dd75aa759f52116a0232608e8fdada4da33752e"}} +wheels = [ + {name = "ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_10_15_universal2.whl",url = "https://files.pythonhosted.org/packages/21/e2/a59ff65c26aaf21a24eb38df777cb9af5d87ba8fc8107c163c2da9d1e85e/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_10_15_universal2.whl",hashes = {sha256 = "7df6f6e9d0e33c7b1d435defb185095386c469109de723d514142632a7b9d07f"}}, + {name = "ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl",url = "https://files.pythonhosted.org/packages/6b/fa/3234f913fe9a6525a7b97c6dad1f51e72b917e6872e051a5e2ffd8b16fbb/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl",hashes = {sha256 = "70eda7703b8126f5e52fcf276e6c0f40b0d314674f896fc58c47b0aef2b9ae83"}}, + {name = "ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/ef/ec/4edbf17ac2c87fa0845dd366ef8d5852b96eb58fcd65fc1ecf5fe27b4641/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "a0cb71ccc6ef9ce36eecb6272c81afdc2f565950cdcec33ae8e6cd8f7fc86f27"}}, + {name = "ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/15/18/b0e1fafe59051de9e79cdd431863b03593ecfa8341c110affad7c8121efc/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e7cb9ad1d525d40f7d87b6df7c0ff916a66bc52cb61b66ac1b2a16d0c1b07640"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/d7/ae/e3811f05415594025e96000349d3400978adaed88d8f98d494352d9761ee/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_10_13_universal2.whl",hashes = {sha256 = "7e4f9da7e7549946e02a6122dcad00b7c1168513acb1f8a726b1aaf504a99d32"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_15_0_arm64.whl",url = "https://files.pythonhosted.org/packages/72/06/7d51f4688d6d72bb72fa74254e1593c4f5ebd0036be5b41fe39315b275e9/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_15_0_arm64.whl",hashes = {sha256 = "dd7546c851e59c06197a7c651335755e74aa383a835878ca86d2c650c07a2f85"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/5a/08/b4499234a420ef42960eeb05585df5cc7eb25ccb8c980490b079e6367050/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux2014_aarch64.whl",hashes = {sha256 = "1c1acc3a0209ea9042cc3cfc0790edd2eddd431a2ec3f8283d081e4d5018571e"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b6/ba/1975a27dedf1c4c33306ee67c948121be8710b19387aada29e2f139c43ee/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "2070bf0ad1540d5c77a664de07ebcc45eebd1ddcab71a7a06f26936920692beb"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/20/15/8a19a13d27f3bd09fa18813add8380a29115a47b553845f08802959acbce/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "9bd8fe07f49c170e09d76773fb86ad9135e0beee44f36e1576a201b0676d3d1d"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/19/ee/8d6146a079ad21e534b5083c9ee4a4c8bec42f79cf87594b60978286b39a/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ff86876889ea478b1381089e55cf9e345707b312beda4986f823e1d95e8c0f59"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/a9/f5/426b714abdc222392e68f3b8ad323930d05a214a27c7e7a0f06c69126401/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "1f118b707eece8cf84ecbc3e3ec94d9db879d85ed608f95870d39b2d2efa5dca"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/3d/ac/3c5c2b27a183f4fda8a57c82211721c016bcb689a4a175865f7646db9f94/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "b30110b29484adc597df6bd92a37b90e63a8c152ca8136aad100a02f8ba6d1b6"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-win32.whl",url = 
"https://files.pythonhosted.org/packages/92/2e/06f56a71fd55021c993ed6e848c9b2e5e9cfce180a42179f0ddd28253f7c/ruamel.yaml.clib-0.2.14-cp313-cp313-win32.whl",hashes = {sha256 = "f4e97a1cf0b7a30af9e1d9dad10a5671157b9acee790d9e26996391f49b965a2"}}, + {name = "ruamel.yaml.clib-0.2.14-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/51/79/76aba16a1689b50528224b182f71097ece338e7a4ab55e84c2e73443b78a/ruamel.yaml.clib-0.2.14-cp313-cp313-win_amd64.whl",hashes = {sha256 = "090782b5fb9d98df96509eecdbcaffd037d47389a89492320280d52f91330d78"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_10_13_universal2.whl",url = "https://files.pythonhosted.org/packages/b4/42/ccfb34a25289afbbc42017e4d3d4288e61d35b2e00cfc6b92974a6a1f94b/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_10_13_universal2.whl",hashes = {sha256 = "6aeadc170090ff1889f0d2c3057557f9cd71f975f17535c26a5d37af98f19c27"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/82/73/e628a92e80197ff6a79ab81ec3fa00d4cc082d58ab78d3337b7ba7043301/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_14_0_arm64.whl",hashes = {sha256 = "5e56ac47260c0eed992789fa0b8efe43404a9adb608608631a948cee4fc2b052"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/2b/c5/346c7094344a60419764b4b1334d9e0285031c961176ff88ffb652405b0c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux2014_aarch64.whl",hashes = {sha256 = "a911aa73588d9a8b08d662b9484bc0567949529824a55d3885b77e8dd62a127a"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/df/99/65080c863eb06d4498de3d6c86f3e90595e02e159fd8529f1565f56cfe2c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "a05ba88adf3d7189a974b2de7a9d56731548d35dc0a822ec3dc669caa7019b29"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/3d/e3/0de85f3e3333f8e29e4b10244374a202a87665d1131798946ee22cf05c7c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "fb04c5650de6668b853623eceadcdb1a9f2fee381f5d7b6bc842ee7c239eeec4"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/d9/25/0d2f09d8833c7fd77ab8efeff213093c16856479a9d293180a0d89f6bed9/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "df3ec9959241d07bc261f4983d25a1205ff37703faf42b474f15d54d88b4f8c9"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/d3/8c/959f10c2e2153cbdab834c46e6954b6dd9e3b109c8f8c0a3cf1618310985/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "fbc08c02e9b147a11dfcaa1ac8a83168b699863493e183f7c0c8b12850b7d259"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ed/6b/e580a7c18b485e1a5f30a32cda96b20364b0ba649d9d2baaf72f8bd21f83/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "c099cafc1834d3c5dac305865d04235f7c21c167c8dd31ebc3d6bbc357e2f023"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-win32.whl",url = 
"https://files.pythonhosted.org/packages/ef/44/3455eebc761dc8e8fdced90f2b0a3fa61e32ba38b50de4130e2d57db0f21/ruamel.yaml.clib-0.2.14-cp312-cp312-win32.whl",hashes = {sha256 = "b5b0f7e294700b615a3bcf6d28b26e6da94e8eba63b079f4ec92e9ba6c0d6b54"}}, + {name = "ruamel.yaml.clib-0.2.14-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/76/ab/5121f7f3b651db93de546f8c982c241397aad0a4765d793aca1dac5eadee/ruamel.yaml.clib-0.2.14-cp312-cp312-win_amd64.whl",hashes = {sha256 = "a37f40a859b503304dd740686359fcf541d6fb3ff7fc10f539af7f7150917c68"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/b3/9f/3c51e9578b8c36fcc4bdd271a1a5bb65963a74a4b6ad1a989768a22f6c2a/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_10_9_universal2.whl",hashes = {sha256 = "5bae1a073ca4244620425cd3d3aa9746bde590992b98ee8c7c8be8c597ca0d4e"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_13_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/16/cb02815bc2ae9c66760c0c061d23c7358f9ba51dae95ac85247662b7fbe2/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_13_0_arm64.whl",hashes = {sha256 = "0a54e5e40a7a691a426c2703b09b0d61a14294d25cfacc00631aa6f9c964df0d"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/31/c6/fc687cd1b93bff8e40861eea46d6dc1a6a778d9a085684e4045ff26a8e40/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux2014_aarch64.whl",hashes = {sha256 = "10d9595b6a19778f3269399eff6bab642608e5966183abc2adbe558a42d4efc9"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/45/5d/65a2bc08b709b08576b3f307bf63951ee68a8e047cbbda6f1c9864ecf9a7/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "dba72975485f2b87b786075e18a6e5d07dc2b4d8973beb2732b9b2816f1bad70"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/fb/d0/a70a03614d9a6788a3661ab1538879ed2aae4e84d861f101243116308a37/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "29757bdb7c142f9595cc1b62ec49a3d1c83fab9cef92db52b0ccebaad4eafb98"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/77/30/c93fa457611f79946d5cb6cc97493ca5425f3f21891d7b1f9b44eaa1b38e/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "557df28dbccf79b152fe2d1b935f6063d9cc431199ea2b0e84892f35c03bb0ee"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/40/85/e2c54ad637117cd13244a4649946eaa00f32edcb882d1f92df90e079ab00/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "26a8de280ab0d22b6e3ec745b4a5a07151a0f74aad92dd76ab9c8d8d7087720d"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/81/50/f899072c38877d8ef5382e0b3d47f8c4346226c1f52d6945d6f64fec6a2f/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "e501c096aa3889133d674605ebd018471bc404a59cbc17da3c5924421c54d97c"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-win32.whl",url = 
"https://files.pythonhosted.org/packages/99/7c/96d4b5075e30c65ea2064e40c2d657c7c235d7b6ef18751cf89a935b9041/ruamel.yaml.clib-0.2.14-cp311-cp311-win32.whl",hashes = {sha256 = "915748cfc25b8cfd81b14d00f4bfdb2ab227a30d6d43459034533f4d1c207a2a"}}, + {name = "ruamel.yaml.clib-0.2.14-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7d/8c/73ee2babd04e8bfcf1fd5c20aa553d18bf0ebc24b592b4f831d12ae46cc0/ruamel.yaml.clib-0.2.14-cp311-cp311-win_amd64.whl",hashes = {sha256 = "4ccba93c1e5a40af45b2f08e4591969fa4697eae951c708f3f83dcbf9f6c6bb1"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_10_9_universal2.whl",url = "https://files.pythonhosted.org/packages/b4/56/35a0a752415ae01992c68f5a6513bdef0e1b6fbdb60d7619342ce12346a0/ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_10_9_universal2.whl",hashes = {sha256 = "f8b2acb0ffdd2ce8208accbec2dca4a06937d556fdcaefd6473ba1b5daa7e3c4"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_13_0_arm64.whl",url = "https://files.pythonhosted.org/packages/98/6a/9a68184ab93619f4607ff1675e4ef01e8accfcbff0d482f4ca44c10d8eab/ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_13_0_arm64.whl",hashes = {sha256 = "aef953f3b8bd0b50bd52a2e52fb54a6a2171a1889d8dea4a5959d46c6624c451"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/2b/3f/cfed5f088628128a9ec66f46794fd4d165642155c7b78c26d83b16c6bf7b/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux2014_aarch64.whl",hashes = {sha256 = "a0ac90efbc7a77b0d796c03c8cc4e62fd710b3f1e4c32947713ef2ef52e09543"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/3a/d5/5ce2cc156c1da48160171968d91f066d305840fbf930ee955a509d025a44/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9bf6b699223afe6c7fe9f2ef76e0bfa6dd892c21e94ce8c957478987ade76cd8"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/2b/71/d0b56bc902b38ebe4be8e270f730f929eec4edaf8a0fa7028f4ef64fa950/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "d73a0187718f6eec5b2f729b0f98e4603f7bd9c48aa65d01227d1a5dcdfbe9e8"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/4b/db/1f37449dd89c540218598316ccafc1a0aed60215e72efa315c5367cfd015/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "81f6d3b19bc703679a5705c6a16dabdc79823c71d791d73c65949be7f3012c02"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/5d/53/c498b30f35efcd9f47cb084d7ad9374f2b907470f73913dec6396b81397d/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "b28caeaf3e670c08cb7e8de221266df8494c169bd6ed8875493fab45be9607a4"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/34/79/492cfad9baed68914840c39e5f3c1cc251f51a897ddb3f532601215cbb12/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "94f3efb718f8f49b031f2071ec7a27dd20cbfe511b4dfd54ecee54c956da2b31"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-win32.whl",url = 
"https://files.pythonhosted.org/packages/ca/f5/479ebfd5ba396e209ade90f7282d84b90c57b3e07be8dc6fcd02a6df7ffc/ruamel.yaml.clib-0.2.14-cp310-cp310-win32.whl",hashes = {sha256 = "27c070cf3888e90d992be75dd47292ff9aa17dafd36492812a6a304a1aedc182"}}, + {name = "ruamel.yaml.clib-0.2.14-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/57/31/a044520fdb3bd409889f67f1efebda0658033c7ab3f390cee37531cc9a9e/ruamel.yaml.clib-0.2.14-cp310-cp310-win_amd64.whl",hashes = {sha256 = "4f4a150a737fccae13fb51234d41304ff2222e3b7d4c8e9428ed1a6ab48389b8"}}, ] marker = "platform_python_implementation == \"CPython\" and python_version < \"3.14\" and python_full_version >= \"3.10.0\" and \"dev\" in extras" @@ -2970,46 +3940,194 @@ dependencies = [] [[packages]] name = "xxhash" -version = "3.5.0" +version = "3.6.0" requires-python = ">=3.7" -sdist = {name = "xxhash-3.5.0.tar.gz", url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hashes = {sha256 = "84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f"}} -wheels = [ - {name = "xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/b8/e4b3ad92d249be5c83fa72916c9091b0965cb0faeff05d9a0a3870ae6bff/xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6"}}, - {name = "xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fc/d8/b3627a0aebfbfa4c12a41e22af3742cf08c8ea84f5cc3367b5de2d039cce/xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5"}}, - {name = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/c3/cc/762312960691da989c7cd0545cb120ba2a4148741c6ba458aa723c00a3f8/xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc"}}, - {name = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/fe/e9/cc266f1042c3c13750e86a535496b58beb12bf8c50a915c336136f6168dc/xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3"}}, - {name = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/bf/85/a836cd0dc5cc20376de26b346858d0ac9656f8f730998ca4324921a010b9/xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c"}}, - {name = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/0e/15c243775342ce840b9ba34aceace06a1148fa1630cd8ca269e3223987f5/xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb"}}, - {name = "xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/87/a1/b028bb02636dfdc190da01951d0703b3d904301ed0ef6094d948983bef0e/xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = 
"c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f"}}, - {name = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/80/d5/73c73b03fc0ac73dacf069fdf6036c9abad82de0a47549e9912c955ab449/xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7"}}, - {name = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/b6/2a/5043dba5ddbe35b4fe6ea0a111280ad9c3d4ba477dd0f2d1fe1129bda9d0/xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326"}}, - {name = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/a2/b2/9a8ded888b7b190aed75b484eb5c853ddd48aa2896e7b59bbfbce442f0a1/xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf"}}, - {name = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/98/62/440083fafbc917bf3e4b67c2ade621920dd905517e85631c10aac955c1d2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7"}}, - {name = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/75/db/009206f7076ad60a517e016bb0058381d96a007ce3f79fa91d3010f49cc2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c"}}, - {name = "xxhash-3.5.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/1f/6d/c61e0668943a034abc3a569cdc5aeae37d686d9da7e39cf2ed621d533e36/xxhash-3.5.0-cp313-cp313-win32.whl",hashes = {sha256 = "53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637"}}, - {name = "xxhash-3.5.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/96/14/8416dce965f35e3d24722cdf79361ae154fa23e2ab730e5323aa98d7919e/xxhash-3.5.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43"}}, - {name = "xxhash-3.5.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/27/ee/518b72faa2073f5aa8e3262408d284892cb79cf2754ba0c3a5870645ef73/xxhash-3.5.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b"}}, - {name = "xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/07/0e/1bfce2502c57d7e2e787600b31c83535af83746885aa1a5f153d8c8059d6/xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl",hashes = {sha256 = "14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00"}}, - {name = "xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/3f/d6/8ca450d6fe5b71ce521b4e5db69622383d039e2b253e9b2f24f93265b52c/xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9"}}, - {name = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/5b/84/de7c89bc6ef63d750159086a6ada6416cc4349eab23f76ab870407178b93/xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84"}}, - {name = 
"xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",url = "https://files.pythonhosted.org/packages/fe/86/51258d3e8a8545ff26468c977101964c14d56a8a37f5835bc0082426c672/xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",hashes = {sha256 = "61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793"}}, - {name = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",url = "https://files.pythonhosted.org/packages/02/0a/96973bd325412feccf23cf3680fd2246aebf4b789122f938d5557c54a6b2/xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl",hashes = {sha256 = "f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be"}}, - {name = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/11/a7/81dba5010f7e733de88af9555725146fc133be97ce36533867f4c7e75066/xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6"}}, - {name = "xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",url = "https://files.pythonhosted.org/packages/fb/7d/f29006ab398a173f4501c0e4977ba288f1c621d878ec217b4ff516810c04/xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",hashes = {sha256 = "70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90"}}, - {name = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/8a/6e/6e88b8f24612510e73d4d70d9b0c7dff62a2e78451b9f0d042a5462c8d03/xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27"}}, - {name = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/af/51/7862f4fa4b75a25c3b4163c8a873f070532fe5f2d3f9b3fc869c8337a398/xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2"}}, - {name = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/22/61/8d6a40f288f791cf79ed5bb113159abf0c81d6efb86e734334f698eb4c59/xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d"}}, - {name = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/17/02/215c4698955762d45a8158117190261b2dbefe9ae7e5b906768c09d8bc74/xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab"}}, - {name = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/5c/b7a8db8a3237cff3d535261325d95de509f6a8ae439a5a7a4ffcff478189/xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e"}}, - {name = "xxhash-3.5.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl",hashes = {sha256 = "f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"}}, - {name = "xxhash-3.5.0-cp312-cp312-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"}}, - {name = "xxhash-3.5.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"}}, +sdist = {name = "xxhash-3.6.0.tar.gz", url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hashes = {sha256 = "f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6"}} +wheels = [ + {name = "xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl",hashes = {sha256 = "a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e"}}, + {name = "xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl",hashes = {sha256 = "a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405"}}, + {name = "xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3"}}, + {name = "xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6"}}, + {name = "xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063"}}, + {name = "xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7"}}, + {name = "xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b"}}, + {name = "xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl",hashes = {sha256 = "c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd"}}, + {name = "xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl",hashes = {sha256 = "9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0"}}, + {name = "xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152"}}, + {name = "xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl",hashes = {sha256 = "8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11"}}, + {name = "xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl",hashes = {sha256 = "653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5"}}, + {name = "xxhash-3.6.0-cp314-cp314-win32.whl",url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl",hashes = {sha256 = "a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f"}}, + {name = "xxhash-3.6.0-cp314-cp314-win_amd64.whl",url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl",hashes = {sha256 = "39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad"}}, + {name = "xxhash-3.6.0-cp314-cp314-win_arm64.whl",url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl",hashes = {sha256 = "25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679"}}, + {name = "xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl",hashes = {sha256 = "c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4"}}, + {name = "xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl",hashes = {sha256 = "1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67"}}, + {name = "xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad"}}, + {name = "xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = 
"https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b"}}, + {name = "xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b"}}, + {name = "xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca"}}, + {name = "xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a"}}, + {name = "xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99"}}, + {name = "xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl",hashes = {sha256 = "a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3"}}, + {name = "xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6"}}, + {name = "xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl",hashes = {sha256 = "c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93"}}, + {name = "xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518"}}, + {name = "xxhash-3.6.0-cp314-cp314t-win32.whl",url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl",hashes = {sha256 = "5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119"}}, + {name = "xxhash-3.6.0-cp314-cp314t-win_amd64.whl",url = 
"https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl",hashes = {sha256 = "0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f"}}, + {name = "xxhash-3.6.0-cp314-cp314t-win_arm64.whl",url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl",hashes = {sha256 = "bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95"}}, + {name = "xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl",hashes = {sha256 = "599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec"}}, + {name = "xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl",hashes = {sha256 = "7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1"}}, + {name = "xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6"}}, + {name = "xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263"}}, + {name = "xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546"}}, + {name = "xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89"}}, + {name = "xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d"}}, + {name = "xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl",hashes = {sha256 = "f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7"}}, + {name = "xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl",url = 
"https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl",hashes = {sha256 = "b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db"}}, + {name = "xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42"}}, + {name = "xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl",hashes = {sha256 = "40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11"}}, + {name = "xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl",hashes = {sha256 = "f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd"}}, + {name = "xxhash-3.6.0-cp313-cp313-win32.whl",url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl",hashes = {sha256 = "2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799"}}, + {name = "xxhash-3.6.0-cp313-cp313-win_amd64.whl",url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl",hashes = {sha256 = "757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392"}}, + {name = "xxhash-3.6.0-cp313-cp313-win_arm64.whl",url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl",hashes = {sha256 = "457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6"}}, + {name = "xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl",hashes = {sha256 = "a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702"}}, + {name = "xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl",hashes = {sha256 = "568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db"}}, + {name = "xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54"}}, + {name = "xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f"}}, + {name = 
"xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5"}}, + {name = "xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1"}}, + {name = "xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee"}}, + {name = "xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl",hashes = {sha256 = "794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd"}}, + {name = "xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl",hashes = {sha256 = "6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729"}}, + {name = "xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292"}}, + {name = "xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl",hashes = {sha256 = "d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf"}}, + {name = "xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033"}}, + {name = "xxhash-3.6.0-cp313-cp313t-win32.whl",url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl",hashes = {sha256 = "1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec"}}, + {name = "xxhash-3.6.0-cp313-cp313t-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl",hashes = {sha256 = "b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8"}}, + {name = "xxhash-3.6.0-cp313-cp313t-win_arm64.whl",url = 
"https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl",hashes = {sha256 = "ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746"}}, + {name = "xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl",hashes = {sha256 = "01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c"}}, + {name = "xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl",hashes = {sha256 = "b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204"}}, + {name = "xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490"}}, + {name = "xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2"}}, + {name = "xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa"}}, + {name = "xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0"}}, + {name = "xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2"}}, + {name = "xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9"}}, + {name = "xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl",hashes = {sha256 = "6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e"}}, + {name = 
"xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374"}}, + {name = "xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl",hashes = {sha256 = "7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d"}}, + {name = "xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl",hashes = {sha256 = "418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae"}}, + {name = "xxhash-3.6.0-cp312-cp312-win32.whl",url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl",hashes = {sha256 = "50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb"}}, + {name = "xxhash-3.6.0-cp312-cp312-win_amd64.whl",url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl",hashes = {sha256 = "c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c"}}, + {name = "xxhash-3.6.0-cp312-cp312-win_arm64.whl",url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl",hashes = {sha256 = "eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829"}}, + {name = "xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a"}}, + {name = "xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa"}}, + {name = "xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248"}}, + {name = "xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62"}}, + {name = "xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f"}}, + {name = "xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e"}}, + {name = "xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8"}}, + {name = "xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0"}}, + {name = "xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl",hashes = {sha256 = "7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77"}}, + {name = "xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c"}}, + {name = "xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl",hashes = {sha256 = "929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b"}}, + {name = "xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3"}}, + {name = "xxhash-3.6.0-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl",hashes = {sha256 = "d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd"}}, + {name = "xxhash-3.6.0-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl",hashes = {sha256 = "26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef"}}, + {name = "xxhash-3.6.0-cp311-cp311-win_arm64.whl",url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl",hashes = {sha256 = "d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7"}}, + 
{name = "xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0"}}, + {name = "xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296"}}, + {name = "xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13"}}, + {name = "xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd"}}, + {name = "xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl",hashes = {sha256 = "15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d"}}, + {name = "xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/34/ee/f9f1d656ad168681bb0f6b092372c1e533c4416b8069b1896a175c46e484/xxhash-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71"}}, + {name = "xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a3/b1/93508d9460b292c74a09b83d16750c52a0ead89c51eea9951cb97a60d959/xxhash-3.6.0-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d"}}, + {name = "xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",url = "https://files.pythonhosted.org/packages/07/55/28c93a3662f2d200c70704efe74aab9640e824f8ce330d8d3943bf7c9b3c/xxhash-3.6.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl",hashes = {sha256 = "89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8"}}, + {name = "xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",url = "https://files.pythonhosted.org/packages/c1/96/fec0be9bb4b8f5d9c57d76380a366f31a1781fb802f76fc7cda6c84893c7/xxhash-3.6.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl",hashes = {sha256 = "48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058"}}, + {name = "xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",url = 
"https://files.pythonhosted.org/packages/c4/a0/c706845ba77b9611f81fd2e93fad9859346b026e8445e76f8c6fd057cc6d/xxhash-3.6.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl",hashes = {sha256 = "b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2"}}, + {name = "xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",url = "https://files.pythonhosted.org/packages/67/1e/164126a2999e5045f04a69257eea946c0dc3e86541b400d4385d646b53d7/xxhash-3.6.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl",hashes = {sha256 = "a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc"}}, + {name = "xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",url = "https://files.pythonhosted.org/packages/2d/4b/55ab404c56cd70a2cf5ecfe484838865d0fea5627365c6c8ca156bd09c8f/xxhash-3.6.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl",hashes = {sha256 = "8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc"}}, + {name = "xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/45/e6/52abf06bac316db33aa269091ae7311bd53cfc6f4b120ae77bac1b348091/xxhash-3.6.0-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07"}}, + {name = "xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl",url = "https://files.pythonhosted.org/packages/34/37/db94d490b8691236d356bc249c08819cbcef9273a1a30acf1254ff9ce157/xxhash-3.6.0-cp310-cp310-musllinux_1_2_i686.whl",hashes = {sha256 = "339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4"}}, + {name = "xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl",url = "https://files.pythonhosted.org/packages/b7/36/c4f219ef4a17a4f7a64ed3569bc2b5a9c8311abdb22249ac96093625b1a4/xxhash-3.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl",hashes = {sha256 = "bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06"}}, + {name = "xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl",url = "https://files.pythonhosted.org/packages/fd/06/bfac889a374fc2fc439a69223d1750eed2e18a7db8514737ab630534fa08/xxhash-3.6.0-cp310-cp310-musllinux_1_2_s390x.whl",hashes = {sha256 = "5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4"}}, + {name = "xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/c9/d1/555d8447e0dd32ad0930a249a522bb2e289f0d08b6b16204cfa42c1f5a0c/xxhash-3.6.0-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b"}}, + {name = "xxhash-3.6.0-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/d1/15/8751330b5186cedc4ed4b597989882ea05e0408b53fa47bcb46a6125bfc6/xxhash-3.6.0-cp310-cp310-win32.whl",hashes = {sha256 = "aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b"}}, + {name = "xxhash-3.6.0-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/bb/cc/53f87e8b5871a6eb2ff7e89c48c66093bda2be52315a8161ddc54ea550c4/xxhash-3.6.0-cp310-cp310-win_amd64.whl",hashes = {sha256 = "e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb"}}, + {name = "xxhash-3.6.0-cp310-cp310-win_arm64.whl",url = "https://files.pythonhosted.org/packages/9f/00/60f9ea3bb697667a14314d7269956f58bf56bb73864f8f8d52a3c2535e9a/xxhash-3.6.0-cp310-cp310-win_arm64.whl",hashes = {sha256 = "4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d"}}, ] 
marker = "\"default\" in dependency_groups" [packages.tool.pdm] dependencies = [] +[[packages]] +name = "scipy" +version = "1.15.3" +requires-python = ">=3.10" +sdist = {name = "scipy-1.15.3.tar.gz", url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hashes = {sha256 = "eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf"}} +wheels = [ + {name = "scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl",url = "https://files.pythonhosted.org/packages/96/ab/5cc9f80f28f6a7dff646c5756e559823614a42b1939d86dd0ed550470210/scipy-1.15.3-cp311-cp311-macosx_10_13_x86_64.whl",hashes = {sha256 = "993439ce220d25e3696d1b23b233dd010169b62f6456488567e830654ee37a6b"}}, + {name = "scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/4a/66ba30abe5ad1a3ad15bfb0b59d22174012e8056ff448cb1644deccbfed2/scipy-1.15.3-cp311-cp311-macosx_12_0_arm64.whl",hashes = {sha256 = "34716e281f181a02341ddeaad584205bd2fd3c242063bd3423d61ac259ca7eba"}}, + {name = "scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4b/fa/a7e5b95afd80d24313307f03624acc65801846fa75599034f8ceb9e2cbf6/scipy-1.15.3-cp311-cp311-macosx_14_0_arm64.whl",hashes = {sha256 = "3b0334816afb8b91dab859281b1b9786934392aa3d527cd847e41bb6f45bee65"}}, + {name = "scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/99/f3aaddccf3588bb4aea70ba35328c204cadd89517a1612ecfda5b2dd9d7a/scipy-1.15.3-cp311-cp311-macosx_14_0_x86_64.whl",hashes = {sha256 = "6db907c7368e3092e24919b5e31c76998b0ce1684d51a90943cb0ed1b4ffd6c1"}}, + {name = "scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/56/c5/1032cdb565f146109212153339f9cb8b993701e9fe56b1c97699eee12586/scipy-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "721d6b4ef5dc82ca8968c25b111e307083d7ca9091bc38163fb89243e85e3889"}}, + {name = "scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/bd/37/89f19c8c05505d0601ed5650156e50eb881ae3918786c8fd7262b4ee66d3/scipy-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "39cb9c62e471b1bb3750066ecc3a3f3052b37751c7c3dfd0fd7e48900ed52982"}}, + {name = "scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/7e/31/be59513aa9695519b18e1851bb9e487de66f2d31f835201f1b42f5d4d475/scipy-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "795c46999bae845966368a3c013e0e00947932d68e235702b5c3f6ea799aa8c9"}}, + {name = "scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/10/c0/4f5f3eeccc235632aab79b27a74a9130c6c35df358129f7ac8b29f562ac7/scipy-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "18aaacb735ab38b38db42cb01f6b92a2d0d4b6aabefeb07f02849e47f8fb3594"}}, + {name = "scipy-1.15.3-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/ab/a7/0ddaf514ce8a8714f6ed243a2b391b41dbb65251affe21ee3077ec45ea9a/scipy-1.15.3-cp311-cp311-win_amd64.whl",hashes = {sha256 = "ae48a786a28412d744c62fd7816a4118ef97e5be0bee968ce8f0a2fba7acf3bb"}}, + {name = "scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/78/2f/4966032c5f8cc7e6a60f1b2e0ad686293b9474b65246b0c642e3ef3badd0/scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl",hashes = {sha256 = "a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c"}}, + {name = "scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl",url = "https://files.pythonhosted.org/packages/a0/6e/0c3bf90fae0e910c274db43304ebe25a6b391327f3f10b5dcc638c090795/scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl",hashes = {sha256 = "ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253"}}, + {name = "scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/ea/b1/4deb37252311c1acff7f101f6453f0440794f51b6eacb1aad4459a134081/scipy-1.15.3-cp310-cp310-macosx_14_0_arm64.whl",hashes = {sha256 = "aef683a9ae6eb00728a542b796f52a5477b78252edede72b8327a886ab63293f"}}, + {name = "scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/38/7d/f457626e3cd3c29b3a49ca115a304cebb8cc6f31b04678f03b216899d3c6/scipy-1.15.3-cp310-cp310-macosx_14_0_x86_64.whl",hashes = {sha256 = "1c832e1bd78dea67d5c16f786681b28dd695a8cb1fb90af2e27580d3d0967e92"}}, + {name = "scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/db/0a/92b1de4a7adc7a15dcf5bddc6e191f6f29ee663b30511ce20467ef9b82e4/scipy-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "263961f658ce2165bbd7b99fa5135195c3a12d9bef045345016b8b50c315cb82"}}, + {name = "scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "9e2abc762b0811e09a0d3258abee2d98e0c703eee49464ce0069590846f31d40"}}, + {name = "scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/25/e1/3df8f83cb15f3500478c889be8fb18700813b95e9e087328230b98d547ff/scipy-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "ed7284b21a7a0c8f1b6e5977ac05396c0d008b89e05498c8b7e8f4a1423bba0e"}}, + {name = "scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/93/3e/b3257cf446f2a3533ed7809757039016b74cd6f38271de91682aa844cfc5/scipy-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "5380741e53df2c566f4d234b100a484b420af85deb39ea35a1cc1be84ff53a5c"}}, + {name = "scipy-1.15.3-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/d1/84/55bc4881973d3f79b479a5a2e2df61c8c9a04fcb986a213ac9c02cfb659b/scipy-1.15.3-cp310-cp310-win_amd64.whl",hashes = {sha256 = "9d61e97b186a57350f6d6fd72640f9e99d5a4a2b8fbf4b9ee9a841eab327dc13"}}, +] +marker = "python_full_version >= \"3.10.0\" and python_version < \"3.12\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [ + "numpy<2.5,>=1.23.5", +] + +[[packages]] +name = "numpy" +version = "2.2.6" +requires-python = ">=3.10" +sdist = {name = "numpy-2.2.6.tar.gz", url = "https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hashes = {sha256 = "e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"}} +wheels = [ + {name = "numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl",hashes = {sha256 = "f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae"}}, + {name = "numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl",url = "https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl",hashes = {sha256 = "c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a"}}, + {name = "numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl",hashes = {sha256 = "3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42"}}, + {name = "numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl",hashes = {sha256 = "481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491"}}, + {name = "numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a"}}, + {name = "numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf"}}, + {name = "numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl",hashes = {sha256 = "bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1"}}, + {name = "numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl",hashes = {sha256 = "9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab"}}, + {name = "numpy-2.2.6-cp311-cp311-win32.whl",url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl",hashes = {sha256 = "0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47"}}, + {name = "numpy-2.2.6-cp311-cp311-win_amd64.whl",url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl",hashes = {sha256 = "e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303"}}, + {name = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl",url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl",hashes = {sha256 = "b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"}}, + {name = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl",url = 
"https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl",hashes = {sha256 = "8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"}}, + {name = "numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl",url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl",hashes = {sha256 = "37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163"}}, + {name = "numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl",hashes = {sha256 = "5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf"}}, + {name = "numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",hashes = {sha256 = "efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83"}}, + {name = "numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = "https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915"}}, + {name = "numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl",url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl",hashes = {sha256 = "74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680"}}, + {name = "numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl",url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl",hashes = {sha256 = "8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289"}}, + {name = "numpy-2.2.6-cp310-cp310-win32.whl",url = "https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl",hashes = {sha256 = "b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d"}}, + {name = "numpy-2.2.6-cp310-cp310-win_amd64.whl",url = "https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl",hashes = {sha256 = "f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3"}}, + {name = "numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl",hashes = {sha256 = "0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d"}}, + {name = "numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl",url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl",hashes = {sha256 = "7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db"}}, + {name = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",url = 
"https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",hashes = {sha256 = "ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"}}, + {name = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl",hashes = {sha256 = "d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"}}, +] +marker = "python_full_version >= \"3.10.0\" and python_version < \"3.12\" and \"default\" in dependency_groups or python_full_version >= \"3.10.0\" and python_version < \"3.12\" and \"dev\" in extras" + +[packages.tool.pdm] +dependencies = [] + [[packages]] name = "tomli" version = "2.3.0" diff --git a/scripts/generate_pylock.sh b/scripts/generate_pylock.sh index ad953391..c62a6b51 100755 --- a/scripts/generate_pylock.sh +++ b/scripts/generate_pylock.sh @@ -1,8 +1,13 @@ #!/usr/bin/env sh -set -e +set -ex -# Script to generate pylock.toml from scratch -# If pylock.toml already exists just run `pdm lock --update-reuse` +usage() { + echo "Script to generate pylock.toml from scratch" + echo "If pylock.toml already exists just run \`pdm lock --update-reuse\`" + echo "Usage: $0 [-f] [-h]" + echo " -f Force update of all dependencies" + echo " -h Show this help message" +} # Check if pdm is available if ! command -v pdm >/dev/null 2>&1 @@ -11,8 +16,24 @@ then exit 1 fi +FORCE_REGEN=0 +while getopts "fh" opt; do + case $opt in + f) FORCE_REGEN=1 ;; + h) usage + exit 0 ;; + *) usage + exit 1 ;; + esac +done + +set +e +update_stratagy="$([ $FORCE_REGEN -eq 0 ] && echo "--update-reuse")" +set -e + # Locking all dependencies to the same version for all supported # python versions is not possible (mostly due to numpy) # so we need to lock separately for python >=3.12 and <3.12 -pdm lock --python "~=3.12" --update-reuse -pdm lock --append --python "<3.12" --update-reuse +# Only set update-reuse if not forcing regeneration +pdm lock --python "~=3.12" $update_stratagy +pdm lock --append --python "<3.12" $update_stratagy From bde3ae8308737d447db5780f18ac95572b2251b8 Mon Sep 17 00:00:00 2001 From: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> Date: Thu, 9 Oct 2025 12:40:56 -0400 Subject: [PATCH 57/90] Optimize use of lowercase in src/guidellm/utils/registry.py Co-authored-by: Samuel Monson Signed-off-by: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> --- src/guidellm/utils/registry.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/guidellm/utils/registry.py b/src/guidellm/utils/registry.py index 2fdfc318..e6f1b657 100644 --- a/src/guidellm/utils/registry.py +++ b/src/guidellm/utils/registry.py @@ -211,8 +211,9 @@ def get_registered_object(cls, name: str) -> RegistryObjT | None: if name in cls.registry: return cls.registry[name] + name_casefold = name.lower() for k, v in cls.registry.items(): - if name.lower() == k.lower(): + if name_casefold == k.lower(): return v return None # Not found From a65b5b11fa8950ca9070ea7b028fe34068c24335 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Wed, 20 Aug 2025 11:32:12 -0400 Subject: [PATCH 58/90] Reapply Fix errors with metric accumulation (#266) Fixes a issue in metric calculation that caused incorrect statistics at extreme changes in concurrency and an issue where the first decode token was not counted in total tokens per 
second. - [x] Fixed issue where merged concurrency change events would double-count concurrency - [x] Ensure first decode token is counted when calculating total tokens per second - Run unit tests: `tox -e test-unit -- -m "regression and sanity"` --- - [x] "I certify that all code in this PR is my own, except as noted below." - [x] Includes AI-assisted code completion - [ ] Includes code generated by an AI application - [x] Includes AI-generated tests (NOTE: AI written tests should have a docstring that includes `## WRITTEN BY AI ##`) --------- Signed-off-by: Samuel Monson --- src/guidellm/utils/statistics.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/src/guidellm/utils/statistics.py b/src/guidellm/utils/statistics.py index c820de9d..acd9d4f1 100644 --- a/src/guidellm/utils/statistics.py +++ b/src/guidellm/utils/statistics.py @@ -275,18 +275,9 @@ def from_request_times( """ if distribution_type == "concurrency": # convert to delta changes based on when requests were running - time_deltas: dict[float, int] = defaultdict(int) - for start, end in requests: - time_deltas[start] += 1 - time_deltas[end] -= 1 - - # convert to the events over time measuring concurrency changes - events = [] - active = 0 - - for time, delta in sorted(time_deltas.items()): - active += delta - events.append((time, active)) + events = [(start, 1) for start, _ in requests] + [ + (end, -1) for _, end in requests + ] elif distribution_type == "rate": # convert to events for when requests finished global_start = min(start for start, _ in requests) if requests else 0 @@ -313,6 +304,16 @@ def from_request_times( else: flattened_events.append((time, val)) + if distribution_type == "concurrency": + # convert to the events over time measuring concurrency changes + events_over_time: list[tuple[float, float]] = [] + active = 0 + for time, delta in flattened_events: + active += delta # type: ignore [assignment] + events_over_time.append((time, active)) + + flattened_events = events_over_time + # convert to value distribution function distribution: dict[float, float] = defaultdict(float) From e1fb966db608e80e20b0f7acece4f342b2b82632 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Tue, 7 Oct 2025 14:27:19 -0400 Subject: [PATCH 59/90] Disable base class initialization Signed-off-by: Samuel Monson --- src/guidellm/utils/pydantic_utils.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/guidellm/utils/pydantic_utils.py b/src/guidellm/utils/pydantic_utils.py index 27c2e1cf..d3e3f6e6 100644 --- a/src/guidellm/utils/pydantic_utils.py +++ b/src/guidellm/utils/pydantic_utils.py @@ -275,6 +275,17 @@ class DatabaseConfig(BaseConfig): schema_discriminator: ClassVar[str] = "model_type" + def __new__(cls, *args, **kwargs): # noqa: ARG004 + """ + Prevent direct instantiation of base classes that use this mixin. + + Only allows instantiation of concrete subclasses, not the base class. 
+ """ + base_type = cls.__pydantic_schema_base_type__() + if cls is base_type: + raise TypeError(f"only children of '{cls.__name__}' may be instantiated") + return super().__new__(cls) + @classmethod def register_decorator( cls, clazz: RegisterClassT, name: str | list[str] | None = None From a9aad63c46274e3534be62e26d134e26ae6b7c5a Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Mon, 6 Oct 2025 12:09:23 -0400 Subject: [PATCH 60/90] Test cleanup Signed-off-by: Samuel Monson --- tests/unit/objects/__init__.py | 0 tests/unit/objects/test_pydantic.py | 43 ------------------- .../{objects => utils}/test_statistics.py | 2 +- 3 files changed, 1 insertion(+), 44 deletions(-) delete mode 100644 tests/unit/objects/__init__.py delete mode 100644 tests/unit/objects/test_pydantic.py rename tests/unit/{objects => utils}/test_statistics.py (99%) diff --git a/tests/unit/objects/__init__.py b/tests/unit/objects/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/unit/objects/test_pydantic.py b/tests/unit/objects/test_pydantic.py deleted file mode 100644 index cb7f438f..00000000 --- a/tests/unit/objects/test_pydantic.py +++ /dev/null @@ -1,43 +0,0 @@ -import pytest -from pydantic import computed_field - -from guidellm.objects.pydantic import StandardBaseModel - - -class ExampleModel(StandardBaseModel): - name: str - age: int - - @computed_field # type: ignore[misc] - @property - def computed(self) -> str: - return self.name + " " + str(self.age) - - -@pytest.mark.smoke -def test_standard_base_model_initialization(): - example = ExampleModel(name="John Doe", age=30) - assert example.name == "John Doe" - assert example.age == 30 - assert example.computed == "John Doe 30" - - -@pytest.mark.smoke -def test_standard_base_model_invalid_initialization(): - with pytest.raises(ValueError): - ExampleModel(name="John Doe", age="thirty") # type: ignore[arg-type] - - -@pytest.mark.smoke -def test_standard_base_model_marshalling(): - example = ExampleModel(name="John Doe", age=30) - serialized = example.model_dump() - assert serialized["name"] == "John Doe" - assert serialized["age"] == 30 - assert serialized["computed"] == "John Doe 30" - - serialized["computed"] = "Jane Doe 40" - deserialized = ExampleModel.model_validate(serialized) - assert deserialized.name == "John Doe" - assert deserialized.age == 30 - assert deserialized.computed == "John Doe 30" diff --git a/tests/unit/objects/test_statistics.py b/tests/unit/utils/test_statistics.py similarity index 99% rename from tests/unit/objects/test_statistics.py rename to tests/unit/utils/test_statistics.py index ede77175..d0f04d99 100644 --- a/tests/unit/objects/test_statistics.py +++ b/tests/unit/utils/test_statistics.py @@ -5,7 +5,7 @@ import numpy as np import pytest -from guidellm.objects import ( +from guidellm.utils.statistics import ( DistributionSummary, Percentiles, RunningStats, From 440b4e3d0af9c6a91a92992760517214c11f28a7 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Mon, 6 Oct 2025 15:59:44 -0400 Subject: [PATCH 61/90] Fix backend tests Signed-off-by: Samuel Monson --- src/guidellm/scheduler/objects.py | 2 ++ tests/unit/{backend => backends}/__init__.py | 0 tests/unit/{backend => backends}/test_backend.py | 2 +- tests/unit/{backend => backends}/test_objects.py | 1 - tests/unit/{backend => backends}/test_openai_backend.py | 4 ++-- 5 files changed, 5 insertions(+), 4 deletions(-) rename tests/unit/{backend => backends}/__init__.py (100%) rename tests/unit/{backend => backends}/test_backend.py (99%) rename 
tests/unit/{backend => backends}/test_objects.py (99%) rename tests/unit/{backend => backends}/test_openai_backend.py (99%) diff --git a/src/guidellm/scheduler/objects.py b/src/guidellm/scheduler/objects.py index fdca28b3..21d30ec8 100644 --- a/src/guidellm/scheduler/objects.py +++ b/src/guidellm/scheduler/objects.py @@ -20,6 +20,7 @@ Protocol, TypeVar, Union, + runtime_checkable, ) from pydantic import Field, computed_field @@ -232,6 +233,7 @@ def model_copy(self, **kwargs) -> ScheduledRequestInfo: # type: ignore[override ) +@runtime_checkable class BackendInterface(Protocol, Generic[RequestT, ResponseT]): """ Abstract interface for request processing backends. diff --git a/tests/unit/backend/__init__.py b/tests/unit/backends/__init__.py similarity index 100% rename from tests/unit/backend/__init__.py rename to tests/unit/backends/__init__.py diff --git a/tests/unit/backend/test_backend.py b/tests/unit/backends/test_backend.py similarity index 99% rename from tests/unit/backend/test_backend.py rename to tests/unit/backends/test_backend.py index 49b65077..ebd0da87 100644 --- a/tests/unit/backend/test_backend.py +++ b/tests/unit/backends/test_backend.py @@ -80,7 +80,7 @@ async def default_model(self) -> str | None: def test_class_signatures(self): """Test Backend inheritance and type relationships.""" assert issubclass(Backend, RegistryMixin) - assert issubclass(Backend, BackendInterface) + assert isinstance(Backend, BackendInterface) assert hasattr(Backend, "create") assert hasattr(Backend, "register") assert hasattr(Backend, "get_registered_object") diff --git a/tests/unit/backend/test_objects.py b/tests/unit/backends/test_objects.py similarity index 99% rename from tests/unit/backend/test_objects.py rename to tests/unit/backends/test_objects.py index 34a6350c..bf903733 100644 --- a/tests/unit/backend/test_objects.py +++ b/tests/unit/backends/test_objects.py @@ -397,7 +397,6 @@ def valid_instances(self, request): def test_class_signatures(self): """Test GenerationRequestTimings inheritance and type relationships.""" assert issubclass(GenerationRequestTimings, MeasuredRequestTimings) - assert issubclass(GenerationRequestTimings, StandardBaseModel) assert hasattr(GenerationRequestTimings, "model_dump") assert hasattr(GenerationRequestTimings, "model_validate") diff --git a/tests/unit/backend/test_openai_backend.py b/tests/unit/backends/test_openai_backend.py similarity index 99% rename from tests/unit/backend/test_openai_backend.py rename to tests/unit/backends/test_openai_backend.py index 7c7f528d..2180b501 100644 --- a/tests/unit/backend/test_openai_backend.py +++ b/tests/unit/backends/test_openai_backend.py @@ -237,7 +237,7 @@ async def test_info(self): target="http://test", model="test-model", timeout=30.0 ) - info = backend.info() + info = backend.info assert info["target"] == "http://test" assert info["model"] == "test-model" @@ -1074,7 +1074,7 @@ def test_get_chat_message_media_item_jpeg_file(self): mock_image = Mock(spec=Image.Image) mock_image.tobytes.return_value = b"fake_jpeg_data" - with patch("guidellm.backend.openai.Image.open", return_value=mock_image): + with patch("guidellm.backends.openai.Image.open", return_value=mock_image): result = backend._get_chat_message_media_item(mock_jpeg_path) expected_data = base64.b64encode(b"fake_jpeg_data").decode("utf-8") From 272304c316586d7ce25936935757fd3e1a762aaf Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Mon, 6 Oct 2025 16:22:30 -0400 Subject: [PATCH 62/90] Initial scheduler test fixes Signed-off-by: Samuel Monson --- 
tests/unit/scheduler/test_constraints.py | 4 ++-- tests/unit/scheduler/test_environment.py | 2 +- tests/unit/scheduler/test_objects.py | 7 +++---- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/tests/unit/scheduler/test_constraints.py b/tests/unit/scheduler/test_constraints.py index 931af413..1e343a57 100644 --- a/tests/unit/scheduler/test_constraints.py +++ b/tests/unit/scheduler/test_constraints.py @@ -286,11 +286,11 @@ def test_create_constraint_raises(self, valid_instances): def test_call_raises(self, valid_instances): """Test that calling constraint raises RuntimeError.""" instance, _ = valid_instances - state = SchedulerState(node_id="test_node", num_processes=1, start_time=0.0) + state = SchedulerState(node_id=0, num_processes=1, start_time=0.0) request = ScheduledRequestInfo( request_id="test_request", status="pending", - scheduler_node_id="test_node", + scheduler_node_id=0, scheduler_process_id=1, scheduler_start_time=0.0, ) diff --git a/tests/unit/scheduler/test_environment.py b/tests/unit/scheduler/test_environment.py index c73abe42..ba0e2787 100644 --- a/tests/unit/scheduler/test_environment.py +++ b/tests/unit/scheduler/test_environment.py @@ -246,7 +246,7 @@ async def test_sync_run_start(self, valid_instances, mock_time, delay, expected) with ( patch("time.time", return_value=mock_time), - patch("guidellm.scheduler.environment.settings") as mock_settings, + patch("guidellm.scheduler.environments.settings") as mock_settings, ): mock_settings.scheduler_start_delay_non_distributed = delay start_time = await instance.sync_run_start() diff --git a/tests/unit/scheduler/test_objects.py b/tests/unit/scheduler/test_objects.py index df794ff8..2fc63988 100644 --- a/tests/unit/scheduler/test_objects.py +++ b/tests/unit/scheduler/test_objects.py @@ -110,7 +110,7 @@ def test_generic_type_parameters(self): if hasattr(generic_base, "__args__"): type_params = generic_base.__args__ - assert len(type_params) == 3, "Should have 3 type parameters" + assert len(type_params) == 2, "Should have 2 type parameters" param_names = [param.__name__ for param in type_params] expected_names = ["RequestT", "ResponseT"] assert param_names == expected_names @@ -119,7 +119,7 @@ def test_generic_type_parameters(self): def test_implementation_construction(self): """Test that a complete concrete implementation can be instantiated.""" - class ConcreteBackend(BackendInterface[str, MeasuredRequestTimings, str]): + class ConcreteBackend(BackendInterface[str, str]): @property def processes_limit(self) -> int | None: return 4 @@ -162,7 +162,7 @@ async def resolve( async def test_implementation_async_methods(self): # noqa: C901 """Test that async methods work correctly in concrete implementation.""" - class AsyncBackend(BackendInterface[dict, MeasuredRequestTimings, dict]): + class AsyncBackend(BackendInterface[dict, dict]): def __init__(self): self.startup_called = False self.validate_called = False @@ -434,7 +434,6 @@ def valid_instances(self, request): @pytest.mark.smoke def test_class_signatures(self): """Test MeasuredRequestTimings inheritance and type relationships.""" - assert issubclass(MeasuredRequestTimings, StandardBaseModel) assert hasattr(MeasuredRequestTimings, "model_dump") assert hasattr(MeasuredRequestTimings, "model_validate") From 544c8887cec6fccb7a24dfde23ebe108e19d4e08 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Tue, 7 Oct 2025 15:41:02 -0400 Subject: [PATCH 63/90] Fix MeasuredRequestTimings tests Signed-off-by: Samuel Monson --- tests/unit/scheduler/test_objects.py | 55 
++++++++++++++++------------ 1 file changed, 32 insertions(+), 23 deletions(-) diff --git a/tests/unit/scheduler/test_objects.py b/tests/unit/scheduler/test_objects.py index 2fc63988..fc5610fd 100644 --- a/tests/unit/scheduler/test_objects.py +++ b/tests/unit/scheduler/test_objects.py @@ -3,7 +3,7 @@ import inspect import typing from collections.abc import AsyncIterator -from typing import Any, Optional, TypeVar, Union +from typing import Any, Literal, Optional, TypeVar, Union import pytest from pydantic import ValidationError @@ -25,6 +25,13 @@ from guidellm.utils import StandardBaseModel +@MeasuredRequestTimings.register("test_request_timings") +class ConcreteMeasuredRequestTimings(MeasuredRequestTimings): + """Concrete test implementation of MeasuredRequestTimings for testing.""" + + timings_type: Literal["test_request_timings"] = "test_request_timings" + + def test_request_t(): """Validate that RequestT is a TypeVar usable for generics and isn't bound.""" assert isinstance(RequestT, TypeVar) @@ -400,19 +407,23 @@ class TestRequestTimings: @pytest.fixture( params=[ - {}, + {"timings_type": "test_request_timings"}, { + "timings_type": "test_request_timings", "request_start": None, "request_end": None, }, { + "timings_type": "test_request_timings", "request_start": 1000.0, "request_end": 1100.0, }, { + "timings_type": "test_request_timings", "request_start": 1000.0, }, { + "timings_type": "test_request_timings", "request_start": 0.0, "request_end": 0.0, }, @@ -428,7 +439,7 @@ class TestRequestTimings: def valid_instances(self, request): """Creates various valid configurations of MeasuredRequestTimings.""" constructor_args = request.param - instance = MeasuredRequestTimings(**constructor_args) + instance = MeasuredRequestTimings.model_validate(constructor_args) return instance, constructor_args @pytest.mark.smoke @@ -446,7 +457,13 @@ def test_class_signatures(self): assert field_info.default is None @pytest.mark.smoke - def test_initialization(self, valid_instances): + def test_initialization(self): + """Base class initialization should fail.""" + with pytest.raises(TypeError): + MeasuredRequestTimings() + + @pytest.mark.smoke + def test_validation(self, valid_instances): """Test initialization with valid configurations.""" instance, constructor_args = valid_instances assert isinstance(instance, MeasuredRequestTimings) @@ -467,9 +484,9 @@ def test_initialization(self, valid_instances): ) def test_invalid_initialization(self, field, value): """Test invalid initialization scenarios.""" - kwargs = {field: value} + kwargs = {"timings_type": "test_request_timings", field: value} with pytest.raises(ValidationError): - MeasuredRequestTimings(**kwargs) + MeasuredRequestTimings.model_validate(kwargs) @pytest.mark.smoke def test_marshalling(self, valid_instances): @@ -533,6 +550,7 @@ class TestScheduledRequestInfo: "finalized": 2150.0, }, "request_timings": { + "timings_type": "test_request_timings", "request_start": 2060.0, "request_end": 2110.0, }, @@ -585,8 +603,8 @@ def valid_instances(self, request): **constructor_args["scheduler_timings"] ) if "request_timings" in constructor_args: - constructor_args["request_timings"] = MeasuredRequestTimings( - **constructor_args["request_timings"] + constructor_args["request_timings"] = MeasuredRequestTimings.model_validate( + constructor_args["request_timings"] ) instance = ScheduledRequestInfo(**constructor_args) @@ -596,7 +614,6 @@ def valid_instances(self, request): def test_class_signatures(self): """Test ScheduledRequestInfo inheritance and type 
relationships.""" assert issubclass(ScheduledRequestInfo, StandardBaseModel) - assert issubclass(ScheduledRequestInfo, typing.Generic) assert hasattr(ScheduledRequestInfo, "model_dump") assert hasattr(ScheduledRequestInfo, "model_validate") @@ -606,18 +623,6 @@ def test_class_signatures(self): assert isinstance(ScheduledRequestInfo.started_at, property) assert isinstance(ScheduledRequestInfo.completed_at, property) - # Check that it's properly generic - orig_bases = getattr(ScheduledRequestInfo, "__orig_bases__", ()) - generic_base = next( - ( - base - for base in orig_bases - if hasattr(base, "__origin__") and base.__origin__ is typing.Generic - ), - None, - ) - assert generic_base is not None - # Check required fields fields = ScheduledRequestInfo.model_fields for key in self.CHECK_KEYS: @@ -719,7 +724,9 @@ def test_started_at_property(self): scheduler_process_id=0, scheduler_start_time=1000.0, scheduler_timings=RequestSchedulerTimings(resolve_start=2000.0), - request_timings=MeasuredRequestTimings(request_start=2100.0), + request_timings=MeasuredRequestTimings.model_validate( + {"timings_type": "test_request_timings", "request_start": 2100.0} + ), ) assert instance.started_at == 2100.0 @@ -755,7 +762,9 @@ def test_completed_at_property(self): scheduler_process_id=0, scheduler_start_time=1000.0, scheduler_timings=RequestSchedulerTimings(resolve_end=2000.0), - request_timings=MeasuredRequestTimings(request_end=2100.0), + request_timings=MeasuredRequestTimings.model_validate( + {"timings_type": "test_request_timings", "request_end": 2100.0} + ), ) assert instance.completed_at == 2100.0 From 5032e9e3bc5ae5caba70af8f09e0eb4a10b14915 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Tue, 7 Oct 2025 17:14:34 -0400 Subject: [PATCH 64/90] Patch time.time in workgroup lifecycle test Signed-off-by: Samuel Monson --- tests/unit/scheduler/test_worker_group.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/unit/scheduler/test_worker_group.py b/tests/unit/scheduler/test_worker_group.py index b72fb95b..ff87c0b9 100644 --- a/tests/unit/scheduler/test_worker_group.py +++ b/tests/unit/scheduler/test_worker_group.py @@ -9,6 +9,7 @@ from multiprocessing.process import BaseProcess from multiprocessing.synchronize import Barrier, Event from typing import Any, Generic, Literal +from unittest.mock import patch import pytest from pydantic import Field @@ -48,6 +49,23 @@ class MockRequestTimings(MeasuredRequestTimings): timings_type: Literal["mock"] = Field(default="mock") +class MockTime: + """Deterministic time mock for testing.""" + + def __init__(self, start_time: float = 1000.0): + self.current_time = start_time + self.increment = 0.1 + + def time(self) -> float: + """Return current mock time and increment for next call.""" + current = self.current_time + self.current_time += self.increment + return current + + +mock_time = MockTime() + + class MockBackend(BackendInterface): """Mock backend for testing worker group functionality.""" @@ -67,6 +85,7 @@ def processes_limit(self) -> int | None: def requests_limit(self) -> int | None: return self._requests_limit + @property def info(self) -> dict[str, Any]: return {"type": "mock"} @@ -249,6 +268,7 @@ def test_invalid_initialization_missing(self): @pytest.mark.smoke @async_timeout(10) @pytest.mark.asyncio + @patch.object(time, "time", mock_time.time) async def test_lifecycle(self, valid_instances: tuple[WorkerProcessGroup, dict]): # noqa: C901, PLR0912 """Test the lifecycle methods of WorkerProcessGroup.""" instance, 
constructor_args = valid_instances From 4971e561f892c2fd744472c51543529eb0b20dfc Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Tue, 7 Oct 2025 18:14:29 -0400 Subject: [PATCH 65/90] Tear down worker process group in instance fixture Signed-off-by: Samuel Monson --- tests/unit/scheduler/test_worker_group.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/unit/scheduler/test_worker_group.py b/tests/unit/scheduler/test_worker_group.py index ff87c0b9..80bb6c23 100644 --- a/tests/unit/scheduler/test_worker_group.py +++ b/tests/unit/scheduler/test_worker_group.py @@ -163,7 +163,19 @@ def valid_instances(self, request): """Fixture providing test data for WorkerProcessGroup.""" constructor_args = request.param.copy() instance = WorkerProcessGroup(**request.param, backend=MockBackend()) - return instance, constructor_args + yield instance, constructor_args + + # Shutting down. Attempting shut down. + try: + if hasattr(instance, "processes") and instance.processes is not None: + asyncio.run(instance.shutdown()) + # It's not...it's-it's not...it's not shutting down...it's not... + except Exception: # noqa: BLE001 + if hasattr(instance, "processes") and instance.processes is not None: + # Gahhh...! + for proc in instance.processes: + proc.kill() + proc.join(timeout=1.0) @pytest.mark.smoke def test_class_signatures(self, valid_instances): From 567689595f5cafd1b30d93f1d8e698aab5d36c2c Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Wed, 8 Oct 2025 16:02:35 -0400 Subject: [PATCH 66/90] Match main tests to current CLI Signed-off-by: Samuel Monson --- tests/unit/test_main.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/unit/test_main.py b/tests/unit/test_main.py index e813dba4..c8fa71c2 100644 --- a/tests/unit/test_main.py +++ b/tests/unit/test_main.py @@ -19,10 +19,10 @@ def test_benchmark_run_with_backend_args(): "--backend-args", '{"headers": {"Authorization": "Bearer my-token"}, "verify": false}', "--target", - "http://localhost:8000", + "http://localhost:9", "--data", "prompt_tokens=1,output_tokens=1", - "--rate-type", + "--profile", "constant", "--rate", "1", @@ -36,7 +36,7 @@ def test_benchmark_run_with_backend_args(): assert "Invalid header format" not in result.output -@patch("guidellm.__main__.benchmark_with_scenario") +@patch("guidellm.__main__.benchmark_generative_text") def test_cli_backend_args_header_removal(mock_benchmark_func, tmp_path: Path): """ Tests that --backend-args from the CLI correctly overrides scenario @@ -47,11 +47,11 @@ def test_cli_backend_args_header_removal(mock_benchmark_func, tmp_path: Path): # Create a scenario file with a header that should be overridden and removed scenario_content = { "backend_type": "openai_http", - "backend_args": {"headers": {"Authorization": "should-be-removed"}}, + "backend_kwargs": {"headers": {"Authorization": "should-be-removed"}}, "data": "prompt_tokens=10,output_tokens=10", "max_requests": 1, "target": "http://dummy-target", - "rate_type": "synchronous", + "profile": "synchronous", "processor": "gpt2", } with scenario_path.open("w") as f: @@ -65,7 +65,7 @@ def test_cli_backend_args_header_removal(mock_benchmark_func, tmp_path: Path): "run", "--scenario", str(scenario_path), - "--backend-args", + "--backend-kwargs", '{"headers": {"Authorization": null, "Custom-Header": "Custom-Value"}}', ], catch_exceptions=False, @@ -79,6 +79,6 @@ def test_cli_backend_args_header_removal(mock_benchmark_func, tmp_path: Path): scenario = call_args["scenario"] # Verify the 
backend_args were merged correctly - backend_args = scenario.backend_args + backend_args = scenario.backend_kwargs expected_headers = {"Authorization": None, "Custom-Header": "Custom-Value"} assert backend_args["headers"] == expected_headers From 5f36174595d0a3ed0a581005f453b57a8dc772fb Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Wed, 8 Oct 2025 16:04:30 -0400 Subject: [PATCH 67/90] Various small fixes to utils tests Signed-off-by: Samuel Monson --- tests/unit/utils/test_auto_importer.py | 3 ++- tests/unit/utils/test_registry.py | 4 +++- tests/unit/utils/test_text.py | 28 ++------------------------ 3 files changed, 7 insertions(+), 28 deletions(-) diff --git a/tests/unit/utils/test_auto_importer.py b/tests/unit/utils/test_auto_importer.py index cc71bce3..5f930ba2 100644 --- a/tests/unit/utils/test_auto_importer.py +++ b/tests/unit/utils/test_auto_importer.py @@ -4,6 +4,7 @@ from __future__ import annotations +import sys from unittest import mock import pytest @@ -191,9 +192,9 @@ class TestClass(AutoImporterMixin): mock_import.assert_any_call("test.package.subpackage") @pytest.mark.sanity - @mock.patch("sys.modules", {"test.package.existing": mock.MagicMock()}) @mock.patch("importlib.import_module") @mock.patch("pkgutil.walk_packages") + @mock.patch.dict(sys.modules, {"test.package.existing": mock.MagicMock()}) def test_skip_already_imported_modules(self, mock_walk, mock_import): """Test that modules already in sys.modules are tracked but not re-imported.""" diff --git a/tests/unit/utils/test_registry.py b/tests/unit/utils/test_registry.py index eed126d3..47253b72 100644 --- a/tests/unit/utils/test_registry.py +++ b/tests/unit/utils/test_registry.py @@ -579,7 +579,9 @@ def validate_value(value: int) -> bool: if hasattr(inspect, "get_annotations"): # Python 3.10+ try: - annotations = inspect.get_annotations(registered_class.__init__) + annotations = inspect.get_annotations( + registered_class.__init__, eval_str=True + ) assert "value" in annotations assert annotations["value"] is int return_ann = annotations.get("return") diff --git a/tests/unit/utils/test_text.py b/tests/unit/utils/test_text.py index 3774ca1f..154291d6 100644 --- a/tests/unit/utils/test_text.py +++ b/tests/unit/utils/test_text.py @@ -42,7 +42,7 @@ class TestFormatValueDisplay: "expected", ), [ - (42.0, "test", "", None, None, None, "42 [info]test[/info]"), + (42.0, "test", "", None, None, 0, "42 [info]test[/info]"), (42.5, "test", "ms", None, None, 1, "42.5ms [info]test[/info]"), (42.123, "test", "", None, 5, 2, " 42.12 [info]test[/info]"), ( @@ -78,34 +78,10 @@ def test_invocation( assert label in result assert units in result value_check = ( - str(int(value)) - if decimal_places == 0 - else ( - f"{value:.{decimal_places}f}" - if decimal_places is not None - else str(value) - ) + str(int(value)) if decimal_places == 0 else f"{value:.{decimal_places}f}" ) assert value_check in result or str(value) in result - @pytest.mark.sanity - @pytest.mark.parametrize( - ("value", "label"), - [ - (None, "test"), - (42.0, None), - ("not_number", "test"), - ], - ) - def test_invocation_with_none_values(self, value, label): - """Test format_value_display with None/invalid inputs still works.""" - result = format_value_display(value, label) - assert isinstance(result, str) - if label is not None: - assert str(label) in result - if value is not None: - assert str(value) in result - class TestSplitTextListByLength: """Test suite for split_text_list_by_length.""" From 155623631824f56b8bab6364516ffb6235ac3d03 Mon Sep 17 00:00:00 2001 
From: Samuel Monson Date: Wed, 8 Oct 2025 17:20:35 -0400 Subject: [PATCH 68/90] Fix typing import for python3.10 Signed-off-by: Samuel Monson --- src/guidellm/benchmark/types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/guidellm/benchmark/types.py b/src/guidellm/benchmark/types.py index 04ad4061..1ef65a68 100644 --- a/src/guidellm/benchmark/types.py +++ b/src/guidellm/benchmark/types.py @@ -2,12 +2,13 @@ from collections.abc import Iterable from pathlib import Path -from typing import Any, TypeAliasType +from typing import Any from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from transformers import ( # type: ignore[import] PreTrainedTokenizerBase, ) +from typing_extensions import TypeAliasType from guidellm.benchmark.aggregator import ( Aggregator, From 87ba006c78d237012ebc253dceb8e4c7e20fc284 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Thu, 9 Oct 2025 18:11:19 -0400 Subject: [PATCH 69/90] Fixed quality errors Many of the quality errors are due to using the older union style, and have appeared due to the upgrade of the minimum Python version from 3.9 to 3.10 Signed-off-by: Jared O'Connell --- src/guidellm/backends/openai.py | 2 +- src/guidellm/benchmark/aggregator.py | 2 +- src/guidellm/benchmark/benchmarker.py | 4 +- src/guidellm/benchmark/output.py | 2 +- src/guidellm/benchmark/scenario.py | 2 +- src/guidellm/dataset/creator.py | 10 ++-- src/guidellm/dataset/file.py | 4 +- src/guidellm/dataset/hf_datasets.py | 6 +-- src/guidellm/scheduler/constraints.py | 20 ++++---- src/guidellm/scheduler/worker.py | 2 +- src/guidellm/utils/encoding.py | 72 +++++++++++++-------------- src/guidellm/utils/hf_datasets.py | 3 +- src/guidellm/utils/hf_transformers.py | 8 +-- src/guidellm/utils/messaging.py | 6 +-- src/guidellm/utils/mixins.py | 2 +- src/guidellm/utils/pydantic_utils.py | 3 +- src/guidellm/utils/random.py | 7 ++- src/guidellm/utils/registry.py | 5 +- src/guidellm/utils/statistics.py | 44 ++++++++-------- src/guidellm/utils/synchronous.py | 13 ++--- src/guidellm/utils/typing.py | 7 +-- 21 files changed, 108 insertions(+), 116 deletions(-) diff --git a/src/guidellm/backends/openai.py b/src/guidellm/backends/openai.py index ce83076f..fd14ee65 100644 --- a/src/guidellm/backends/openai.py +++ b/src/guidellm/backends/openai.py @@ -559,7 +559,7 @@ def _get_chat_messages( resolved_content.append(item) elif isinstance(item, str): resolved_content.append({"type": "text", "text": item}) - elif isinstance(item, (Image.Image, Path)): + elif isinstance(item, Image.Image | Path): resolved_content.append(self._get_chat_message_media_item(item)) else: raise ValueError(f"Unsupported content item type: {type(item)}") diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index e965c482..be70276b 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -267,7 +267,7 @@ def resolve( resolved = {} for key, val in aggregators.items(): - if isinstance(val, (Aggregator, CompilableAggregator)): + if isinstance(val, Aggregator | CompilableAggregator): resolved[key] = val else: aggregator_class = cls.get_registered_object(key) diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index 5f05065a..99410e4c 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -228,12 +228,12 @@ def _combine( existing: dict[str, Any] | StandardBaseDict, addition: dict[str, Any] | StandardBaseDict, ) -> dict[str, Any] | 
StandardBaseDict: - if not isinstance(existing, (dict, StandardBaseDict)): + if not isinstance(existing, dict | StandardBaseDict): raise ValueError( f"Existing value {existing} (type: {type(existing).__name__}) " f"is not a valid type for merging." ) - if not isinstance(addition, (dict, StandardBaseDict)): + if not isinstance(addition, dict | StandardBaseDict): raise ValueError( f"Addition value {addition} (type: {type(addition).__name__}) " f"is not a valid type for merging." diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index 56775dac..c4e8fb0f 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -90,7 +90,7 @@ def resolve( if not output_formats: return {} - if isinstance(output_formats, (list, tuple)): + if isinstance(output_formats, list | tuple): # support list of output keys: ["csv", "json"] # support list of files: ["path/to/file.json", "path/to/file.csv"] formats_list = output_formats diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index b53ef424..5299616f 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -38,7 +38,7 @@ def parse_float_list(value: str | float | list[float]) -> list[float]: or convert single float list of one or pass float list through. """ - if isinstance(value, (int, float)): + if isinstance(value, int | float): return [value] elif isinstance(value, list): return value diff --git a/src/guidellm/dataset/creator.py b/src/guidellm/dataset/creator.py index a74ec8c0..b95f4c50 100644 --- a/src/guidellm/dataset/creator.py +++ b/src/guidellm/dataset/creator.py @@ -95,10 +95,10 @@ def create( data, data_args, processor, processor_args, random_seed ) - if isinstance(dataset, (DatasetDict, IterableDatasetDict)): + if isinstance(dataset, DatasetDict | IterableDatasetDict): dataset = cls.extract_dataset_split(dataset, split, split_pref_order) - if not isinstance(dataset, (Dataset, IterableDataset)): + if not isinstance(dataset, Dataset | IterableDataset): raise ValueError( f"Unsupported data type: {type(dataset)} given for {dataset}." ) @@ -145,10 +145,10 @@ def extract_args_column_mappings( def extract_dataset_name( cls, dataset: Union[Dataset, IterableDataset, DatasetDict, IterableDatasetDict] ) -> Optional[str]: - if isinstance(dataset, (DatasetDict, IterableDatasetDict)): + if isinstance(dataset, DatasetDict | IterableDatasetDict): dataset = dataset[list(dataset.keys())[0]] - if isinstance(dataset, (Dataset, IterableDataset)): + if isinstance(dataset, Dataset | IterableDataset): if not hasattr(dataset, "info") or not hasattr( dataset.info, "dataset_name" ): @@ -165,7 +165,7 @@ def extract_dataset_split( specified_split: Union[Literal["auto"], str] = "auto", split_pref_order: Optional[Union[Literal["auto"], list[str]]] = "auto", ) -> Union[Dataset, IterableDataset]: - if not isinstance(dataset, (DatasetDict, IterableDatasetDict)): + if not isinstance(dataset, DatasetDict | IterableDatasetDict): raise ValueError( f"Unsupported data type: {type(dataset)} given for {dataset}." 
) diff --git a/src/guidellm/dataset/file.py b/src/guidellm/dataset/file.py index 5d6df1d9..455ef580 100644 --- a/src/guidellm/dataset/file.py +++ b/src/guidellm/dataset/file.py @@ -31,7 +31,7 @@ class FileDatasetCreator(DatasetCreator): @classmethod def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # noqa: ARG003 - if isinstance(data, (str, Path)) and (path := Path(data)).exists(): + if isinstance(data, str | Path) and (path := Path(data)).exists(): # local folder or py file, assume supported return path.suffix.lower() in cls.SUPPORTED_TYPES @@ -46,7 +46,7 @@ def handle_create( processor_args: Optional[dict[str, Any]], # noqa: ARG003 random_seed: int, # noqa: ARG003 ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: - if not isinstance(data, (str, Path)): + if not isinstance(data, str | Path): raise ValueError(f"Unsupported data type: {type(data)} given for {data}. ") path = Path(data) diff --git a/src/guidellm/dataset/hf_datasets.py b/src/guidellm/dataset/hf_datasets.py index 7f91facd..56c79936 100644 --- a/src/guidellm/dataset/hf_datasets.py +++ b/src/guidellm/dataset/hf_datasets.py @@ -25,11 +25,11 @@ def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # base type is supported return True - if isinstance(data, (str, Path)) and (path := Path(data)).exists(): + if isinstance(data, str | Path) and (path := Path(data)).exists(): # local folder or py file, assume supported return path.is_dir() or path.suffix == ".py" - if isinstance(data, (str, Path)): + if isinstance(data, str | Path): try: # try to load dataset return get_dataset_config_info(data) is not None @@ -47,7 +47,7 @@ def handle_create( processor_args: Optional[dict[str, Any]], # noqa: ARG003 random_seed: int, # noqa: ARG003 ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: - if isinstance(data, (str, Path)): + if isinstance(data, str | Path): data = load_dataset(data, **(data_args or {})) elif data_args: raise ValueError( diff --git a/src/guidellm/scheduler/constraints.py b/src/guidellm/scheduler/constraints.py index c724a74a..c974225a 100644 --- a/src/guidellm/scheduler/constraints.py +++ b/src/guidellm/scheduler/constraints.py @@ -450,7 +450,7 @@ def __call__( current_index = max(0, self.current_index) max_num = ( self.max_num - if isinstance(self.max_num, (int, float)) + if isinstance(self.max_num, int | float) else self.max_num[min(current_index, len(self.max_num) - 1)] ) @@ -489,7 +489,7 @@ def _validate_max_num( raise ValueError( f"max_num must be set and truthful, received {value} ({val} failed)" ) - if not isinstance(val, (int, float)) or val <= 0: + if not isinstance(val, int | float) or val <= 0: raise ValueError( f"max_num must be a positive num, received {value} ({val} failed)" ) @@ -568,7 +568,7 @@ def __call__( current_index = max(0, self.current_index) max_duration = ( self.max_duration - if isinstance(self.max_duration, (int, float)) + if isinstance(self.max_duration, int | float) else self.max_duration[min(current_index, len(self.max_duration) - 1)] ) @@ -607,7 +607,7 @@ def _validate_max_duration( "max_duration must be set and truthful, " f"received {value} ({val} failed)" ) - if not isinstance(val, (int, float)) or val <= 0: + if not isinstance(val, int | float) or val <= 0: raise ValueError( "max_duration must be a positive num," f"received {value} ({val} failed)" @@ -682,7 +682,7 @@ def __call__( current_index = max(0, self.current_index) max_errors = ( self.max_errors - if isinstance(self.max_errors, (int, 
float)) + if isinstance(self.max_errors, int | float) else self.max_errors[min(current_index, len(self.max_errors) - 1)] ) errors_exceeded = state.errored_requests >= max_errors @@ -710,7 +710,7 @@ def _validate_max_errors( "max_errors must be set and truthful, " f"received {value} ({val} failed)" ) - if not isinstance(val, (int, float)) or val <= 0: + if not isinstance(val, int | float) or val <= 0: raise ValueError( f"max_errors must be a positive num,received {value} ({val} failed)" ) @@ -799,7 +799,7 @@ def __call__( current_index = max(0, self.current_index) max_error_rate = ( self.max_error_rate - if isinstance(self.max_error_rate, (int, float)) + if isinstance(self.max_error_rate, int | float) else self.max_error_rate[min(current_index, len(self.max_error_rate) - 1)] ) @@ -850,7 +850,7 @@ def _validate_max_error_rate( "max_error_rate must be set and truthful, " f"received {value} ({val} failed)" ) - if not isinstance(val, (int, float)) or val <= 0 or val >= 1: + if not isinstance(val, int | float) or val <= 0 or val >= 1: raise ValueError( "max_error_rate must be a number between 0 and 1," f"received {value} ({val} failed)" @@ -940,7 +940,7 @@ def __call__( current_index = max(0, self.current_index) max_error_rate = ( self.max_error_rate - if isinstance(self.max_error_rate, (int, float)) + if isinstance(self.max_error_rate, int | float) else self.max_error_rate[min(current_index, len(self.max_error_rate) - 1)] ) @@ -982,7 +982,7 @@ def _validate_max_error_rate( "max_error_rate must be set and truthful, " f"received {value} ({val} failed)" ) - if not isinstance(val, (int, float)) or val <= 0 or val >= 1: + if not isinstance(val, int | float) or val <= 0 or val >= 1: raise ValueError( "max_error_rate must be a number between 0 and 1," f"received {value} ({val} failed)" diff --git a/src/guidellm/scheduler/worker.py b/src/guidellm/scheduler/worker.py index 5f2fb74b..104ab418 100644 --- a/src/guidellm/scheduler/worker.py +++ b/src/guidellm/scheduler/worker.py @@ -310,7 +310,7 @@ async def _process_next_request(self): # Pull request from the queue request, request_info = await self.messaging.get() - if isinstance(request, (list, tuple)): + if isinstance(request, list | tuple): raise NotImplementedError("Multi-turn requests are not yet supported") # Calculate targeted start and set pending state for request diff --git a/src/guidellm/utils/encoding.py b/src/guidellm/utils/encoding.py index 6823fb77..916d6633 100644 --- a/src/guidellm/utils/encoding.py +++ b/src/guidellm/utils/encoding.py @@ -12,7 +12,7 @@ import json from collections.abc import Mapping -from typing import Annotated, Any, ClassVar, Generic, Literal, Optional, TypeVar, cast +from typing import Any, ClassVar, Generic, Literal, TypeVar, cast try: import msgpack # type: ignore[import-untyped] # Optional dependency @@ -45,7 +45,6 @@ HAS_ORJSON = False from pydantic import BaseModel -from typing_extensions import TypeAlias __all__ = [ "Encoder", @@ -60,14 +59,10 @@ ObjT = TypeVar("ObjT") MsgT = TypeVar("MsgT") -SerializationTypesAlias: TypeAlias = Annotated[ - Optional[Literal["dict", "sequence"]], - "Type alias for available serialization strategies", -] -EncodingTypesAlias: TypeAlias = Annotated[ - Optional[Literal["msgpack", "msgspec"]], - "Type alias for available binary encoding formats", -] +# Type alias for available serialization strategies +SerializationTypesAlias = Literal["dict", "sequence"] | None +# "Type alias for available binary encoding formats" +EncodingTypesAlias = Literal["msgpack", "msgspec"] class 
MessageEncoding(Generic[ObjT, MsgT]): @@ -405,7 +400,7 @@ def to_dict(self, obj: Any) -> Any: if isinstance(obj, BaseModel): return self.to_dict_pydantic(obj) - if isinstance(obj, (list, tuple)) and any( + if isinstance(obj, list | tuple) and any( isinstance(item, BaseModel) for item in obj ): return [ @@ -432,7 +427,7 @@ def from_dict(self, data: Any) -> Any: :param data: Dictionary representation possibly containing type metadata :return: Reconstructed object with proper types restored """ - if isinstance(data, (list, tuple)): + if isinstance(data, list | tuple): return [ self.from_dict_pydantic(item) if isinstance(item, dict) and "*PYD*" in item @@ -493,7 +488,7 @@ def to_sequence(self, obj: Any) -> str | Any: if isinstance(obj, BaseModel): payload_type = "pydantic" payload = self.to_sequence_pydantic(obj) - elif isinstance(obj, (list, tuple)) and any( + elif isinstance(obj, list | tuple) and any( isinstance(item, BaseModel) for item in obj ): payload_type = "collection_sequence" @@ -694,33 +689,36 @@ def pack_next_sequence( # noqa: C901, PLR0912 length=(payload_len.bit_length() + 7) // 8 if payload_len > 0 else 1, byteorder="big", ) - if type_ == "pydantic": - payload_type = b"P" - elif type_ == "python": - payload_type = b"p" - elif type_ == "collection_tuple": - payload_type = b"T" - elif type_ == "collection_sequence": - payload_type = b"S" - elif type_ == "collection_mapping": - payload_type = b"M" - else: - raise ValueError(f"Unknown type for packing: {type_}") + match type_: + case "pydantic": + payload_type = b"P" + case "python": + payload_type = b"p" + case "collection_tuple": + payload_type = b"T" + case "collection_sequence": + payload_type = b"S" + case "collection_mapping": + payload_type = b"M" + case _: + raise ValueError(f"Unknown type for packing: {type_}") delimiter = b"|" else: payload_len_output = str(payload_len) - if type_ == "pydantic": - payload_type = "P" - elif type_ == "python": - payload_type = "p" - elif type_ == "collection_tuple": - payload_type = "T" - elif type_ == "collection_sequence": - payload_type = "S" - elif type_ == "collection_mapping": - payload_type = "M" - else: - raise ValueError(f"Unknown type for packing: {type_}") + + match type_: + case "pydantic": + payload_type = "P" + case "python": + payload_type = "p" + case "collection_tuple": + payload_type = "T" + case "collection_sequence": + payload_type = "S" + case "collection_mapping": + payload_type = "M" + case _: + raise ValueError(f"Unknown type for packing: {type_}") delimiter = "|" # Type ignores because types are enforced at runtime diff --git a/src/guidellm/utils/hf_datasets.py b/src/guidellm/utils/hf_datasets.py index 73e55ebc..86f04485 100644 --- a/src/guidellm/utils/hf_datasets.py +++ b/src/guidellm/utils/hf_datasets.py @@ -1,5 +1,4 @@ from pathlib import Path -from typing import Union from datasets import Dataset @@ -11,7 +10,7 @@ } -def save_dataset_to_file(dataset: Dataset, output_path: Union[str, Path]) -> None: +def save_dataset_to_file(dataset: Dataset, output_path: str | Path) -> None: """ Saves a HuggingFace Dataset to file in a supported format. 
diff --git a/src/guidellm/utils/hf_transformers.py b/src/guidellm/utils/hf_transformers.py index 1f2aa1b5..636988c3 100644 --- a/src/guidellm/utils/hf_transformers.py +++ b/src/guidellm/utils/hf_transformers.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Any, Optional, Union +from typing import Any from transformers import AutoTokenizer, PreTrainedTokenizerBase # type: ignore[import] @@ -9,15 +9,15 @@ def check_load_processor( - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], + processor: str | Path | PreTrainedTokenizerBase | None, + processor_args: dict[str, Any] | None, error_msg: str, ) -> PreTrainedTokenizerBase: if processor is None: raise ValueError(f"Processor/Tokenizer is required for {error_msg}.") try: - if isinstance(processor, (str, Path)): + if isinstance(processor, str | Path): loaded = AutoTokenizer.from_pretrained( processor, **(processor_args or {}), diff --git a/src/guidellm/utils/messaging.py b/src/guidellm/utils/messaging.py index 9311259d..4dce576d 100644 --- a/src/guidellm/utils/messaging.py +++ b/src/guidellm/utils/messaging.py @@ -16,13 +16,13 @@ import threading import time from abc import ABC, abstractmethod -from collections.abc import Iterable +from collections.abc import Callable, Iterable from multiprocessing.connection import Connection from multiprocessing.context import BaseContext from multiprocessing.managers import SyncManager from multiprocessing.synchronize import Event as ProcessingEvent from threading import Event as ThreadingEvent -from typing import Any, Callable, Generic, Protocol, TypeVar, cast +from typing import Any, Generic, Protocol, TypeVar, cast import culsans from pydantic import BaseModel @@ -420,7 +420,7 @@ def _create_check_stop_callable( stop_events = tuple( item for item in stop_criteria or [] - if isinstance(item, (ThreadingEvent, ProcessingEvent)) + if isinstance(item, ThreadingEvent | ProcessingEvent) ) stop_callbacks = tuple(item for item in stop_criteria or [] if callable(item)) diff --git a/src/guidellm/utils/mixins.py b/src/guidellm/utils/mixins.py index b001ff2d..7cf28d00 100644 --- a/src/guidellm/utils/mixins.py +++ b/src/guidellm/utils/mixins.py @@ -91,7 +91,7 @@ def create_info_dict(cls, obj: Any) -> dict[str, Any]: "attributes": ( { key: val - if isinstance(val, (str, int, float, bool, list, dict)) + if isinstance(val, str | int | float | bool | list | dict) else repr(val) for key, val in obj.__dict__.items() if not key.startswith("_") diff --git a/src/guidellm/utils/pydantic_utils.py b/src/guidellm/utils/pydantic_utils.py index 55816ef1..05f5ad81 100644 --- a/src/guidellm/utils/pydantic_utils.py +++ b/src/guidellm/utils/pydantic_utils.py @@ -11,11 +11,10 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, ClassVar, Generic, TypeVar, cast +from typing import Any, ClassVar, Generic, TypeVar, cast, get_args, get_origin from pydantic import BaseModel, ConfigDict, Field, GetCoreSchemaHandler from pydantic_core import CoreSchema, core_schema -from typing_extensions import get_args, get_origin from guidellm.utils.registry import RegistryMixin diff --git a/src/guidellm/utils/random.py b/src/guidellm/utils/random.py index ceef20b9..6c8f396d 100644 --- a/src/guidellm/utils/random.py +++ b/src/guidellm/utils/random.py @@ -1,6 +1,5 @@ import random from collections.abc import Iterator -from typing import Optional __all__ = ["IntegerRangeSampler"] @@ -9,9 +8,9 @@ class IntegerRangeSampler: def __init__( self, 
average: int, - variance: Optional[int], - min_value: Optional[int], - max_value: Optional[int], + variance: int | None, + min_value: int | None, + max_value: int | None, random_seed: int, ): self.average = average diff --git a/src/guidellm/utils/registry.py b/src/guidellm/utils/registry.py index e6f1b657..e4727cbd 100644 --- a/src/guidellm/utils/registry.py +++ b/src/guidellm/utils/registry.py @@ -10,7 +10,8 @@ from __future__ import annotations -from typing import Any, Callable, ClassVar, Generic, TypeVar, cast +from collections.abc import Callable +from typing import Any, ClassVar, Generic, TypeVar, cast from guidellm.utils.auto_importer import AutoImporterMixin @@ -103,7 +104,7 @@ def register_decorator( if name is None: name = obj.__name__ - elif not isinstance(name, (str, list)): + elif not isinstance(name, str | list): raise ValueError( "RegistryMixin.register_decorator name must be a string or " f"an iterable of strings. Got {name}." diff --git a/src/guidellm/utils/statistics.py b/src/guidellm/utils/statistics.py index acd9d4f1..04484c2c 100644 --- a/src/guidellm/utils/statistics.py +++ b/src/guidellm/utils/statistics.py @@ -149,7 +149,7 @@ def from_distribution_function( in the output :return: DistributionSummary instance with calculated statistical metrics """ - values, weights = zip(*distribution) if distribution else ([], []) + values, weights = zip(*distribution, strict=True) if distribution else ([], []) values = np.array(values) # type: ignore[assignment] weights = np.array(weights) # type: ignore[assignment] @@ -247,7 +247,7 @@ def from_values( ) return DistributionSummary.from_distribution_function( - distribution=list(zip(values, weights)), + distribution=list(zip(values, weights, strict=True)), include_cdf=include_cdf, ) @@ -389,7 +389,8 @@ def from_iterable_request_times( events[global_end] = 0 for (_, end), first_iter, first_iter_count, total_count in zip( - requests, first_iter_times, first_iter_counts, iter_counts + requests, first_iter_times, first_iter_counts, iter_counts, + strict=True ): events[first_iter] += first_iter_count @@ -499,36 +500,36 @@ def from_values( ) _, successful_values, successful_weights = ( - zip(*successful) + zip(*successful, strict=True) if ( successful := list( filter( lambda val: val[0] == "successful", - zip(value_types, values, weights), + zip(value_types, values, weights, strict=True), ) ) ) else ([], [], []) ) _, incomplete_values, incomplete_weights = ( - zip(*incomplete) + zip(*incomplete, strict=True) if ( incomplete := list( filter( lambda val: val[0] == "incomplete", - zip(value_types, values, weights), + zip(value_types, values, weights, strict=True), ) ) ) else ([], [], []) ) _, errored_values, errored_weights = ( - zip(*errored) + zip(*errored, strict=True) if ( errored := list( filter( lambda val: val[0] == "error", - zip(value_types, values, weights), + zip(value_types, values, weights, strict=True), ) ) ) @@ -604,36 +605,36 @@ def from_request_times( ) _, successful_requests = ( - zip(*successful) + zip(*successful, strict=True) if ( successful := list( filter( lambda val: val[0] == "successful", - zip(request_types, requests), + zip(request_types, requests, strict=True), ) ) ) else ([], []) ) _, incomplete_requests = ( - zip(*incomplete) + zip(*incomplete, strict=True) if ( incomplete := list( filter( lambda val: val[0] == "incomplete", - zip(request_types, requests), + zip(request_types, requests, strict=True), ) ) ) else ([], []) ) _, errored_requests = ( - zip(*errored) + zip(*errored, strict=True) if ( errored := list( 
filter( lambda val: val[0] == "error", - zip(request_types, requests), + zip(request_types, requests, strict=True), ) ) ) @@ -734,7 +735,7 @@ def from_iterable_request_times( successful_iter_counts, successful_first_iter_counts, ) = ( - zip(*successful) + zip(*successful, strict=True) if ( successful := list( filter( @@ -745,6 +746,7 @@ def from_iterable_request_times( first_iter_times, iter_counts, first_iter_counts, + strict=True, ), ) ) @@ -758,7 +760,7 @@ def from_iterable_request_times( incomplete_iter_counts, incomplete_first_iter_counts, ) = ( - zip(*incomplete) + zip(*incomplete, strict=True) if ( incomplete := list( filter( @@ -769,6 +771,7 @@ def from_iterable_request_times( first_iter_times, iter_counts, first_iter_counts, + strict=True, ), ) ) @@ -782,7 +785,7 @@ def from_iterable_request_times( errored_iter_counts, errored_first_iter_counts, ) = ( - zip(*errored) + zip(*errored, strict=True) if ( errored := list( filter( @@ -793,6 +796,7 @@ def from_iterable_request_times( first_iter_times, iter_counts, first_iter_counts, + strict=True, ), ) ) @@ -904,7 +908,7 @@ def __add__(self, value: Any) -> float: :return: Updated mean after adding the value :raises ValueError: If value is not numeric (int or float) """ - if not isinstance(value, (int, float)): + if not isinstance(value, int | float): raise ValueError( f"Value must be an int or float, got {type(value)} instead.", ) @@ -921,7 +925,7 @@ def __iadd__(self, value: Any) -> RunningStats: :return: Self reference for method chaining :raises ValueError: If value is not numeric (int or float) """ - if not isinstance(value, (int, float)): + if not isinstance(value, int | float): raise ValueError( f"Value must be an int or float, got {type(value)} instead.", ) diff --git a/src/guidellm/utils/synchronous.py b/src/guidellm/utils/synchronous.py index 64c14e94..d37daec2 100644 --- a/src/guidellm/utils/synchronous.py +++ b/src/guidellm/utils/synchronous.py @@ -16,9 +16,6 @@ from multiprocessing.synchronize import Event as ProcessingEvent from threading import Barrier as ThreadingBarrier from threading import Event as ThreadingEvent -from typing import Annotated, Union - -from typing_extensions import TypeAlias __all__ = [ "SyncObjectTypesAlias", @@ -28,10 +25,10 @@ ] -SyncObjectTypesAlias: TypeAlias = Annotated[ - Union[ThreadingEvent, ProcessingEvent, ThreadingBarrier, ProcessingBarrier], - "Type alias for threading and multiprocessing synchronization object types", -] +# Type alias for threading and multiprocessing synchronization object types +SyncObjectTypesAlias = ( + ThreadingEvent | ProcessingEvent | ThreadingBarrier | ProcessingBarrier +) async def wait_for_sync_event( @@ -146,7 +143,7 @@ async def wait_for_sync_objects( tasks = [ asyncio.create_task( wait_for_sync_barrier(obj, poll_interval) - if isinstance(obj, (ThreadingBarrier, ProcessingBarrier)) + if isinstance(obj, ThreadingBarrier | ProcessingBarrier) else wait_for_sync_event(obj, poll_interval) ) for obj in objects diff --git a/src/guidellm/utils/typing.py b/src/guidellm/utils/typing.py index 8146ea1e..8d3580ef 100644 --- a/src/guidellm/utils/typing.py +++ b/src/guidellm/utils/typing.py @@ -1,14 +1,9 @@ from __future__ import annotations from collections.abc import Iterator +from types import UnionType from typing import Annotated, Literal, Union, get_args, get_origin -# Backwards compatibility for Python <3.10 -try: - from types import UnionType # type: ignore[attr-defined] -except ImportError: - UnionType = Union - # Backwards compatibility for Python <3.12 try: from 
typing import TypeAliasType # type: ignore[attr-defined] From 1bd8846a10b58b1b3fdce55925335621e32d0c00 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Thu, 9 Oct 2025 18:14:33 -0400 Subject: [PATCH 70/90] Run auto-formatter Signed-off-by: Jared O'Connell --- setup.py | 7 +- src/guidellm/backends/objects.py | 28 +++---- src/guidellm/backends/openai.py | 74 +++++++++---------- src/guidellm/benchmark/aggregator.py | 22 +++--- src/guidellm/benchmark/output.py | 16 ++-- src/guidellm/benchmark/profile.py | 4 +- src/guidellm/benchmark/scenario.py | 3 +- src/guidellm/dataset/creator.py | 38 +++++----- src/guidellm/dataset/entrypoints.py | 12 +-- src/guidellm/dataset/file.py | 16 ++-- src/guidellm/dataset/hf_datasets.py | 12 +-- src/guidellm/dataset/in_memory.py | 12 +-- src/guidellm/dataset/synthetic.py | 38 +++++----- src/guidellm/logger.py | 2 +- src/guidellm/preprocess/dataset.py | 44 +++++------ src/guidellm/presentation/data_models.py | 16 ++-- src/guidellm/presentation/injector.py | 3 +- src/guidellm/request/loader.py | 30 +++----- src/guidellm/request/request.py | 4 +- src/guidellm/scheduler/objects.py | 6 +- src/guidellm/utils/statistics.py | 3 +- tests/integration/scheduler/test_scheduler.py | 2 +- tests/unit/benchmark/test_output.py | 9 ++- tests/unit/dataset/test_synthetic.py | 2 +- tests/unit/mock_backend.py | 12 +-- tests/unit/mock_benchmark.py | 1 + tests/unit/utils/test_encoding.py | 2 +- tests/unit/utils/test_typing.py | 5 +- 28 files changed, 203 insertions(+), 220 deletions(-) diff --git a/setup.py b/setup.py index 623bad28..d3b92889 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,6 @@ import os import re from pathlib import Path -from typing import Optional, Union from packaging.version import Version from setuptools import setup @@ -11,7 +10,7 @@ TAG_VERSION_PATTERN = re.compile(r"^v(\d+\.\d+\.\d+)$") -def get_last_version_diff() -> tuple[Version, Optional[str], Optional[int]]: +def get_last_version_diff() -> tuple[Version, str | None, int | None]: """ Get the last version, last tag, and the number of commits since the last tag. If no tags are found, return the last release version and None for the tag/commits. @@ -38,8 +37,8 @@ def get_last_version_diff() -> tuple[Version, Optional[str], Optional[int]]: def get_next_version( - build_type: str, build_iteration: Optional[Union[str, int]] -) -> tuple[Version, Optional[str], int]: + build_type: str, build_iteration: str | int | None +) -> tuple[Version, str | None, int]: """ Get the next version based on the build type and iteration. - build_type == release: take the last version and add a post if build iteration diff --git a/src/guidellm/backends/objects.py b/src/guidellm/backends/objects.py index 05280940..001aeb70 100644 --- a/src/guidellm/backends/objects.py +++ b/src/guidellm/backends/objects.py @@ -7,7 +7,7 @@ """ import uuid -from typing import Any, Literal, Optional +from typing import Any, Literal from pydantic import Field @@ -73,32 +73,32 @@ class GenerationResponse(StandardBaseModel): request_args: dict[str, Any] = Field( description="Arguments passed to the backend for this request." ) - value: Optional[str] = Field( + value: str | None = Field( default=None, description="Complete generated text content. None for streaming responses.", ) - delta: Optional[str] = Field( + delta: str | None = Field( default=None, description="Incremental text content for streaming responses." ) iterations: int = Field( default=0, description="Number of generation iterations completed." 
) - request_prompt_tokens: Optional[int] = Field( + request_prompt_tokens: int | None = Field( default=None, description="Token count from the original request prompt." ) - request_output_tokens: Optional[int] = Field( + request_output_tokens: int | None = Field( default=None, description="Expected output token count from the original request.", ) - response_prompt_tokens: Optional[int] = Field( + response_prompt_tokens: int | None = Field( default=None, description="Actual prompt token count reported by the backend." ) - response_output_tokens: Optional[int] = Field( + response_output_tokens: int | None = Field( default=None, description="Actual output token count reported by the backend." ) @property - def prompt_tokens(self) -> Optional[int]: + def prompt_tokens(self) -> int | None: """ :return: The number of prompt tokens used in the request (response_prompt_tokens if available, otherwise request_prompt_tokens). @@ -106,7 +106,7 @@ def prompt_tokens(self) -> Optional[int]: return self.response_prompt_tokens or self.request_prompt_tokens @property - def output_tokens(self) -> Optional[int]: + def output_tokens(self) -> int | None: """ :return: The number of output tokens generated in the response (response_output_tokens if available, otherwise request_output_tokens). @@ -114,7 +114,7 @@ def output_tokens(self) -> Optional[int]: return self.response_output_tokens or self.request_output_tokens @property - def total_tokens(self) -> Optional[int]: + def total_tokens(self) -> int | None: """ :return: The total number of tokens used in the request and response. Sum of prompt_tokens and output_tokens. @@ -125,7 +125,7 @@ def total_tokens(self) -> Optional[int]: def preferred_prompt_tokens( self, preferred_source: Literal["request", "response"] - ) -> Optional[int]: + ) -> int | None: if preferred_source == "request": return self.request_prompt_tokens or self.response_prompt_tokens else: @@ -133,7 +133,7 @@ def preferred_prompt_tokens( def preferred_output_tokens( self, preferred_source: Literal["request", "response"] - ) -> Optional[int]: + ) -> int | None: if preferred_source == "request": return self.request_output_tokens or self.response_output_tokens else: @@ -146,11 +146,11 @@ class GenerationRequestTimings(MeasuredRequestTimings): """Timing model for tracking generation request lifecycle events.""" timings_type: Literal["generation_request_timings"] = "generation_request_timings" - first_iteration: Optional[float] = Field( + first_iteration: float | None = Field( default=None, description="Unix timestamp when the first generation iteration began.", ) - last_iteration: Optional[float] = Field( + last_iteration: float | None = Field( default=None, description="Unix timestamp when the last generation iteration completed.", ) diff --git a/src/guidellm/backends/openai.py b/src/guidellm/backends/openai.py index fd14ee65..fd539063 100644 --- a/src/guidellm/backends/openai.py +++ b/src/guidellm/backends/openai.py @@ -17,7 +17,7 @@ import time from collections.abc import AsyncIterator from pathlib import Path -from typing import Any, ClassVar, Optional, Union +from typing import Any, ClassVar import httpx from PIL import Image @@ -38,8 +38,8 @@ class UsageStats: """Token usage statistics for generation requests.""" - prompt_tokens: Optional[int] = None - output_tokens: Optional[int] = None + prompt_tokens: int | None = None + output_tokens: int | None = None @Backend.register("openai_http") @@ -78,19 +78,19 @@ class OpenAIHTTPBackend(Backend): def __init__( self, target: str, - model: 
Optional[str] = None, - api_key: Optional[str] = None, - organization: Optional[str] = None, - project: Optional[str] = None, + model: str | None = None, + api_key: str | None = None, + organization: str | None = None, + project: str | None = None, timeout: float = 60.0, http2: bool = True, follow_redirects: bool = True, - max_output_tokens: Optional[int] = None, + max_output_tokens: int | None = None, stream_response: bool = True, - extra_query: Optional[dict] = None, - extra_body: Optional[dict] = None, - remove_from_body: Optional[list[str]] = None, - headers: Optional[dict] = None, + extra_query: dict | None = None, + extra_body: dict | None = None, + remove_from_body: list[str] | None = None, + headers: dict | None = None, verify: bool = False, ): """ @@ -137,7 +137,7 @@ def __init__( # Runtime state self._in_process = False - self._async_client: Optional[httpx.AsyncClient] = None + self._async_client: httpx.AsyncClient | None = None @property def info(self) -> dict[str, Any]: @@ -264,7 +264,7 @@ async def available_models(self) -> list[str]: return [item["id"] for item in response.json()["data"]] - async def default_model(self) -> Optional[str]: + async def default_model(self) -> str | None: """ Get the default model for this backend. @@ -280,7 +280,7 @@ async def resolve( self, request: GenerationRequest, request_info: ScheduledRequestInfo, - history: Optional[list[tuple[GenerationRequest, GenerationResponse]]] = None, + history: list[tuple[GenerationRequest, GenerationResponse]] | None = None, ) -> AsyncIterator[tuple[GenerationResponse, ScheduledRequestInfo]]: """ Process a generation request and yield progressive responses. @@ -363,12 +363,12 @@ async def resolve( async def text_completions( self, - prompt: Union[str, list[str]], - request_id: Optional[str], # noqa: ARG002 - output_token_count: Optional[int] = None, + prompt: str | list[str], + request_id: str | None, # noqa: ARG002 + output_token_count: int | None = None, stream_response: bool = True, **kwargs, - ) -> AsyncIterator[tuple[Optional[str], Optional[UsageStats]]]: + ) -> AsyncIterator[tuple[str | None, UsageStats | None]]: """ Generate text completions using the /v1/completions endpoint. @@ -431,17 +431,13 @@ async def text_completions( async def chat_completions( self, - content: Union[ - str, - list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], - Any, - ], - request_id: Optional[str] = None, # noqa: ARG002 - output_token_count: Optional[int] = None, + content: str | list[str | dict[str, str | dict[str, str]] | Path | Image.Image] | Any, + request_id: str | None = None, # noqa: ARG002 + output_token_count: int | None = None, raw_content: bool = False, stream_response: bool = True, **kwargs, - ) -> AsyncIterator[tuple[Optional[str], Optional[UsageStats]]]: + ) -> AsyncIterator[tuple[str | None, UsageStats | None]]: """ Generate chat completions using the /v1/chat/completions endpoint. 
@@ -502,10 +498,10 @@ async def chat_completions( def _build_headers( self, - api_key: Optional[str], - organization: Optional[str], - project: Optional[str], - user_headers: Optional[dict], + api_key: str | None, + organization: str | None, + project: str | None, + user_headers: dict | None, ) -> dict[str, str]: headers = {} @@ -541,11 +537,7 @@ def _get_params(self, endpoint_type: str) -> dict[str, str]: def _get_chat_messages( self, - content: Union[ - str, - list[Union[str, dict[str, Union[str, dict[str, str]]], Path, Image.Image]], - Any, - ], + content: str | list[str | dict[str, str | dict[str, str]] | Path | Image.Image] | Any, ) -> list[dict[str, Any]]: if isinstance(content, str): return [{"role": "user", "content": content}] @@ -567,7 +559,7 @@ def _get_chat_messages( return [{"role": "user", "content": resolved_content}] def _get_chat_message_media_item( - self, item: Union[Path, Image.Image] + self, item: Path | Image.Image ) -> dict[str, Any]: if isinstance(item, Image.Image): encoded = base64.b64encode(item.tobytes()).decode("utf-8") @@ -597,8 +589,8 @@ def _get_chat_message_media_item( def _get_body( self, endpoint_type: str, - request_kwargs: Optional[dict[str, Any]], - max_output_tokens: Optional[int] = None, + request_kwargs: dict[str, Any] | None, + max_output_tokens: int | None = None, **kwargs, ) -> dict[str, Any]: # Start with endpoint-specific extra body parameters @@ -628,7 +620,7 @@ def _get_body( return {key: val for key, val in body.items() if val is not None} - def _get_completions_text_content(self, data: dict) -> Optional[str]: + def _get_completions_text_content(self, data: dict) -> str | None: if not data.get("choices"): return None @@ -639,7 +631,7 @@ def _get_completions_text_content(self, data: dict) -> Optional[str]: or choice.get("message", {}).get("content") ) - def _get_completions_usage_stats(self, data: dict) -> Optional[UsageStats]: + def _get_completions_usage_stats(self, data: dict) -> UsageStats | None: if not data.get("usage"): return None diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py index be70276b..b33a7b14 100644 --- a/src/guidellm/benchmark/aggregator.py +++ b/src/guidellm/benchmark/aggregator.py @@ -975,7 +975,7 @@ def _calculate_requests_per_second( filtered_statuses = [] filtered_times = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined( safe_getattr(request.scheduler_info.request_timings, "request_start"), safe_getattr(request.scheduler_info.request_timings, "request_end"), @@ -1005,7 +1005,7 @@ def _calculate_request_concurrency( filtered_statuses = [] filtered_times = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined( safe_getattr(request.scheduler_info.request_timings, "request_start"), safe_getattr(request.scheduler_info.request_timings, "request_end"), @@ -1035,7 +1035,7 @@ def _calculate_request_latency( filtered_statuses = [] filtered_values = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.request_latency): continue @@ -1056,7 +1056,7 @@ def _calculate_prompt_token_count( filtered_statuses = [] filtered_values = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.prompt_tokens): continue @@ -1077,7 +1077,7 @@ def 
_calculate_output_token_count( filtered_statuses = [] filtered_values = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.output_tokens): continue @@ -1098,7 +1098,7 @@ def _calculate_total_token_count( filtered_statuses = [] filtered_values = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.total_tokens): continue @@ -1119,7 +1119,7 @@ def _calculate_time_to_first_token_ms( filtered_statuses = [] filtered_values = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.time_to_first_token_ms): continue @@ -1141,7 +1141,7 @@ def _calculate_time_per_output_token_ms( filtered_values = [] filtered_weights = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.time_to_first_token_ms): continue @@ -1174,7 +1174,7 @@ def _calculate_inter_token_latency_ms( filtered_values = [] filtered_weights = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.inter_token_latency_ms): continue @@ -1199,7 +1199,7 @@ def _calculate_output_tokens_per_second( filtered_first_iter_times = [] filtered_iter_counts = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.output_tokens_per_second): continue @@ -1234,7 +1234,7 @@ def _calculate_tokens_per_second( filtered_iter_counts = [] filtered_first_iter_counts = [] - for status, request in zip(statuses, requests): + for status, request in zip(statuses, requests, strict=False): if not all_defined(request.tokens_per_second): continue diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index c4e8fb0f..cacadc94 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -34,10 +34,11 @@ DistributionSummary, RegistryMixin, StatusDistributionSummary, + camelize_str, + recursive_key_update, safe_format_timestamp, split_text_list_by_length, ) -from guidellm.utils import recursive_key_update, camelize_str __all__ = [ "GenerativeBenchmarkerCSV", @@ -369,7 +370,7 @@ def _print_line( f"Value and style length mismatch: {len(value)} vs {len(style)}" ) - for val, sty in zip(value, style): + for val, sty in zip(value, style, strict=False): text.append(val, style=sty) self.console.print(Padding.indent(text, indent)) @@ -568,8 +569,8 @@ async def finalize(self, report: GenerativeBenchmarksReport) -> Path: benchmark_values: list[str | float | list[float]] = [] # Add basic run description info - desc_headers, desc_values = ( - self._get_benchmark_desc_headers_and_values(benchmark) + desc_headers, desc_values = self._get_benchmark_desc_headers_and_values( + benchmark ) benchmark_headers.extend(desc_headers) benchmark_values.extend(desc_values) @@ -680,7 +681,8 @@ def _get_benchmark_status_metrics_stats( return headers, values def _get_benchmark_extras_headers_and_values( - self, benchmark: GenerativeBenchmark, + self, + benchmark: GenerativeBenchmark, ) -> tuple[list[str], list[str]]: headers = ["Profile", "Backend", "Generator Data"] values: list[str] = [ @@ -733,9 +735,7 @@ async def finalize(self, report: GenerativeBenchmarksReport) -> Path: ui_api_data = {} for k, v in camel_data.items(): 
placeholder_key = f"window.{k} = {{}};" - replacement_value = ( - f"window.{k} = {json.dumps(v, indent=2)};\n" - ) + replacement_value = f"window.{k} = {json.dumps(v, indent=2)};\n" ui_api_data[placeholder_key] = replacement_value create_report(ui_api_data, output_path) diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index 3ff8d0e0..ec4fa839 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -679,7 +679,9 @@ def next_strategy( prev_benchmark.metrics.requests_per_second.successful.mean ) if self.synchronous_rate <= 0 and self.throughput_rate <= 0: - raise RuntimeError("Invalid rates in sweep; aborting. Were there any successful requests?") + raise RuntimeError( + "Invalid rates in sweep; aborting. Were there any successful requests?" + ) self.measured_rates = list( np.linspace( self.synchronous_rate, diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 5299616f..73a9a050 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -1,10 +1,11 @@ from __future__ import annotations import json +from collections.abc import Callable from functools import cache, wraps from inspect import Parameter, signature from pathlib import Path -from typing import Annotated, Any, Callable, Literal, TypeVar +from typing import Annotated, Any, Literal, TypeVar import yaml from loguru import logger diff --git a/src/guidellm/dataset/creator.py b/src/guidellm/dataset/creator.py index b95f4c50..fe712c23 100644 --- a/src/guidellm/dataset/creator.py +++ b/src/guidellm/dataset/creator.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from pathlib import Path -from typing import Any, Literal, Optional, Union +from typing import Any, Literal from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from transformers import PreTrainedTokenizerBase # type: ignore[import] @@ -80,12 +80,12 @@ class DatasetCreator(ABC): def create( cls, data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, + processor_args: dict[str, Any] | None, random_seed: int = 42, - split_pref_order: Optional[list[str]] = None, - ) -> tuple[Union[Dataset, IterableDataset], dict[ColumnInputTypes, str]]: + split_pref_order: list[str] | None = None, + ) -> tuple[Dataset | IterableDataset, dict[ColumnInputTypes, str]]: if not cls.is_supported(data, data_args): raise ValueError(f"Unsupported data type: {type(data)} given for {data}. 
") @@ -106,7 +106,7 @@ def create( return dataset, column_mappings @classmethod - def extract_args_split(cls, data_args: Optional[dict[str, Any]]) -> str: + def extract_args_split(cls, data_args: dict[str, Any] | None) -> str: split = "auto" if data_args and "split" in data_args: @@ -118,7 +118,7 @@ def extract_args_split(cls, data_args: Optional[dict[str, Any]]) -> str: @classmethod def extract_args_column_mappings( cls, - data_args: Optional[dict[str, Any]], + data_args: dict[str, Any] | None, ) -> dict[ColumnInputTypes, str]: columns: dict[ColumnInputTypes, str] = {} @@ -143,8 +143,8 @@ def extract_args_column_mappings( @classmethod def extract_dataset_name( - cls, dataset: Union[Dataset, IterableDataset, DatasetDict, IterableDatasetDict] - ) -> Optional[str]: + cls, dataset: Dataset | IterableDataset | DatasetDict | IterableDatasetDict + ) -> str | None: if isinstance(dataset, DatasetDict | IterableDatasetDict): dataset = dataset[list(dataset.keys())[0]] @@ -161,10 +161,10 @@ def extract_dataset_name( @classmethod def extract_dataset_split( cls, - dataset: Union[DatasetDict, IterableDatasetDict], - specified_split: Union[Literal["auto"], str] = "auto", - split_pref_order: Optional[Union[Literal["auto"], list[str]]] = "auto", - ) -> Union[Dataset, IterableDataset]: + dataset: DatasetDict | IterableDatasetDict, + specified_split: Literal["auto"] | str = "auto", + split_pref_order: Literal["auto"] | list[str] | None = "auto", + ) -> Dataset | IterableDataset: if not isinstance(dataset, DatasetDict | IterableDatasetDict): raise ValueError( f"Unsupported data type: {type(dataset)} given for {dataset}." @@ -199,15 +199,15 @@ def extract_dataset_split( @classmethod @abstractmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: ... + def is_supported(cls, data: Any, data_args: dict[str, Any] | None) -> bool: ... @classmethod @abstractmethod def handle_create( cls, data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, + processor_args: dict[str, Any] | None, random_seed: int, - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: ... + ) -> Dataset | DatasetDict | IterableDataset | IterableDatasetDict: ... 
diff --git a/src/guidellm/dataset/entrypoints.py b/src/guidellm/dataset/entrypoints.py index cf689956..1da2222a 100644 --- a/src/guidellm/dataset/entrypoints.py +++ b/src/guidellm/dataset/entrypoints.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Any, Optional, Union +from typing import Any from datasets import Dataset, IterableDataset from transformers import PreTrainedTokenizerBase # type: ignore[import] @@ -15,12 +15,12 @@ def load_dataset( data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, + processor_args: dict[str, Any] | None, random_seed: int = 42, - split_pref_order: Optional[list[str]] = None, -) -> tuple[Union[Dataset, IterableDataset], dict[ColumnInputTypes, str]]: + split_pref_order: list[str] | None = None, +) -> tuple[Dataset | IterableDataset, dict[ColumnInputTypes, str]]: creators = [ InMemoryDatasetCreator, SyntheticDatasetCreator, diff --git a/src/guidellm/dataset/file.py b/src/guidellm/dataset/file.py index 455ef580..718cb46f 100644 --- a/src/guidellm/dataset/file.py +++ b/src/guidellm/dataset/file.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Any, Optional, Union +from typing import Any import pandas as pd # type: ignore[import] from datasets import ( @@ -30,7 +30,7 @@ class FileDatasetCreator(DatasetCreator): } @classmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # noqa: ARG003 + def is_supported(cls, data: Any, data_args: dict[str, Any] | None) -> bool: # noqa: ARG003 if isinstance(data, str | Path) and (path := Path(data)).exists(): # local folder or py file, assume supported return path.suffix.lower() in cls.SUPPORTED_TYPES @@ -41,11 +41,11 @@ def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: def handle_create( cls, data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], # noqa: ARG003 - processor_args: Optional[dict[str, Any]], # noqa: ARG003 + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, # noqa: ARG003 + processor_args: dict[str, Any] | None, # noqa: ARG003 random_seed: int, # noqa: ARG003 - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: + ) -> Dataset | DatasetDict | IterableDataset | IterableDatasetDict: if not isinstance(data, str | Path): raise ValueError(f"Unsupported data type: {type(data)} given for {data}. 
") @@ -63,8 +63,8 @@ def handle_create( @classmethod def load_dataset( - cls, path: Path, data_args: Optional[dict[str, Any]] - ) -> Union[Dataset, IterableDataset]: + cls, path: Path, data_args: dict[str, Any] | None + ) -> Dataset | IterableDataset: if path.suffix.lower() in {".txt", ".text"}: with path.open("r") as file: items = file.readlines() diff --git a/src/guidellm/dataset/hf_datasets.py b/src/guidellm/dataset/hf_datasets.py index 56c79936..bd8d8c23 100644 --- a/src/guidellm/dataset/hf_datasets.py +++ b/src/guidellm/dataset/hf_datasets.py @@ -1,5 +1,5 @@ from pathlib import Path -from typing import Any, Optional, Union +from typing import Any from datasets import ( Dataset, @@ -18,7 +18,7 @@ class HFDatasetsCreator(DatasetCreator): @classmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # noqa: ARG003 + def is_supported(cls, data: Any, data_args: dict[str, Any] | None) -> bool: # noqa: ARG003 if isinstance( data, (Dataset, DatasetDict, IterableDataset, IterableDatasetDict) ): @@ -42,11 +42,11 @@ def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: def handle_create( cls, data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], # noqa: ARG003 - processor_args: Optional[dict[str, Any]], # noqa: ARG003 + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, # noqa: ARG003 + processor_args: dict[str, Any] | None, # noqa: ARG003 random_seed: int, # noqa: ARG003 - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: + ) -> Dataset | DatasetDict | IterableDataset | IterableDatasetDict: if isinstance(data, str | Path): data = load_dataset(data, **(data_args or {})) elif data_args: diff --git a/src/guidellm/dataset/in_memory.py b/src/guidellm/dataset/in_memory.py index af84f658..0461948c 100644 --- a/src/guidellm/dataset/in_memory.py +++ b/src/guidellm/dataset/in_memory.py @@ -1,6 +1,6 @@ from collections.abc import Iterable from pathlib import Path -from typing import Any, Optional, Union +from typing import Any from datasets import ( Dataset, @@ -17,18 +17,18 @@ class InMemoryDatasetCreator(DatasetCreator): @classmethod - def is_supported(cls, data: Any, data_args: Optional[dict[str, Any]]) -> bool: # noqa: ARG003 + def is_supported(cls, data: Any, data_args: dict[str, Any] | None) -> bool: # noqa: ARG003 return isinstance(data, Iterable) and not isinstance(data, str) @classmethod def handle_create( cls, data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], # noqa: ARG003 - processor_args: Optional[dict[str, Any]], # noqa: ARG003 + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, # noqa: ARG003 + processor_args: dict[str, Any] | None, # noqa: ARG003 random_seed: int, # noqa: ARG003 - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: + ) -> Dataset | DatasetDict | IterableDataset | IterableDatasetDict: if not isinstance(data, Iterable): raise TypeError( f"Unsupported data format. 
Expected Iterable[Any], got {type(data)}" diff --git a/src/guidellm/dataset/synthetic.py b/src/guidellm/dataset/synthetic.py index 8c30f0f7..8a1626fe 100644 --- a/src/guidellm/dataset/synthetic.py +++ b/src/guidellm/dataset/synthetic.py @@ -3,7 +3,7 @@ from collections.abc import Iterable, Iterator from itertools import cycle from pathlib import Path -from typing import Any, Literal, Optional, Union +from typing import Any, Literal import yaml from datasets import ( @@ -35,17 +35,17 @@ class SyntheticDatasetConfig(BaseModel): description="The average number of text tokens generated for prompts.", gt=0, ) - prompt_tokens_stdev: Optional[int] = Field( + prompt_tokens_stdev: int | None = Field( description="The standard deviation of the tokens generated for prompts.", gt=0, default=None, ) - prompt_tokens_min: Optional[int] = Field( + prompt_tokens_min: int | None = Field( description="The minimum number of text tokens generated for prompts.", gt=0, default=None, ) - prompt_tokens_max: Optional[int] = Field( + prompt_tokens_max: int | None = Field( description="The maximum number of text tokens generated for prompts.", gt=0, default=None, @@ -54,17 +54,17 @@ class SyntheticDatasetConfig(BaseModel): description="The average number of text tokens generated for outputs.", gt=0, ) - output_tokens_stdev: Optional[int] = Field( + output_tokens_stdev: int | None = Field( description="The standard deviation of the tokens generated for outputs.", gt=0, default=None, ) - output_tokens_min: Optional[int] = Field( + output_tokens_min: int | None = Field( description="The minimum number of text tokens generated for outputs.", gt=0, default=None, ) - output_tokens_max: Optional[int] = Field( + output_tokens_max: int | None = Field( description="The maximum number of text tokens generated for outputs.", gt=0, default=None, @@ -80,7 +80,7 @@ class SyntheticDatasetConfig(BaseModel): ) @staticmethod - def parse_str(data: Union[str, Path]) -> "SyntheticDatasetConfig": + def parse_str(data: str | Path) -> "SyntheticDatasetConfig": if ( isinstance(data, Path) or data.strip().endswith(".config") @@ -117,7 +117,7 @@ def parse_key_value_pairs(data: str) -> "SyntheticDatasetConfig": return SyntheticDatasetConfig(**config_dict) # type: ignore[arg-type] @staticmethod - def parse_config_file(data: Union[str, Path]) -> "SyntheticDatasetConfig": + def parse_config_file(data: str | Path) -> "SyntheticDatasetConfig": with Path(data).open("r") as file: config_dict = yaml.safe_load(file) @@ -128,7 +128,7 @@ class SyntheticTextItemsGenerator( Iterable[ dict[ Literal["prompt", "prompt_tokens_count", "output_tokens_count"], - Union[str, int], + str | int, ] ] ): @@ -150,7 +150,7 @@ def __iter__( ) -> Iterator[ dict[ Literal["prompt", "prompt_tokens_count", "output_tokens_count"], - Union[str, int], + str | int, ] ]: prompt_tokens_sampler = IntegerRangeSampler( @@ -177,7 +177,7 @@ def __iter__( for _, prompt_tokens, output_tokens in zip( range(self.config.samples), prompt_tokens_sampler, - output_tokens_sampler, + output_tokens_sampler, strict=False, ): start_index = rand.randint(0, len(self.text_creator.words)) prompt_text = self.processor.decode( @@ -194,7 +194,7 @@ def __iter__( } def _create_prompt( - self, prompt_tokens: int, start_index: int, unique_prefix: Optional[int] = None + self, prompt_tokens: int, start_index: int, unique_prefix: int | None = None ) -> list[int]: if prompt_tokens <= 0: return [] @@ -224,7 +224,7 @@ class SyntheticDatasetCreator(DatasetCreator): def is_supported( cls, data: Any, - data_args: 
Optional[dict[str, Any]], # noqa: ARG003 + data_args: dict[str, Any] | None, # noqa: ARG003 ) -> bool: if ( isinstance(data, Path) @@ -248,11 +248,11 @@ def is_supported( def handle_create( cls, data: Any, - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, + processor_args: dict[str, Any] | None, random_seed: int, - ) -> Union[Dataset, DatasetDict, IterableDataset, IterableDatasetDict]: + ) -> Dataset | DatasetDict | IterableDataset | IterableDatasetDict: processor = check_load_processor( processor, processor_args, @@ -270,7 +270,7 @@ def handle_create( @classmethod def extract_args_column_mappings( cls, - data_args: Optional[dict[str, Any]], + data_args: dict[str, Any] | None, ) -> dict[ColumnInputTypes, str]: data_args_columns = super().extract_args_column_mappings(data_args) diff --git a/src/guidellm/logger.py b/src/guidellm/logger.py index 70259bad..da3464f9 100644 --- a/src/guidellm/logger.py +++ b/src/guidellm/logger.py @@ -72,7 +72,7 @@ def configure_logger(config: LoggingSettings = settings.logging): sys.stdout, level=config.console_log_level.upper(), format="{time:YY-MM-DD HH:mm:ss}|{level: <8} \ - |{name}:{function}:{line} - {message}" + |{name}:{function}:{line} - {message}", ) if config.log_file or config.log_file_level: diff --git a/src/guidellm/preprocess/dataset.py b/src/guidellm/preprocess/dataset.py index a94b8a14..b02efec5 100644 --- a/src/guidellm/preprocess/dataset.py +++ b/src/guidellm/preprocess/dataset.py @@ -1,9 +1,9 @@ import json import os -from collections.abc import Iterator +from collections.abc import Callable, Iterator from enum import Enum from pathlib import Path -from typing import Any, Callable, Optional, Union +from typing import Any import yaml from datasets import Dataset @@ -32,7 +32,7 @@ def handle_ignore_strategy( min_prompt_tokens: int, tokenizer: PreTrainedTokenizerBase, **_kwargs, -) -> Optional[str]: +) -> str | None: """ Ignores prompts that are shorter than the required minimum token length. @@ -56,7 +56,7 @@ def handle_concatenate_strategy( tokenizer: PreTrainedTokenizerBase, concat_delimiter: str, **_kwargs, -) -> Optional[str]: +) -> str | None: """ Concatenates prompts until the minimum token requirement is met. @@ -117,7 +117,7 @@ def handle_error_strategy( min_prompt_tokens: int, tokenizer: PreTrainedTokenizerBase, **_kwargs, -) -> Optional[str]: +) -> str | None: """ Raises an error if the prompt is too short. @@ -150,24 +150,24 @@ class TokensConfig(BaseModel): description="The average number of tokens.", gt=0, ) - stdev: Optional[int] = Field( + stdev: int | None = Field( description="The standard deviation of the tokens.", gt=0, default=None, ) - min: Optional[int] = Field( + min: int | None = Field( description="The minimum number of tokens.", gt=0, default=None, ) - max: Optional[int] = Field( + max: int | None = Field( description="The maximum number of tokens.", gt=0, default=None, ) @staticmethod - def parse_str(data: Union[str, Path]) -> "TokensConfig": + def parse_str(data: str | Path) -> "TokensConfig": """ Parses a string or path into a TokensConfig object. 
Supports: - JSON string @@ -215,14 +215,14 @@ def parse_key_value_pairs(data: str) -> "TokensConfig": return TokensConfig(**config_dict) # type: ignore[arg-type] @staticmethod - def parse_config_file(data: Union[str, Path]) -> "TokensConfig": + def parse_config_file(data: str | Path) -> "TokensConfig": with Path(data).open("r") as file: config_dict = yaml.safe_load(file) return TokensConfig(**config_dict) -def _validate_output_suffix(output_path: Union[str, Path]) -> None: +def _validate_output_suffix(output_path: str | Path) -> None: output_path = Path(output_path) suffix = output_path.suffix.lower() if suffix not in SUPPORTED_TYPES: @@ -233,18 +233,18 @@ def _validate_output_suffix(output_path: Union[str, Path]) -> None: def process_dataset( - data: Union[str, Path], - output_path: Union[str, Path], - processor: Union[str, Path, PreTrainedTokenizerBase], - prompt_tokens: Union[str, Path], - output_tokens: Union[str, Path], - processor_args: Optional[dict[str, Any]] = None, - data_args: Optional[dict[str, Any]] = None, + data: str | Path, + output_path: str | Path, + processor: str | Path | PreTrainedTokenizerBase, + prompt_tokens: str | Path, + output_tokens: str | Path, + processor_args: dict[str, Any] | None = None, + data_args: dict[str, Any] | None = None, short_prompt_strategy: ShortPromptStrategy = ShortPromptStrategy.IGNORE, - pad_char: Optional[str] = None, - concat_delimiter: Optional[str] = None, + pad_char: str | None = None, + concat_delimiter: str | None = None, push_to_hub: bool = False, - hub_dataset_id: Optional[str] = None, + hub_dataset_id: str | None = None, random_seed: int = 42, ) -> None: """ @@ -354,7 +354,7 @@ def process_dataset( def push_dataset_to_hub( - hub_dataset_id: Optional[str], + hub_dataset_id: str | None, processed_dataset: Dataset, ) -> None: """ diff --git a/src/guidellm/presentation/data_models.py b/src/guidellm/presentation/data_models.py index c1e8f13f..ff2863b4 100644 --- a/src/guidellm/presentation/data_models.py +++ b/src/guidellm/presentation/data_models.py @@ -1,7 +1,7 @@ import random from collections import defaultdict from math import ceil -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING from pydantic import BaseModel, computed_field @@ -12,14 +12,14 @@ class Bucket(BaseModel): - value: Union[float, int] + value: float | int count: int @staticmethod def from_data( - data: Union[list[float], list[int]], - bucket_width: Optional[float] = None, - n_buckets: Optional[int] = None, + data: list[float] | list[int], + bucket_width: float | None = None, + n_buckets: int | None = None, ) -> tuple[list["Bucket"], float]: if not data: return [], 1.0 @@ -35,7 +35,7 @@ def from_data( else: n_buckets = ceil(range_v / bucket_width) - bucket_counts: defaultdict[Union[float, int], int] = defaultdict(int) + bucket_counts: defaultdict[float | int, int] = defaultdict(int) for val in data: idx = int((val - min_v) // bucket_width) if idx >= n_buckets: @@ -80,7 +80,7 @@ def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]): class Distribution(BaseModel): - statistics: Optional[DistributionSummary] = None + statistics: DistributionSummary | None = None buckets: list[Bucket] bucket_width: float @@ -190,7 +190,7 @@ class TabularDistributionSummary(DistributionSummary): """ @computed_field - def percentile_rows(self) -> list[dict[str, Union[str, float]]]: + def percentile_rows(self) -> list[dict[str, str | float]]: rows = [ {"percentile": name, "value": value} for name, value in self.percentiles.model_dump().items() 
diff --git a/src/guidellm/presentation/injector.py b/src/guidellm/presentation/injector.py index bb1fd684..1e78080e 100644 --- a/src/guidellm/presentation/injector.py +++ b/src/guidellm/presentation/injector.py @@ -1,6 +1,5 @@ import re from pathlib import Path -from typing import Union from loguru import logger @@ -8,7 +7,7 @@ from guidellm.utils.text import load_text -def create_report(js_data: dict, output_path: Union[str, Path]) -> Path: +def create_report(js_data: dict, output_path: str | Path) -> Path: """ Creates a report from the dictionary and saves it to the output path. diff --git a/src/guidellm/request/loader.py b/src/guidellm/request/loader.py index 607a7455..e4a6934e 100644 --- a/src/guidellm/request/loader.py +++ b/src/guidellm/request/loader.py @@ -4,8 +4,6 @@ from typing import ( Any, Literal, - Optional, - Union, ) from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict @@ -43,9 +41,9 @@ def description(self) -> RequestLoaderDescription: ... class GenerativeRequestLoaderDescription(RequestLoaderDescription): type_: Literal["generative_request_loader"] = "generative_request_loader" # type: ignore[assignment] data: str - data_args: Optional[dict[str, Any]] + data_args: dict[str, Any] | None processor: str - processor_args: Optional[dict[str, Any]] + processor_args: dict[str, Any] | None class GenerativeRequestLoader(RequestLoader): @@ -69,18 +67,10 @@ class GenerativeRequestLoader(RequestLoader): def __init__( self, - data: Union[ - str, - Path, - Iterable[Union[str, dict[str, Any]]], - Dataset, - DatasetDict, - IterableDataset, - IterableDatasetDict, - ], - data_args: Optional[dict[str, Any]], - processor: Optional[Union[str, Path, PreTrainedTokenizerBase]], - processor_args: Optional[dict[str, Any]], + data: str | Path | Iterable[str | dict[str, Any]] | Dataset | DatasetDict | IterableDataset | IterableDatasetDict, + data_args: dict[str, Any] | None, + processor: str | Path | PreTrainedTokenizerBase | None, + processor_args: dict[str, Any] | None, shuffle: bool = True, iter_type: Literal["finite", "infinite"] = "finite", random_seed: int = 42, @@ -202,7 +192,7 @@ def _extract_text_column(self) -> str: "'data_args' dictionary." 
) - def _extract_prompt_tokens_count_column(self) -> Optional[str]: + def _extract_prompt_tokens_count_column(self) -> str | None: column_names = self._dataset_columns() if column_names and "prompt_tokens_count" in column_names: @@ -213,7 +203,7 @@ def _extract_prompt_tokens_count_column(self) -> Optional[str]: return None - def _extract_output_tokens_count_column(self) -> Optional[str]: + def _extract_output_tokens_count_column(self) -> str | None: column_names = self._dataset_columns() if column_names and "output_tokens_count" in column_names: @@ -224,7 +214,7 @@ def _extract_output_tokens_count_column(self) -> Optional[str]: return None - def _dataset_columns(self, err_msg: Optional[str] = None) -> Optional[list[str]]: + def _dataset_columns(self, err_msg: str | None = None) -> list[str] | None: try: column_names = self.dataset.column_names @@ -240,7 +230,7 @@ def _dataset_columns(self, err_msg: Optional[str] = None) -> Optional[list[str]] def _get_dataset_iter( self, scope_create_count: int - ) -> Optional[Iterator[dict[str, Any]]]: + ) -> Iterator[dict[str, Any]] | None: if scope_create_count > 0 and self.iter_type != "infinite": return None diff --git a/src/guidellm/request/request.py b/src/guidellm/request/request.py index bf4e59fb..83dc40f1 100644 --- a/src/guidellm/request/request.py +++ b/src/guidellm/request/request.py @@ -1,5 +1,5 @@ import uuid -from typing import Any, Literal, Optional +from typing import Any, Literal from pydantic import Field @@ -33,7 +33,7 @@ class GenerationRequest(StandardBaseModel): of output tokens. Used for controlling the behavior of the backend. """ - request_id: Optional[str] = Field( + request_id: str | None = Field( default_factory=lambda: str(uuid.uuid4()), description="The unique identifier for the request.", ) diff --git a/src/guidellm/scheduler/objects.py b/src/guidellm/scheduler/objects.py index 21d30ec8..e2583987 100644 --- a/src/guidellm/scheduler/objects.py +++ b/src/guidellm/scheduler/objects.py @@ -19,7 +19,6 @@ Literal, Protocol, TypeVar, - Union, runtime_checkable, ) @@ -56,10 +55,7 @@ MultiTurnRequestT = TypeAliasType( "MultiTurnRequestT", - Union[ - list[Union[RequestT, tuple[RequestT, float]]], - tuple[Union[RequestT, tuple[RequestT, float]]], - ], + list[RequestT | tuple[RequestT, float]] | tuple[RequestT | tuple[RequestT, float]], type_params=(RequestT,), ) """Multi-turn request structure supporting conversation history with optional delays.""" diff --git a/src/guidellm/utils/statistics.py b/src/guidellm/utils/statistics.py index 04484c2c..f71a2c24 100644 --- a/src/guidellm/utils/statistics.py +++ b/src/guidellm/utils/statistics.py @@ -389,8 +389,7 @@ def from_iterable_request_times( events[global_end] = 0 for (_, end), first_iter, first_iter_count, total_count in zip( - requests, first_iter_times, first_iter_counts, iter_counts, - strict=True + requests, first_iter_times, first_iter_counts, iter_counts, strict=True ): events[first_iter] += first_iter_count diff --git a/tests/integration/scheduler/test_scheduler.py b/tests/integration/scheduler/test_scheduler.py index 51abf59b..65bff95f 100644 --- a/tests/integration/scheduler/test_scheduler.py +++ b/tests/integration/scheduler/test_scheduler.py @@ -167,7 +167,7 @@ def _request_indices(): _request_indices(), received_updates.keys(), received_updates.values(), - received_responses, + received_responses, strict=False, ): assert req == f"req_{index}" assert resp in (f"response_for_{req}", f"mock_error_for_{req}") diff --git a/tests/unit/benchmark/test_output.py 
b/tests/unit/benchmark/test_output.py index 6763d978..67e65e2e 100644 --- a/tests/unit/benchmark/test_output.py +++ b/tests/unit/benchmark/test_output.py @@ -10,7 +10,10 @@ from guidellm.benchmark import ( GenerativeBenchmarksReport, ) -from guidellm.benchmark.output import GenerativeBenchmarkerConsole, GenerativeBenchmarkerCSV +from guidellm.benchmark.output import ( + GenerativeBenchmarkerConsole, + GenerativeBenchmarkerCSV, +) from tests.unit.mock_benchmark import mock_generative_benchmark @@ -80,6 +83,7 @@ def test_file_yaml(): mock_path.unlink() + @pytest.mark.asyncio async def test_file_csv(): mock_benchmark = mock_generative_benchmark() @@ -105,7 +109,8 @@ def test_console_benchmarks_profile_str(): console = GenerativeBenchmarkerConsole() mock_benchmark = mock_generative_benchmark() assert ( - console._get_profile_str(mock_benchmark) == "type=synchronous, strategies=['synchronous']" + console._get_profile_str(mock_benchmark) + == "type=synchronous, strategies=['synchronous']" ) diff --git a/tests/unit/dataset/test_synthetic.py b/tests/unit/dataset/test_synthetic.py index e3110fa3..544634c8 100644 --- a/tests/unit/dataset/test_synthetic.py +++ b/tests/unit/dataset/test_synthetic.py @@ -530,7 +530,7 @@ def mock_sampler_side_effect(*args, **kwargs): # Results should be identical with same seed assert len(items1) == len(items2) - for item1, item2 in zip(items1, items2): + for item1, item2 in zip(items1, items2, strict=False): assert item1["prompt"] == item2["prompt"] assert item1["prompt_tokens_count"] == item2["prompt_tokens_count"] assert item1["output_tokens_count"] == item2["output_tokens_count"] diff --git a/tests/unit/mock_backend.py b/tests/unit/mock_backend.py index 5ac069a8..3b7237e0 100644 --- a/tests/unit/mock_backend.py +++ b/tests/unit/mock_backend.py @@ -6,7 +6,7 @@ import random import time from collections.abc import AsyncIterator -from typing import Any, Optional +from typing import Any from lorem.text import TextLorem @@ -32,7 +32,7 @@ def __init__( self, target: str = "mock-target", model: str = "mock-model", - iter_delay: Optional[float] = None, + iter_delay: float | None = None, ): """ Initialize mock backend. @@ -53,7 +53,7 @@ def target(self) -> str: return self._target @property - def model(self) -> Optional[str]: + def model(self) -> str | None: """Model name for the mock backend.""" return self._model @@ -87,7 +87,7 @@ async def validate(self) -> None: if not self._in_process: raise RuntimeError("Backend not started up for process") - async def default_model(self) -> Optional[str]: + async def default_model(self) -> str | None: """ Return the default model for the mock backend. """ @@ -97,7 +97,7 @@ async def resolve( self, request: GenerationRequest, request_info: ScheduledRequestInfo, - history: Optional[list[tuple[GenerationRequest, GenerationResponse]]] = None, + history: list[tuple[GenerationRequest, GenerationResponse]] | None = None, ) -> AsyncIterator[tuple[GenerationResponse, ScheduledRequestInfo]]: """ Process a generation request and yield progressive responses. @@ -170,7 +170,7 @@ def _estimate_prompt_tokens(content: str) -> int: return len(str(content).split()) @staticmethod - def _get_tokens(token_count: Optional[int] = None) -> list[str]: + def _get_tokens(token_count: int | None = None) -> list[str]: """ Generate mock tokens for response. 
""" diff --git a/tests/unit/mock_benchmark.py b/tests/unit/mock_benchmark.py index cdf4375a..d7bfe7c9 100644 --- a/tests/unit/mock_benchmark.py +++ b/tests/unit/mock_benchmark.py @@ -1,4 +1,5 @@ """Mock benchmark objects for unit testing.""" + from guidellm.backends import GenerationRequestTimings from guidellm.benchmark import ( BenchmarkSchedulerStats, diff --git a/tests/unit/utils/test_encoding.py b/tests/unit/utils/test_encoding.py index cc4600cf..5664bcb0 100644 --- a/tests/unit/utils/test_encoding.py +++ b/tests/unit/utils/test_encoding.py @@ -476,7 +476,7 @@ def test_to_from_sequence_collections(self, collection): seq = inst.to_sequence(collection) out = inst.from_sequence(seq) assert len(out) == len(collection) - assert all(a == b for a, b in zip(out, list(collection))) + assert all(a == b for a, b in zip(out, list(collection), strict=False)) @pytest.mark.sanity def test_to_from_sequence_mapping(self): diff --git a/tests/unit/utils/test_typing.py b/tests/unit/utils/test_typing.py index fafa8765..009473f5 100644 --- a/tests/unit/utils/test_typing.py +++ b/tests/unit/utils/test_typing.py @@ -2,10 +2,9 @@ Test suite for the typing utilities module. """ -from typing import Annotated, Literal, Union +from typing import Annotated, Literal, TypeAlias, Union import pytest -from typing_extensions import TypeAlias from guidellm.utils.typing import get_literal_vals @@ -15,7 +14,7 @@ Literal["synchronous", "concurrent", "throughput", "constant", "poisson"], "Valid strategy type identifiers for scheduling request patterns", ] -StrategyProfileType: TypeAlias = Union[LocalStrategyType, LocalProfileType] +StrategyProfileType: TypeAlias = LocalStrategyType | LocalProfileType class TestGetLiteralVals: From 1e8974c2f60fe340cacdb1beaef0957c46e47398 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Thu, 9 Oct 2025 20:10:42 -0400 Subject: [PATCH 71/90] Fix remaining ruff errors Signed-off-by: Jared O'Connell --- pyproject.toml | 2 +- src/guidellm/__main__.py | 7 +++---- src/guidellm/backends/openai.py | 6 ++++-- src/guidellm/benchmark/profile.py | 3 ++- src/guidellm/dataset/hf_datasets.py | 4 ++-- src/guidellm/request/loader.py | 3 ++- tests/unit/benchmark/test_output.py | 2 +- tests/unit/mock_server/test_server.py | 2 +- tests/unit/scheduler/test_objects.py | 8 ++++---- tests/unit/scheduler/test_strategies.py | 2 +- tests/unit/utils/test_synchronous.py | 2 +- tests/unit/utils/test_typing.py | 6 +++--- 12 files changed, 25 insertions(+), 22 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 935587d0..f1624d3f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -167,7 +167,7 @@ ignore_missing_imports = true target-version = "py310" line-length = 88 indent-width = 4 -exclude = ["build", "dist", "env", ".venv"] +exclude = ["build", "dist", "env", ".venv*"] [tool.ruff.format] quote-style = "double" diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 0a035551..dbc8e1da 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -28,7 +28,7 @@ import asyncio import codecs from pathlib import Path -from typing import Annotated, Union +from typing import Annotated import click from pydantic import ValidationError @@ -78,9 +78,8 @@ "run", ] -STRATEGY_PROFILE_CHOICES: Annotated[ - list[str], "Available strategy and profile choices for benchmark execution types" -] = list(get_literal_vals(Union[ProfileType, StrategyType])) +# Available strategy and profile choices for benchmark execution types +STRATEGY_PROFILE_CHOICES: list[str] = list(get_literal_vals(ProfileType | 
StrategyType)) def decode_escaped_str(_ctx, _param, value): diff --git a/src/guidellm/backends/openai.py b/src/guidellm/backends/openai.py index fd539063..c8eb70f3 100644 --- a/src/guidellm/backends/openai.py +++ b/src/guidellm/backends/openai.py @@ -33,6 +33,8 @@ __all__ = ["OpenAIHTTPBackend", "UsageStats"] +ContentT = str | list[str | dict[str, str | dict[str, str]] | Path | Image.Image] | Any + @dataclasses.dataclass class UsageStats: @@ -431,7 +433,7 @@ async def text_completions( async def chat_completions( self, - content: str | list[str | dict[str, str | dict[str, str]] | Path | Image.Image] | Any, + content: ContentT, request_id: str | None = None, # noqa: ARG002 output_token_count: int | None = None, raw_content: bool = False, @@ -537,7 +539,7 @@ def _get_params(self, endpoint_type: str) -> dict[str, str]: def _get_chat_messages( self, - content: str | list[str | dict[str, str | dict[str, str]] | Path | Image.Image] | Any, + content: ContentT, ) -> list[dict[str, Any]]: if isinstance(content, str): return [{"role": "user", "content": content}] diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index ec4fa839..87a9a2be 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -680,7 +680,8 @@ def next_strategy( ) if self.synchronous_rate <= 0 and self.throughput_rate <= 0: raise RuntimeError( - "Invalid rates in sweep; aborting. Were there any successful requests?" + "Invalid rates in sweep; aborting. " + "Were there any successful requests?" ) self.measured_rates = list( np.linspace( diff --git a/src/guidellm/dataset/hf_datasets.py b/src/guidellm/dataset/hf_datasets.py index bd8d8c23..d1be46c1 100644 --- a/src/guidellm/dataset/hf_datasets.py +++ b/src/guidellm/dataset/hf_datasets.py @@ -20,7 +20,7 @@ class HFDatasetsCreator(DatasetCreator): @classmethod def is_supported(cls, data: Any, data_args: dict[str, Any] | None) -> bool: # noqa: ARG003 if isinstance( - data, (Dataset, DatasetDict, IterableDataset, IterableDatasetDict) + data, Dataset | DatasetDict | IterableDataset | IterableDatasetDict ): # base type is supported return True @@ -55,7 +55,7 @@ def handle_create( ) if isinstance( - data, (Dataset, DatasetDict, IterableDataset, IterableDatasetDict) + data, Dataset | DatasetDict | IterableDataset | IterableDatasetDict ): return data diff --git a/src/guidellm/request/loader.py b/src/guidellm/request/loader.py index e4a6934e..ac34131e 100644 --- a/src/guidellm/request/loader.py +++ b/src/guidellm/request/loader.py @@ -67,7 +67,8 @@ class GenerativeRequestLoader(RequestLoader): def __init__( self, - data: str | Path | Iterable[str | dict[str, Any]] | Dataset | DatasetDict | IterableDataset | IterableDatasetDict, + data: str | Path | Iterable[str | dict[str, Any]] | Dataset | DatasetDict | \ + IterableDataset | IterableDatasetDict, data_args: dict[str, Any] | None, processor: str | Path | PreTrainedTokenizerBase | None, processor_args: dict[str, Any] | None, diff --git a/tests/unit/benchmark/test_output.py b/tests/unit/benchmark/test_output.py index 67e65e2e..6310da88 100644 --- a/tests/unit/benchmark/test_output.py +++ b/tests/unit/benchmark/test_output.py @@ -93,7 +93,7 @@ async def test_file_csv(): csv_benchmarker = GenerativeBenchmarkerCSV(output_path=mock_path) await csv_benchmarker.finalize(report) - with mock_path.open("r") as file: + with mock_path.open("r") as file: # noqa: ASYNC230 # This is a test. 
reader = csv.reader(file) headers = next(reader) rows = list(reader) diff --git a/tests/unit/mock_server/test_server.py b/tests/unit/mock_server/test_server.py index 008103c3..ba712fb6 100644 --- a/tests/unit/mock_server/test_server.py +++ b/tests/unit/mock_server/test_server.py @@ -162,7 +162,7 @@ async def test_health_endpoint(self, mock_server_instance): assert "status" in data assert data["status"] == "healthy" assert "timestamp" in data - assert isinstance(data["timestamp"], (int, float)) + assert isinstance(data["timestamp"], int | float) @pytest.mark.smoke @pytest.mark.asyncio diff --git a/tests/unit/scheduler/test_objects.py b/tests/unit/scheduler/test_objects.py index fc5610fd..2e0374e4 100644 --- a/tests/unit/scheduler/test_objects.py +++ b/tests/unit/scheduler/test_objects.py @@ -340,7 +340,7 @@ def test_class_signatures(self): for key in self.CHECK_KEYS: assert key in fields field_info = fields[key] - assert field_info.annotation in (Union[float, None], Optional[float]) + assert field_info.annotation in (Union[float, None], Optional[float]) # noqa: UP007 assert field_info.default is None @pytest.mark.smoke @@ -453,7 +453,7 @@ def test_class_signatures(self): for key in self.CHECK_KEYS: assert key in fields field_info = fields[key] - assert field_info.annotation in (Union[float, None], Optional[float]) + assert field_info.annotation in (Union[float, None], Optional[float]) # noqa: UP007 assert field_info.default is None @pytest.mark.smoke @@ -704,11 +704,11 @@ def test_marshalling(self, valid_instances): else: assert original_value is None or isinstance( original_value, - (RequestSchedulerTimings, MeasuredRequestTimings), + RequestSchedulerTimings | MeasuredRequestTimings, ) assert reconstructed_value is None or isinstance( reconstructed_value, - (RequestSchedulerTimings, MeasuredRequestTimings), + RequestSchedulerTimings | MeasuredRequestTimings, ) else: assert original_value == reconstructed_value diff --git a/tests/unit/scheduler/test_strategies.py b/tests/unit/scheduler/test_strategies.py index 67a2d77d..143a3130 100644 --- a/tests/unit/scheduler/test_strategies.py +++ b/tests/unit/scheduler/test_strategies.py @@ -225,7 +225,7 @@ def test_lifecycle( for index in range(max(5, startup_requests + 2)): offset = instance.next_offset() - assert isinstance(offset, (int, float)) + assert isinstance(offset, int | float) if index < startup_requests: expected_offset = initial_offset + (index + 1) * startup_delay diff --git a/tests/unit/utils/test_synchronous.py b/tests/unit/utils/test_synchronous.py index 1a9ea2c9..620ba3fa 100644 --- a/tests/unit/utils/test_synchronous.py +++ b/tests/unit/utils/test_synchronous.py @@ -226,7 +226,7 @@ async def test_invocation(self, objects_types, expected_result): async def set_target(): await asyncio.sleep(0.01) obj = objects[expected_result] - if isinstance(obj, (threading.Event, ProcessingEvent)): + if isinstance(obj, threading.Event | ProcessingEvent): obj.set() else: await asyncio.to_thread(obj.wait) diff --git a/tests/unit/utils/test_typing.py b/tests/unit/utils/test_typing.py index 009473f5..1e31ef8e 100644 --- a/tests/unit/utils/test_typing.py +++ b/tests/unit/utils/test_typing.py @@ -2,7 +2,7 @@ Test suite for the typing utilities module. 
""" -from typing import Annotated, Literal, TypeAlias, Union +from typing import Annotated, Literal, TypeAlias import pytest @@ -53,7 +53,7 @@ def test_inline_union_type(self): ### WRITTEN BY AI ### """ - result = get_literal_vals(Union[LocalProfileType, LocalStrategyType]) + result = get_literal_vals(LocalProfileType | LocalStrategyType) expected = frozenset( { "synchronous", @@ -117,6 +117,6 @@ def test_literal_union(self): ### WRITTEN BY AI ### """ - result = get_literal_vals(Union[Literal["test", "test2"], Literal["test3"]]) + result = get_literal_vals(Literal["test", "test2"] | Literal["test3"]) expected = frozenset({"test", "test2", "test3"}) assert result == expected From d0dad5aa7f752e88d250284bb48cb2f8ed78d5ff Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Fri, 10 Oct 2025 12:55:32 -0400 Subject: [PATCH 72/90] Fix unit tests Signed-off-by: Jared O'Connell --- tests/unit/scheduler/test_objects.py | 4 ++-- tests/unit/utils/test_synchronous.py | 32 +++++++++++++++++----------- 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/tests/unit/scheduler/test_objects.py b/tests/unit/scheduler/test_objects.py index 2e0374e4..2fc4c86f 100644 --- a/tests/unit/scheduler/test_objects.py +++ b/tests/unit/scheduler/test_objects.py @@ -3,6 +3,7 @@ import inspect import typing from collections.abc import AsyncIterator +from types import UnionType from typing import Any, Literal, Optional, TypeVar, Union import pytest @@ -62,8 +63,7 @@ def test_multi_turn_request_t(): assert MultiTurnRequestT.__name__ == "MultiTurnRequestT" value = MultiTurnRequestT.__value__ - assert hasattr(value, "__origin__") - assert value.__origin__ is Union + assert isinstance(value, UnionType) type_params = getattr(MultiTurnRequestT, "__type_params__", ()) assert len(type_params) == 1 diff --git a/tests/unit/utils/test_synchronous.py b/tests/unit/utils/test_synchronous.py index 620ba3fa..7acd5b4a 100644 --- a/tests/unit/utils/test_synchronous.py +++ b/tests/unit/utils/test_synchronous.py @@ -6,7 +6,7 @@ from functools import wraps from multiprocessing.synchronize import Barrier as ProcessingBarrier from multiprocessing.synchronize import Event as ProcessingEvent -from typing import Union +from typing import get_args import pytest @@ -32,17 +32,25 @@ async def new_func(*args, **kwargs): def test_sync_object_types_alias(): - """Test that SyncObjectTypesAlias is defined correctly as a type alias.""" - assert hasattr(SyncObjectTypesAlias, "__origin__") - if hasattr(SyncObjectTypesAlias, "__args__"): - actual_type = SyncObjectTypesAlias.__args__[0] - assert hasattr(actual_type, "__origin__") - assert actual_type.__origin__ is Union - union_args = actual_type.__args__ - assert threading.Event in union_args - assert ProcessingEvent in union_args - assert threading.Barrier in union_args - assert ProcessingBarrier in union_args + """ + Test that SyncObjectTypesAlias is defined correctly as a type alias. + + ## WRITTEN BY AI ## + """ + # Get the actual types from the union alias + actual_types = get_args(SyncObjectTypesAlias) + + # Define the set of expected types + expected_types = { + threading.Event, + ProcessingEvent, + threading.Barrier, + ProcessingBarrier, + } + + # Assert that the set of actual types matches the expected set. + # Using a set comparison is robust as it ignores the order. 
+ assert set(actual_types) == expected_types class TestWaitForSyncEvent: From f862943890f4ad7a5112b93c9eb10f795d8d21f7 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Fri, 10 Oct 2025 16:28:25 -0400 Subject: [PATCH 73/90] Finalize general refactor implementation for data pathways and enabling multimodal --- src/guidellm/__main__.py | 17 +- src/guidellm/backends/__init__.py | 22 +- src/guidellm/backends/backend.py | 4 +- src/guidellm/backends/objects.py | 90 -- src/guidellm/backends/openai.py | 277 ++-- src/guidellm/backends/response_handlers.py | 283 ++++ src/guidellm/benchmark/__init__.py | 54 +- src/guidellm/benchmark/aggregator.py | 1261 --------------- src/guidellm/benchmark/benchmarker.py | 184 +-- src/guidellm/benchmark/entrypoints.py | 61 +- src/guidellm/benchmark/objects.py | 475 ------ src/guidellm/benchmark/output.py | 10 +- src/guidellm/benchmark/profile.py | 14 +- src/guidellm/benchmark/progress.py | 40 +- src/guidellm/benchmark/schemas.py | 1379 +++++++++++++++++ src/guidellm/data/__init__.py | 20 +- src/guidellm/data/collators.py | 2 +- src/guidellm/data/loaders.py | 123 +- src/guidellm/data/objects.py | 157 -- src/guidellm/data/preprocessors/formatters.py | 185 ++- src/guidellm/data/preprocessors/mappers.py | 2 +- src/guidellm/data/schemas.py | 13 + src/guidellm/scheduler/__init__.py | 10 +- src/guidellm/scheduler/constraints.py | 20 +- src/guidellm/scheduler/environments.py | 16 +- src/guidellm/scheduler/scheduler.py | 7 +- .../scheduler/{objects.py => schemas.py} | 174 +-- src/guidellm/scheduler/strategies.py | 12 +- src/guidellm/scheduler/worker.py | 41 +- src/guidellm/scheduler/worker_group.py | 30 +- src/guidellm/schemas/__init__.py | 20 + src/guidellm/schemas/info.py | 132 ++ src/guidellm/schemas/request.py | 164 ++ src/guidellm/schemas/response.py | 97 ++ src/guidellm/schemas/stats.py | 213 +++ src/guidellm/utils/statistics.py | 134 +- src/guidellm/utils/text.py | 11 - tests/unit/backend/test_backend.py | 4 +- tests/unit/backend/test_objects.py | 4 +- tests/unit/backend/test_openai_backend.py | 6 +- tests/unit/mock_benchmark.py | 2 +- tests/unit/utils/test_encoding.py | 4 +- 42 files changed, 2864 insertions(+), 2910 deletions(-) delete mode 100644 src/guidellm/backends/objects.py create mode 100644 src/guidellm/backends/response_handlers.py delete mode 100644 src/guidellm/benchmark/aggregator.py delete mode 100644 src/guidellm/benchmark/objects.py create mode 100644 src/guidellm/benchmark/schemas.py delete mode 100644 src/guidellm/data/objects.py create mode 100644 src/guidellm/data/schemas.py rename src/guidellm/scheduler/{objects.py => schemas.py} (61%) create mode 100644 src/guidellm/schemas/__init__.py create mode 100644 src/guidellm/schemas/info.py create mode 100644 src/guidellm/schemas/request.py create mode 100644 src/guidellm/schemas/response.py create mode 100644 src/guidellm/schemas/stats.py diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 4bb43d0f..f45637fc 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -48,7 +48,6 @@ from guidellm.backends import BackendType from guidellm.benchmark import ( GenerativeConsoleBenchmarkerProgress, - InjectExtrasAggregator, ProfileType, benchmark_generative_text, reimport_benchmarks_report, @@ -56,10 +55,10 @@ from guidellm.benchmark.scenario import ( GenerativeTextScenario, ) -from guidellm.data import GenerativeRequestType from guidellm.mock_server import MockServer, MockServerConfig from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset from 
guidellm.scheduler import StrategyType +from guidellm.schemas import GenerativeRequestType from guidellm.settings import print_config from guidellm.utils import Console, DefaultGroupHandler, get_literal_vals from guidellm.utils import cli as cli_tools @@ -375,9 +374,9 @@ def benchmark(): ), ) @click.option( - "--request-samples", + "--sample-requests", "--output-sampling", # legacy alias - "request_samples", + "sample_requests", type=int, help=( "The number of samples for each request status and each benchmark to save " @@ -451,11 +450,10 @@ def run( disable_console_outputs, disable_progress, display_scheduler_stats, - # Aggregators configuration - output_extras, + # Benchmarker configuration + sample_requests, warmup, cooldown, - request_samples, # Constraints configuration max_seconds, max_requests, @@ -519,11 +517,10 @@ def run( else None ), print_updates=not disable_console_outputs, - # Aggregators configuration - add_aggregators={"extras": InjectExtrasAggregator(extras=output_extras)}, + # Benchmarker configuration + sample_requests=sample_requests, warmup=warmup, cooldown=cooldown, - sample_requests=request_samples, # Constraints configuration max_seconds=max_seconds, max_requests=max_requests, diff --git a/src/guidellm/backends/__init__.py b/src/guidellm/backends/__init__.py index 4bcf5683..b07c42ad 100644 --- a/src/guidellm/backends/__init__.py +++ b/src/guidellm/backends/__init__.py @@ -9,20 +9,22 @@ Backend, BackendType, ) -from .objects import ( - GenerationRequest, - GenerationRequestTimings, - GenerationResponse, - GenerationTokenStats, -) from .openai import OpenAIHTTPBackend +from .response_handlers import ( + AudioResponseHandler, + ChatCompletionsResponseHandler, + GenerationResponseHandler, + GenerationResponseHandlerFactory, + TextCompletionsResponseHandler, +) __all__ = [ + "AudioResponseHandler", "Backend", "BackendType", - "GenerationRequest", - "GenerationRequestTimings", - "GenerationResponse", - "GenerationTokenStats", + "ChatCompletionsResponseHandler", + "GenerationResponseHandler", + "GenerationResponseHandlerFactory", "OpenAIHTTPBackend", + "TextCompletionsResponseHandler", ] diff --git a/src/guidellm/backends/backend.py b/src/guidellm/backends/backend.py index a7d82979..6b122c7d 100644 --- a/src/guidellm/backends/backend.py +++ b/src/guidellm/backends/backend.py @@ -16,11 +16,11 @@ from abc import abstractmethod from typing import Literal -from guidellm.backends.objects import ( +from guidellm.scheduler import BackendInterface +from guidellm.schemas import ( GenerationRequest, GenerationResponse, ) -from guidellm.scheduler import BackendInterface from guidellm.utils import RegistryMixin __all__ = [ diff --git a/src/guidellm/backends/objects.py b/src/guidellm/backends/objects.py deleted file mode 100644 index 88d25949..00000000 --- a/src/guidellm/backends/objects.py +++ /dev/null @@ -1,90 +0,0 @@ -""" -Backend object models for request and response handling. - -Provides standardized models for generation requests, responses, and timing -information to ensure consistent data handling across different backend -implementations. 
-""" - -from __future__ import annotations - -from typing import Literal - -from pydantic import Field - -from guidellm.data import ( - GenerationRequest, - GenerationRequestArguments, - GenerationRequestTimings, -) -from guidellm.scheduler import ( - SchedulerMessagingPydanticRegistry, -) -from guidellm.utils import StandardBaseModel - -__all__ = [ - "GenerationRequest", - "GenerationRequestArguments", - "GenerationRequestTimings", - "GenerationResponse", - "GenerationTokenStats", -] - - -@SchedulerMessagingPydanticRegistry.register() -class GenerationTokenStats(StandardBaseModel): - """Token statistics for generation requests and responses.""" - - request: int | None = Field( - default=None, description="Number of tokens in the original request." - ) - response: int | None = Field( - default=None, description="Number of tokens in the generated response." - ) - - def value( - self, preference: Literal["request", "response"] | None = None - ) -> int | None: - if preference == "request": - return self.request - if preference == "response": - return self.response - return self.response if self.response is not None else self.request - - -@SchedulerMessagingPydanticRegistry.register() -class GenerationResponse(StandardBaseModel): - """Response model for backend generation operations.""" - - request_id: str = Field( - description="Unique identifier matching the original GenerationRequest." - ) - request_args: GenerationRequestArguments = Field( - description="Arguments passed to the backend for this request." - ) - text: str | None = Field( - default=None, - description="The generated response text.", - ) - iterations: int = Field( - default=0, description="Number of generation iterations completed." - ) - - prompt_stats: GenerationTokenStats = Field( - default_factory=GenerationTokenStats, - description="Token statistics from the prompt.", - ) - output_stats: GenerationTokenStats = Field( - default_factory=GenerationTokenStats, - description="Token statistics from the generated output.", - ) - - def total_tokens( - self, preference: Literal["request", "response"] | None = None - ) -> int | None: - prompt_tokens = self.prompt_stats.value(preference=preference) - output_tokens = self.output_stats.value(preference=preference) - - if prompt_tokens is None and output_tokens is None: - return None - return (prompt_tokens or 0) + (output_tokens or 0) diff --git a/src/guidellm/backends/openai.py b/src/guidellm/backends/openai.py index f8ccaafb..eb4b744a 100644 --- a/src/guidellm/backends/openai.py +++ b/src/guidellm/backends/openai.py @@ -13,22 +13,19 @@ from __future__ import annotations import asyncio -import json import time from collections.abc import AsyncIterator -from typing import Any, cast +from typing import Any import httpx -from pydantic import dataclasses from guidellm.backends.backend import Backend -from guidellm.backends.objects import ( +from guidellm.backends.response_handlers import GenerationResponseHandlerFactory +from guidellm.schemas import ( GenerationRequest, - GenerationRequestTimings, GenerationResponse, - GenerationTokenStats, + RequestInfo, ) -from guidellm.scheduler import ScheduledRequestInfo try: import orjson @@ -38,25 +35,7 @@ orjson = None HAS_ORJSON = False -__all__ = ["OpenAIHTTPBackend", "UsageStats"] - - -@dataclasses.dataclass -class UsageStats: - """Token usage statistics for generation requests.""" - - prompt_tokens: int | None = None - output_tokens: int | None = None - - -open_ai_paths: dict[str, str] = { - "health": "health", - "models": "v1/models", - 
"text_completions": "v1/completions", - "chat_completions": "v1/chat/completions", - "audio_transcriptions": "v1/audio/transcriptions", - "audio_translations": "v1/audio/translations", -} +__all__ = ["OpenAIHTTPBackend"] @Backend.register("openai_http") @@ -87,6 +66,8 @@ def __init__( self, target: str, model: str | None = None, + api_routes: dict[str, str] | None = None, + response_handlers: dict[str, Any] | None = None, timeout: float = 60.0, http2: bool = True, follow_redirects: bool = True, @@ -100,6 +81,15 @@ def __init__( self.model = model # Store configuration + self.api_routes = api_routes or { + "health": "health", + "models": "v1/models", + "text_completions": "v1/completions", + "chat_completions": "v1/chat/completions", + "audio_transcriptions": "v1/audio/transcriptions", + "audio_translations": "v1/audio/translations", + } + self.response_handlers = response_handlers self.timeout = timeout self.http2 = http2 self.follow_redirects = follow_redirects @@ -124,7 +114,7 @@ def info(self) -> dict[str, Any]: "http2": self.http2, "follow_redirects": self.follow_redirects, "verify": self.verify, - "openai_paths": open_ai_paths, + "openai_paths": self.api_routes, "validate_backend": self.validate_backend, } @@ -169,7 +159,8 @@ async def validate(self): :raises RuntimeError: If backend cannot connect or validate configuration. """ - self._check_in_process() + if self._async_client is None: + raise RuntimeError("Backend not started up for process.") if not self.validate_backend: return @@ -191,9 +182,10 @@ async def available_models(self) -> list[str]: :raises HTTPError: If models endpoint returns an error. :raises RuntimeError: If backend is not initialized. """ - self._check_in_process() + if self._async_client is None: + raise RuntimeError("Backend not started up for process.") - target = f"{self.target}/{open_ai_paths['models']}" + target = f"{self.target}/{self.api_routes['models']}" response = await self._async_client.get(target) response.raise_for_status() @@ -214,9 +206,9 @@ async def default_model(self) -> str | None: async def resolve( # noqa: C901 self, request: GenerationRequest, - request_info: ScheduledRequestInfo, + request_info: RequestInfo, history: list[tuple[GenerationRequest, GenerationResponse]] | None = None, - ) -> AsyncIterator[tuple[GenerationResponse, ScheduledRequestInfo]]: + ) -> AsyncIterator[tuple[GenerationResponse, RequestInfo]]: """ Process a generation request and yield progressive responses. @@ -229,181 +221,104 @@ async def resolve( # noqa: C901 :raises NotImplementedError: If history is provided. :yields: Tuples of (response, updated_request_info) as generation progresses. 
""" - self._check_in_process() + if self._async_client is None: + raise RuntimeError("Backend not started up for process.") + if history is not None: raise NotImplementedError( "Multi-turn requests with conversation history are not yet supported" ) - request_info.request_timings = GenerationRequestTimings() - request.arguments.url = ( - request.arguments.url or f"{self.target}/{request.arguments.path}" - if request.arguments.path is not None - else f"{self.target}/{open_ai_paths[request.request_type]}" + response_handler = ( + self.response_handlers.get(request.request_type) + if self.response_handlers + else None ) - request_info.request_timings.request_start = time.time() + if response_handler is None: + response_handler_class = ( + GenerationResponseHandlerFactory.get_registered_object( + request.request_type + ) + ) + if response_handler_class is None: + raise ValueError( + "No response handler registered for request type " + f"'{request.request_type}'" + ) + response_handler = response_handler_class() + + if (request_path := self.api_routes.get(request.request_type)) is None: + raise ValueError(f"Unsupported request type '{request.request_type}'") + request_url = f"{self.target}/{request_path}" + request_info.timings.request_start = time.time() if not request.arguments.stream: response = await self._async_client.request( request.arguments.method or "POST", - request.arguments.url, - content=request.arguments.content_body, - files=request.arguments.request_files, - json=request.arguments.json_body, + request_url, params=request.arguments.params, headers=request.arguments.headers, + json=request.arguments.body if not request.arguments.files else None, + data=request.arguments.body if request.arguments.files else None, + files=( + { + key: tuple(value) if isinstance(value, list) else value + for key, value in request.arguments.files.items() + } + if request.arguments.files + else None + ), ) + request_info.timings.request_end = time.time() response.raise_for_status() data = response.json() - prompt_stats, output_stats = self._extract_response_stats(data, request) - request_info.request_timings.request_end = time.time() - - yield ( - GenerationResponse( - request_id=request.request_id, - request_args=request.arguments, - text=self._extract_response_text(data), - iterations=0, - prompt_stats=prompt_stats, - output_stats=output_stats, - ), - request_info, - ) + yield response_handler.compile_non_streaming(request, data), request_info return - deltas = [] - prompt_stats = None - output_stats = None - end_reached = False - try: async with self._async_client.stream( request.arguments.method or "POST", - request.arguments.url, - content=request.arguments.content_body, - files=request.arguments.request_files, - json=request.arguments.json_body, + request_url, params=request.arguments.params, headers=request.arguments.headers, + json=request.arguments.body if not request.arguments.files else None, + data=request.arguments.body if request.arguments.files else None, + files=( + { + key: tuple(value) if isinstance(value, list) else value + for key, value in request.arguments.files.items() + } + if request.arguments.files + else None + ), ) as stream: stream.raise_for_status() - buffer = bytearray() + end_reached = False - async for chunk in stream.aiter_bytes(): - if not chunk or end_reached: + async for chunk in stream.aiter_lines(): + if end_reached: continue - buffer.extend(chunk) - - while (start := buffer.find(b"data:")) != -1 and ( - end := buffer.find(b"\n", start) - ) != -1: - line = 
buffer[start + len(b"data:") : end].strip() - buffer = buffer[end + 1 :] - - if not line: - continue - - if line == b"[DONE]": - if request_info.request_timings.request_end is None: - request_info.request_timings.request_end = time.time() - end_reached = True - break - - data = ( - json.loads(line) if not HAS_ORJSON else orjson.loads(line) - ) - - if "usage" in data and data["usage"] is not None: - request_info.request_timings.request_end = time.time() - prompt_stats, output_stats = self._extract_response_stats( - data, request - ) - else: - if request_info.request_timings.first_iteration is None: - request_info.request_timings.first_iteration = ( - time.time() - ) - request_info.request_timings.last_iteration = time.time() - deltas.append(self._extract_response_text(data)) - - yield ( - GenerationResponse( - request_id=request.request_id, - request_args=request.arguments, - text="".join(deltas) if deltas else None, - iterations=len(deltas), - prompt_stats=prompt_stats or GenerationTokenStats(), - output_stats=output_stats or GenerationTokenStats(), - ), - request_info, - ) - except asyncio.CancelledError as err: - yield ( # Ensure we yield what we have so far before stopping - GenerationResponse( - request_id=request.request_id, - request_args=request.arguments, - text="".join(deltas) if deltas else None, - iterations=len(deltas), - prompt_stats=prompt_stats or GenerationTokenStats(), - output_stats=output_stats or GenerationTokenStats(), - ), - request_info, - ) - raise err - - def _extract_response_text(self, data: dict) -> str: - if not data: - return None - - object_type = data.get("object") or data.get("type") - - if object_type == "text_completion": - return data.get("choices", [{}])[0].get("text", "") - - if object_type == "chat.completion": - return data.get("choices", [{}])[0].get("message", {}).get("content", "") - - if object_type == "chat.completion.chunk": - return data.get("choices", [{}])[0].get("delta", {}).get("content", "") - - if "text" in data: - return data.get("text", "") - - if "delta" in data: - return data.get("delta", "") - raise ValueError(f"Unsupported response format: {data}") - - def _extract_response_stats( - self, data: dict, request: GenerationRequest - ) -> tuple[GenerationTokenStats, GenerationTokenStats]: - prompt_stats = GenerationTokenStats() - output_stats = GenerationTokenStats() + if ( + iterations := response_handler.add_streaming_line(chunk) + ) is None or iterations < 0: + end_reached = end_reached or iterations is None + continue - if not data or not (usage := cast("dict", data.get("usage"))): - return prompt_stats, output_stats + if request_info.timings.first_iteration is None: + request_info.timings.first_iteration = time.time() + request_info.timings.last_iteration = time.time() - prompt_stats.request = request.stats.get("prompt_tokens") - prompt_stats.response = usage.get("prompt_tokens", usage.get("input_tokens")) - prompt_token_details = usage.get( - "prompt_tokens_details", usage.get("input_tokens_details") - ) - if prompt_token_details: - for key, val in prompt_token_details.items(): - setattr(prompt_stats, key, val) + if request_info.timings.iterations is None: + request_info.timings.iterations = 0 + request_info.timings.iterations += iterations - output_stats.request = request.stats.get("output_tokens") - output_stats.response = usage.get( - "completion_tokens", usage.get("output_tokens") - ) - output_token_details = usage.get( - "completion_tokens_details", usage.get("output_tokens_details") - ) - if output_token_details: - for key, 
val in output_token_details.items(): - setattr(output_stats, key, val) + request_info.timings.request_end = time.time() - return prompt_stats, output_stats + yield response_handler.compile_streaming(request), request_info + except asyncio.CancelledError as err: + yield response_handler.compile_streaming(request), request_info + raise err def _resolve_validate_kwargs( self, validate_backend: bool | str | dict[str, Any] @@ -414,8 +329,8 @@ def _resolve_validate_kwargs( if validate_kwargs is True: validate_kwargs = "health" - if isinstance(validate_kwargs, str) and validate_kwargs in open_ai_paths: - validate_kwargs = f"{self.target}/{open_ai_paths[validate_kwargs]}" + if isinstance(validate_kwargs, str) and validate_kwargs in self.api_routes: + validate_kwargs = f"{self.target}/{self.api_routes[validate_kwargs]}" if isinstance(validate_kwargs, str): validate_kwargs = { @@ -433,9 +348,3 @@ def _resolve_validate_kwargs( validate_kwargs["method"] = "GET" return validate_kwargs - - def _check_in_process(self): - if not self._in_process or self._async_client is None: - raise RuntimeError( - "Backend not started up for process, cannot process requests." - ) diff --git a/src/guidellm/backends/response_handlers.py b/src/guidellm/backends/response_handlers.py new file mode 100644 index 00000000..492f8e99 --- /dev/null +++ b/src/guidellm/backends/response_handlers.py @@ -0,0 +1,283 @@ +from __future__ import annotations + +import json +from typing import Any, Protocol, cast + +from guidellm.schemas import GenerationRequest, GenerationResponse, UsageMetrics +from guidellm.utils import RegistryMixin + +try: + import orjson +except ImportError: + orjson = None + +__all__ = [ + "AudioResponseHandler", + "ChatCompletionsResponseHandler", + "GenerationResponseHandler", + "GenerationResponseHandlerFactory", + "TextCompletionsResponseHandler", +] + + +class GenerationResponseHandler(Protocol): + def compile_non_streaming( + self, request: GenerationRequest, response: Any + ) -> GenerationResponse: ... + + def add_streaming_line(self, line: str) -> int | None: ... + + def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: ... 
+ + +class GenerationResponseHandlerFactory(RegistryMixin[type[GenerationResponseHandler]]): + pass + + +@GenerationResponseHandlerFactory.register("text_completions") +class TextCompletionsResponseHandler(GenerationResponseHandler): + def __init__(self): + self.streaming_texts: list[str] = [] + self.streaming_usage: dict[str, int | dict[str, int]] | None = None + + def compile_non_streaming( + self, request: GenerationRequest, response: dict + ) -> GenerationResponse: + choices = cast("list[dict]", response.get("choices", [])) + usage = cast("dict[str, int | dict[str, int]]", response.get("usage", {})) + input_metrics, output_metrics = self.extract_metrics(usage) + + return GenerationResponse( + request_id=request.request_id, + request_args=str( + request.arguments.model_dump() if request.arguments else None + ), + text=choices[0].get("text", "") if choices else "", + input_metrics=input_metrics, + output_metrics=output_metrics, + ) + + def add_streaming_line(self, line: str) -> int | None: + if line == "data: [DONE]": + return None + + if not line or not (line := line.strip()) or not line.startswith("data:"): + return 0 + + line = line[len("data:") :].strip() + data = cast( + "dict[str, Any]", + json.loads(line) if orjson is None else orjson.loads(line), + ) + updated = False + + if (choices := cast("list[dict]", data.get("choices"))) and ( + text := choices[0].get("text") + ): + self.streaming_texts.append(text) + updated = True + + if usage := cast("dict[str, int | dict[str, int]]", data.get("usage")): + self.streaming_usage = usage + + return 1 if updated else 0 + + def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: + input_metrics, output_metrics = self.extract_metrics(self.streaming_usage) + + return GenerationResponse( + request_id=request.request_id, + request_args=str( + request.arguments.model_dump() if request.arguments else None + ), + text="".join(self.streaming_texts), + input_metrics=input_metrics, + output_metrics=output_metrics, + ) + + def extract_metrics( + self, usage: dict[str, int | dict[str, int]] | None + ) -> tuple[UsageMetrics, UsageMetrics]: + if not usage: + return UsageMetrics(), UsageMetrics() + + input_details = cast("dict[str, int]", usage.get("prompt_tokens_details", {})) + output_details = cast( + "dict[str, int]", usage.get("completion_tokens_details", {}) + ) + + return UsageMetrics( + text_tokens=input_details.get("prompt_tokens") + or cast("int", usage.get("prompt_tokens")), + image_tokens=input_details.get("image_tokens"), + video_tokens=input_details.get("video_tokens"), + audio_tokens=input_details.get("audio_tokens"), + audio_seconds=input_details.get("seconds"), + ), UsageMetrics( + text_tokens=output_details.get("completion_tokens") + or cast("int", usage.get("completion_tokens")), + image_tokens=output_details.get("image_tokens"), + video_tokens=output_details.get("video_tokens"), + audio_tokens=output_details.get("audio_tokens"), + audio_seconds=output_details.get("seconds"), + ) + + +@GenerationResponseHandlerFactory.register("chat_completions") +class ChatCompletionsResponseHandler(TextCompletionsResponseHandler): + def compile_non_streaming( + self, request: GenerationRequest, response: dict + ) -> GenerationResponse: + choices = cast("list[dict]", response.get("choices", [])) + usage = cast("dict[str, int | dict[str, int]]", response.get("usage", {})) + input_metrics, output_metrics = self.extract_metrics(usage) + + return GenerationResponse( + request_id=request.request_id, + request_args=str( + 
request.arguments.model_dump() if request.arguments else None + ), + text=cast("dict", choices[0].get("message", {})).get("content", "") + if choices + else "", + input_metrics=input_metrics, + output_metrics=output_metrics, + ) + + def add_streaming_line(self, line: str) -> int | None: + if line == "data: [DONE]": + return None + + if not line or not (line := line.strip()) or not line.startswith("data:"): + return 0 + + line = line[len("data:") :].strip() + data = cast( + "dict[str, Any]", + json.loads(line) if orjson is None else orjson.loads(line), + ) + updated = False + + # Extract delta content for chat completion chunks + if choices := cast("list[dict]", data.get("choices")): + delta = choices[0].get("delta", {}) + if content := delta.get("content"): + self.streaming_texts.append(content) + updated = True + + if usage := cast("dict[str, int | dict[str, int]]", data.get("usage")): + self.streaming_usage = usage + + return 1 if updated else 0 + + def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: + input_metrics, output_metrics = self.extract_metrics(self.streaming_usage) + + return GenerationResponse( + request_id=request.request_id, + request_args=str( + request.arguments.model_dump() if request.arguments else None + ), + text="".join(self.streaming_texts), + input_metrics=input_metrics, + output_metrics=output_metrics, + ) + + +@GenerationResponseHandlerFactory.register( + ["audio_transcriptions", "audio_translations"] +) +class AudioResponseHandler: + def __init__(self): + self.streaming_buffer: bytearray = bytearray() + self.streaming_texts: list[str] = [] + self.streaming_usage: dict[str, int | dict[str, int]] | None = None + + def compile_non_streaming( + self, request: GenerationRequest, response: dict + ) -> GenerationResponse: + usage = cast("dict[str, int]", response.get("usage", {})) + input_details = cast("dict[str, int]", usage.get("input_token_details", {})) + output_details = cast("dict[str, int]", usage.get("output_token_details", {})) + text = response.get("text", "") + + return GenerationResponse( + request_id=request.request_id, + request_args=str( + request.arguments.model_dump() if request.arguments else None + ), + text=text, + input_metrics=UsageMetrics( + text_tokens=input_details.get("text_tokens", usage.get("input_tokens")), + audio_tokens=input_details.get( + "audio_tokens", usage.get("input_tokens") + ), + audio_seconds=input_details.get("seconds", usage.get("seconds")), + ), + output_metrics=UsageMetrics( + text_tokens=output_details.get( + "text_tokens", usage.get("output_tokens") + ), + ), + ) + + def add_streaming_line(self, line: str) -> int | None: + if line == "data: [DONE]": + return None + + if not line or not (line := line.strip()) or not line.startswith("{"): + return 0 + + data = cast( + "dict[str, Any]", + json.loads(line) if orjson is None else orjson.loads(line), + ) + updated = False + + if text := data.get("text"): + self.streaming_texts.append(text) + updated = True + + if usage := cast("dict[str, int | dict[str, int]]", data.get("usage")): + self.streaming_usage = usage + + return 1 if updated else 0 + + def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: + input_metrics, output_metrics = self.extract_metrics(self.streaming_usage) + + return GenerationResponse( + request_id=request.request_id, + request_args=str( + request.arguments.model_dump() if request.arguments else None + ), + text="".join(self.streaming_texts), + input_metrics=input_metrics, + output_metrics=output_metrics, + 
) + + def extract_metrics( + self, usage: dict[str, int | dict[str, int]] | None + ) -> tuple[UsageMetrics, UsageMetrics]: + if not usage: + return UsageMetrics(), UsageMetrics() + + input_details = cast("dict[str, int]", usage.get("input_token_details", {})) + output_details = cast("dict[str, int]", usage.get("output_token_details", {})) + + return UsageMetrics( + text_tokens=( + input_details.get("text_tokens") + or cast("int", usage.get("input_tokens")) + ), + audio_tokens=( + input_details.get("audio_tokens") + or cast("int", usage.get("audio_tokens")) + ), + audio_seconds=( + input_details.get("seconds") or cast("int", usage.get("seconds")) + ), + ), UsageMetrics( + text_tokens=output_details.get("text_tokens") + or cast("int", usage.get("output_tokens")), + ) diff --git a/src/guidellm/benchmark/__init__.py b/src/guidellm/benchmark/__init__.py index 76324a65..57756d15 100644 --- a/src/guidellm/benchmark/__init__.py +++ b/src/guidellm/benchmark/__init__.py @@ -1,25 +1,5 @@ -from .aggregator import ( - Aggregator, - AggregatorState, - CompilableAggregator, - GenerativeRequestsAggregator, - GenerativeStatsProgressAggregator, - InjectExtrasAggregator, - SchedulerStatsAggregator, - SerializableAggregator, -) from .benchmarker import Benchmarker from .entrypoints import benchmark_generative_text, reimport_benchmarks_report -from .objects import ( - Benchmark, - BenchmarkMetrics, - BenchmarkSchedulerStats, - BenchmarkT, - GenerativeBenchmark, - GenerativeBenchmarksReport, - GenerativeMetrics, - GenerativeRequestStats, -) from .output import ( GenerativeBenchmarkerConsole, GenerativeBenchmarkerCSV, @@ -40,20 +20,34 @@ BenchmarkerProgressGroup, GenerativeConsoleBenchmarkerProgress, ) +from .schemas import ( + Benchmark, + BenchmarkArgs, + BenchmarkerDict, + BenchmarkSchedulerStats, + EstimatedBenchmarkState, + GenerativeAudioMetricsSummary, + GenerativeBenchmark, + GenerativeBenchmarksReport, + GenerativeImageMetricsSummary, + GenerativeMetrics, + GenerativeMetricsSummary, + GenerativeVideoMetricsSummary, + SchedulerDict, +) __all__ = [ - "Aggregator", - "AggregatorState", "AsyncProfile", "Benchmark", - "BenchmarkMetrics", + "BenchmarkArgs", "BenchmarkSchedulerStats", - "BenchmarkT", "Benchmarker", + "BenchmarkerDict", "BenchmarkerProgress", "BenchmarkerProgressGroup", - "CompilableAggregator", "ConcurrentProfile", + "EstimatedBenchmarkState", + "GenerativeAudioMetricsSummary", "GenerativeBenchmark", "GenerativeBenchmarkerCSV", "GenerativeBenchmarkerConsole", @@ -61,15 +55,13 @@ "GenerativeBenchmarkerOutput", "GenerativeBenchmarksReport", "GenerativeConsoleBenchmarkerProgress", + "GenerativeImageMetricsSummary", "GenerativeMetrics", - "GenerativeRequestStats", - "GenerativeRequestsAggregator", - "GenerativeStatsProgressAggregator", - "InjectExtrasAggregator", + "GenerativeMetricsSummary", + "GenerativeVideoMetricsSummary", "Profile", "ProfileType", - "SchedulerStatsAggregator", - "SerializableAggregator", + "SchedulerDict", "SweepProfile", "SynchronousProfile", "ThroughputProfile", diff --git a/src/guidellm/benchmark/aggregator.py b/src/guidellm/benchmark/aggregator.py deleted file mode 100644 index 2dc3c56f..00000000 --- a/src/guidellm/benchmark/aggregator.py +++ /dev/null @@ -1,1261 +0,0 @@ -""" -Benchmark result aggregation and compilation interfaces. - -Provides protocols and implementations for collecting, processing, and compiling -benchmark data from scheduler executions into final metrics and statistics. - -Classes: - Aggregator: Protocol for processing benchmark data updates. 
- CompilableAggregator: Protocol for aggregators that can compile final results. - SchedulerStatsAggregator: Aggregates scheduler timing and performance metrics. - GenerativeRequestsStatsProgressAggregator: Tracks generation metrics during run. - GenerativeRequestsAggregator: Compiles complete generative benchmark results. - -Functions: - add_aggregate_metric: Helper for accumulating timing and count metrics. - -Type Variables: - RequestT: Generic request object type. - ResponseT: Generic response object type. - RequestTimingsT: Generic request timing object type. -""" - -from __future__ import annotations - -import math -import random -from abc import ABC, abstractmethod -from typing import ( - Any, - ClassVar, - Generic, - Literal, - Protocol, - runtime_checkable, -) - -from pydantic import Field, PrivateAttr - -from guidellm.backends import ( - GenerationRequest, - GenerationResponse, -) -from guidellm.benchmark.objects import ( - BenchmarkSchedulerStats, - GenerativeMetrics, - GenerativeRequestStats, -) -from guidellm.scheduler import ( - RequestT, - ResponseT, - ScheduledRequestInfo, - SchedulerState, -) -from guidellm.settings import settings -from guidellm.utils import ( - InfoMixin, - PydanticClassRegistryMixin, - StatusBreakdown, - StatusDistributionSummary, - all_defined, - safe_divide, - safe_getattr, -) - -__all__ = [ - "Aggregator", - "AggregatorState", - "CompilableAggregator", - "GenerativeRequestsAggregator", - "GenerativeStatsProgressAggregator", - "InjectExtrasAggregator", - "SchedulerStatsAggregator", - "SerializableAggregator", -] - - -class AggregatorState(dict[str, Any]): - def add_metric( - self, - key: str, - value: int | float | None, - start_val: int | float | None = 0.0, - count: int | None = 1, - duration: float | None = None, - duration_div: Literal["total", "avg"] = "total", - prefix: str | None = None, - ): - """ - Add timing or count metrics to aggregation state. - """ - if prefix: - self.add_metric( - key=f"{prefix}_{key}", - value=value, - start_val=start_val, - count=count, - duration=duration, - duration_div=duration_div, - ) - return - - if not all_defined(value, start_val, count): - return - - delta_val = value - start_val - self[f"{key}_total"] = self.get(f"{key}_total", 0) + delta_val - self[f"{key}_count"] = self.get(f"{key}_count", 0) + count - self[f"{key}_avg"] = safe_divide( - self.get(f"{key}_total"), self.get(f"{key}_count") - ) - - if all_defined(duration): - self[f"{key}_duration"] = duration - self[f"{key}_rate"] = safe_divide( - self.get(f"{key}_{duration_div}"), duration - ) - - def set_metric( - self, - key: str, - value: int | float | None, - type_: Literal["total", "count", "avg", "duration", "rate"], - prefix: str | None = None, - ): - if prefix: - self.set_metric( - key=f"{prefix}_{key}", - value=value, - type_=type_, - prefix=None, - ) - return - - self[f"{key}_{type_}"] = value - - def get_metric( - self, - key: str, - type_: Literal["total", "count", "avg", "duration", "rate"], - default: int | float | None = None, - prefix: str | None = None, - ) -> int | float | None: - if prefix: - return self.get_metric( - key=f"{prefix}_{key}", - type_=type_, - default=default, - ) - - return self.get(f"{key}_{type_}", default) - - -@runtime_checkable -class Aggregator(Protocol[ResponseT, RequestT]): - """ - Protocol for processing benchmark data updates during execution. - - Defines the interface for aggregators that collect and process request/response - data from scheduler executions. 
Implementations update aggregation state with - each completed request for eventual compilation into final metrics. - """ - - def __call__( - self, - state: AggregatorState, - response: ResponseT | None, - request: RequestT, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> dict[str, Any] | None: - """ - Process a completed request and update aggregation state. - - :param state: Current aggregation state to update in-place. - :param response: Response generated for the request, if successful. - :param request: The processed request object. - :param request_info: Scheduling metadata and timing information. - :param scheduler_state: Current scheduler execution state. - :return: Optional intermediate updates for progress reporting. - """ - - -@runtime_checkable -class CompilableAggregator(Protocol[ResponseT, RequestT]): - """ - Protocol for aggregators that compile final results from aggregated state. - - Extends the Aggregator protocol with the ability to transform accumulated - state into final benchmark results and metrics after execution completes. - """ - - def __call__( - self, - state: AggregatorState, - response: ResponseT | None, - request: RequestT, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> dict[str, Any] | None: - """ - Process a completed request and update aggregation state. - - :param state: Current aggregation state to update in-place. - :param response: Response generated for the request, if successful. - :param request: The processed request object. - :param request_info: Scheduling metadata and timing information. - :param scheduler_state: Current scheduler execution state. - :return: Optional intermediate updates for progress reporting. - """ - - def compile( - self, state: AggregatorState, scheduler_state: SchedulerState - ) -> dict[str, Any]: - """ - Compile aggregated state into final benchmark results. - - :param agg_state: The accumulated aggregation state. - :param scheduler_state: Final scheduler execution state. - :return: Compiled benchmark results and metrics. - """ - - -class SerializableAggregator( - PydanticClassRegistryMixin[type["SerializableAggregator"]], - ABC, - Generic[ResponseT, RequestT], -): - schema_discriminator: ClassVar[str] = "type_" - - @classmethod - def __pydantic_schema_base_type__(cls) -> type[SerializableAggregator]: - if cls.__name__ == "SerializableAggregator": - return cls - - return SerializableAggregator - - @classmethod - @abstractmethod - def validated_kwargs(cls, *args, **kwargs) -> dict[str, Any]: - """ - Validate and process arguments for constraint creation. - - Must be implemented by subclasses to handle their specific parameter patterns. - - :param args: Positional arguments passed to the constraint - :param kwargs: Keyword arguments passed to the constraint - :return: Validated dictionary of parameters for constraint creation - :raises NotImplementedError: Must be implemented by subclasses - """ - ... - - @classmethod - def resolve( - cls, - aggregators: dict[ - str, - Any | dict[str, Any] | Aggregator | CompilableAggregator, - ], - ) -> dict[str, Aggregator | CompilableAggregator]: - """ - Resolve mixed aggregator specifications to callable aggregators. 
- - :param aggregators: Dictionary mapping aggregator keys to specifications - :return: Dictionary mapping aggregator keys to callable functions - :raises ValueError: If any key is not registered in the factory - """ - resolved = {} - - for key, val in aggregators.items(): - if isinstance(val, (Aggregator, CompilableAggregator)): - resolved[key] = val - else: - aggregator_class = cls.get_registered_object(key) - kwargs = aggregator_class.validated_kwargs(**val) - resolved[key] = aggregator_class(**kwargs) - - return resolved - - type_: Literal["aggregator"] = Field(default="aggregator", description="") - - @abstractmethod - def __call__( - self, - state: AggregatorState, - response: ResponseT | None, - request: RequestT, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> dict[str, Any] | None: - """ - Process a completed request and update aggregation state. - - :param agg_state: Current aggregation state to update in-place. - :param response: Response generated for the request, if successful. - :param request: The processed request object. - :param request_info: Scheduling metadata and timing information. - :param scheduler_state: Current scheduler execution state. - :return: Optional intermediate updates for progress reporting. - """ - - @abstractmethod - def compile( - self, state: AggregatorState, scheduler_state: SchedulerState - ) -> dict[str, Any]: - """ - Compile aggregated state into final benchmark results. - - :param agg_state: The accumulated aggregation state. - :param scheduler_state: Final scheduler execution state. - :return: Compiled benchmark results and metrics. - """ - - -@SerializableAggregator.register("inject_extras") -class InjectExtrasAggregator(SerializableAggregator[ResponseT, RequestT], InfoMixin): - """ - Aggregator for injecting extra metadata into the output. - """ - - @classmethod - def validated_kwargs(cls, extras: dict[str, Any], **_kwargs) -> dict[str, Any]: - return {"extras": extras} - - type_: Literal["inject_extras"] = Field(default="inject_extras") - extras: dict[str, Any] | None = Field(default_factory=None) - - def __call__( - self, - state: AggregatorState, - response: ResponseT | None, - request: RequestT, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> dict[str, Any] | None: - """ - Inject extra metadata into the aggregation state. - - :param agg_state: Current aggregation state to update. - :param response: Response generated for the request, if successful. - :param request: The processed request object. - :param request_info: Scheduling metadata and timing information. - :param scheduler_state: Current scheduler execution state. - :return: Updated aggregation state with injected extras. - """ - _ = (state, response, request, request_info, scheduler_state) # unused - return None - - def compile( - self, state: AggregatorState, scheduler_state: SchedulerState - ) -> dict[str, Any]: - _ = (state, scheduler_state) # unused - return {"extras": self.extras} if self.extras else {} - - -@SerializableAggregator.register("scheduler_stats") -class SchedulerStatsAggregator(SerializableAggregator[ResponseT, RequestT], InfoMixin): - """ - Aggregates scheduler timing and performance metrics. - - Collects timing data for various scheduler phases including queuing, - resolution, and processing delays to generate performance statistics. 
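Each timing delta is folded into the shared AggregatorState defined earlier in this module; as a minimal sketch with illustrative timestamps, reusing only the add_metric/get_metric helpers shown above:

    state = AggregatorState()
    # Two requests spent roughly 0.3s and 0.5s queued; add_metric stores the deltas
    # under queued_time_total/queued_time_count and keeps queued_time_avg current.
    state.add_metric(key="queued_time", value=10.4, start_val=10.1)
    state.add_metric(key="queued_time", value=11.0, start_val=10.5)
    state.get_metric(key="queued_time", type_="avg")  # ~0.4 seconds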
- """ - - @classmethod - def validated_kwargs(cls, *_args, **_kwargs) -> dict[str, Any]: - return {} - - type_: Literal["scheduler_stats"] = Field(default="scheduler_stats") - - def __call__( - self, - state: AggregatorState, - response: ResponseT | None, - request: RequestT, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> dict[str, Any] | None: - """ - Aggregate scheduler timing metrics for a completed request. - - :param agg_state: Current aggregation state to update. - :param response: Response generated for the request, if successful. - :param request: The processed request object. - :param request_info: Scheduling metadata and timing information. - :param scheduler_state: Current scheduler execution state. - :return: Updated aggregation state for intermediate reporting. - """ - _ = (response, request, scheduler_state) # unused - if request_info.status not in ("completed", "errored", "cancelled"): - # Only compile scheduler stats for processed requests - return None - - state["updated_scheduler_stats"] = True - state.add_metric( - key="queued_time", - value=request_info.scheduler_timings.dequeued, - start_val=request_info.scheduler_timings.queued, - ) - state.add_metric( - key="worker_resolve_start_delay", - value=request_info.scheduler_timings.resolve_start, - start_val=request_info.scheduler_timings.scheduled_at, - ) - state.add_metric( - key="worker_resolve_time", - value=request_info.scheduler_timings.resolve_end, - start_val=request_info.scheduler_timings.resolve_start, - ) - state.add_metric( - key="worker_resolve_end_delay", - value=request_info.scheduler_timings.resolve_end, - start_val=safe_getattr(request_info.request_timings, "request_end"), - ) - state.add_metric( - key="finalized_delay", - value=request_info.scheduler_timings.finalized, - start_val=request_info.scheduler_timings.resolve_end, - ) - state.add_metric( - key="worker_targeted_start_delay", - value=request_info.scheduler_timings.resolve_start, - start_val=request_info.scheduler_timings.targeted_start, - ) - state.add_metric( - key="request_start_delay", - value=request_info.scheduler_timings.resolve_start, - start_val=safe_getattr(request_info.request_timings, "request_start"), - ) - state.add_metric( - key="request_time", - value=safe_getattr(request_info.request_timings, "request_end"), - start_val=safe_getattr(request_info.request_timings, "request_start"), - ) - state.add_metric( - key="request_targeted_start_delay", - value=safe_getattr(request_info.request_timings, "request_start"), - start_val=request_info.scheduler_timings.targeted_start, - ) - - return state - - def compile( - self, state: AggregatorState, scheduler_state: SchedulerState - ) -> dict[Literal["run_stats"], BenchmarkSchedulerStats]: - """ - Compile scheduler timing metrics into benchmark statistics. - - :param agg_state: Accumulated timing data and counts. - :param scheduler_state: Final scheduler execution state. - :return: Dictionary containing compiled scheduler statistics. 
- """ - return { - "run_stats": BenchmarkSchedulerStats( - start_time=scheduler_state.start_time, - end_time=scheduler_state.end_time, - requests_made=StatusBreakdown[int, int, int, int]( - successful=scheduler_state.successful_requests, - incomplete=scheduler_state.cancelled_requests, - errored=scheduler_state.errored_requests, - total=( - scheduler_state.successful_requests - + scheduler_state.cancelled_requests - + scheduler_state.errored_requests - ), - ), - queued_time_avg=state.get_metric( - key="queued_time", type_="avg", default=0.0 - ), - worker_resolve_start_delay_avg=state.get_metric( - key="worker_resolve_start_delay", type_="avg", default=0.0 - ), - worker_resolve_time_avg=state.get_metric( - key="worker_resolve_time", type_="avg", default=0.0 - ), - worker_resolve_end_delay_avg=state.get_metric( - key="worker_resolve_end_delay", type_="avg", default=0.0 - ), - finalized_delay_avg=state.get_metric( - key="finalized_delay", type_="avg", default=0.0 - ), - worker_targeted_start_delay_avg=state.get_metric( - key="worker_targeted_start_delay", type_="avg", default=0.0 - ), - request_start_delay_avg=state.get_metric( - key="request_start_delay", type_="avg", default=0.0 - ), - request_time_avg=state.get_metric( - key="request_time", type_="avg", default=0.0 - ), - request_targeted_start_delay_avg=state.get_metric( - key="request_targeted_start_delay", type_="avg", default=0.0 - ), - ), - } - - -@SerializableAggregator.register("generative_stats_progress") -class GenerativeStatsProgressAggregator( - SerializableAggregator[GenerationResponse, GenerationRequest] -): - """ - Tracks generative model metrics during benchmark execution. - - Aggregates token-level metrics including time to first token, inter-token - latency, and token counts for real-time progress monitoring. - """ - - @classmethod - def validated_kwargs(cls, *_args, **_kwargs) -> dict[str, Any]: - return {} - - type_: Literal["generative_stats_progress"] = Field( - default="generative_stats_progress" - ) - - def __call__( - self, - state: AggregatorState, - response: GenerationResponse | None, - request: GenerationRequest, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> dict[str, Any] | None: - """ - Aggregate generative model metrics for a completed request. - - :param agg_state: Current aggregation state to update. - :param response: Generation response with token and timing data. - :param request: The processed generation request. - :param request_info: Scheduling metadata and timing information. - :param scheduler_state: Current scheduler execution state. - :return: Updated aggregation state for progress reporting. 
- """ - _ = (request,) # unused - - # Request Concurrency - state.set_metric( - key="requests", - value=scheduler_state.processing_requests, - type_="avg", - ) - - if request_info.status in {"completed", "errored", "cancelled"}: - # Only compile progress stats for processed requests - state["updated_generative_stats"] = True - start_time = scheduler_state.start_time - end_time = ( - safe_getattr(request_info.request_timings, "request_end") - or request_info.scheduler_timings.resolve_end - ) - duration = end_time - start_time if end_time else None - - for prefix in (request_info.status, None): - requests_count = ( - scheduler_state.processed_requests - if prefix is None - else scheduler_state.successful_requests - if request_info.status == "completed" - else scheduler_state.cancelled_requests - if request_info.status == "cancelled" - else scheduler_state.errored_requests - ) - - # Requests per Second - if duration is not None: - state.set_metric( - key="requests", - value=safe_divide(requests_count, duration), - type_="rate", - prefix=prefix, - ) - - # Request Latency - state.add_metric( - key="request_latency", - value=safe_getattr(request_info.request_timings, "request_end"), - start_val=safe_getattr( - request_info.request_timings, "request_start" - ), - prefix=prefix, - ) - - # Time to First Token - state.add_metric( - key="time_to_first_token", - value=safe_getattr(request_info.request_timings, "first_iteration"), - start_val=safe_getattr( - request_info.request_timings, "request_start" - ), - prefix=prefix, - ) - - output_tokens = response.output_stats.value() if response else None - prompt_tokens = response.prompt_stats.value() if response else None - total_tokens = response.total_tokens() if response else None - - # Inter Token Latency - state.add_metric( - key="inter_token_latency", - value=safe_getattr(request_info.request_timings, "last_iteration"), - start_val=safe_getattr( - request_info.request_timings, "first_iteration" - ), - count=( - output_tokens - 1 - if output_tokens and output_tokens > 1 - else None - ), - prefix=prefix, - ) - - # Time per Output Token - state.add_metric( - key="time_per_output_token", - value=safe_getattr(request_info.request_timings, "request_start"), - start_val=safe_getattr( - request_info.request_timings, "last_iteration" - ), - count=output_tokens, - prefix=prefix, - ) - - # Prompt Tokens - state.add_metric( - key="prompt_tokens", - value=prompt_tokens, - duration=duration, - prefix=prefix, - ) - - # Output Tokens - state.add_metric( - key="output_tokens", - value=output_tokens, - duration=duration, - prefix=prefix, - ) - - # Total Tokens - state.add_metric( - key="total_tokens", - value=total_tokens, - duration=duration, - prefix=prefix, - ) - - return state - - def compile( - self, state: AggregatorState, scheduler_state: SchedulerState - ) -> dict[str, Any]: - """ - Compile progress metrics into final results. - - GenerativeStatsProgressAggregator is primarily for progress tracking, - so compilation returns the aggregated state as-is. - - :param agg_state: The accumulated aggregation state. - :param scheduler_state: Final scheduler execution state. - :return: The aggregated state as final results. - """ - _ = (state, scheduler_state) # unused - return {} - - -@SerializableAggregator.register("generative_requests") -class GenerativeRequestsAggregator( - SerializableAggregator[GenerationResponse, GenerationRequest], -): - """ - Compiles complete generative benchmark results with warmup/cooldown filtering. 
- - Aggregates request data during execution and compiles comprehensive metrics - including timing distributions, token statistics, and throughput measurements. - Supports filtering warmup and cooldown periods from final results. - """ - - @classmethod - def validated_kwargs( - cls, - sample_requests: int | None = 20, - warmup: int | float | None = None, - cooldown: int | float | None = None, - **_kwargs, - ) -> dict[str, Any]: - return { - "sample_requests": sample_requests, - "warmup": warmup, - "cooldown": cooldown, - } - - type_: Literal["generative_requests"] = Field(default="generative_requests") - - sample_requests: int | None = Field(default=20, description="") - warmup: int | float | None = Field( - default=None, - description="Number of warmup requests to ignore at benchmark start", - ) - cooldown: int | float | None = Field( - default=None, - description="Number of cooldown requests to ignore at benchmark end", - ) - _in_cooldown: bool = PrivateAttr(False) - _in_warmup: bool = PrivateAttr(False) - - def __call__( - self, - state: AggregatorState, - response: GenerationResponse | None, - request: GenerationRequest, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> dict[str, Any] | None: - """ - Collect completed requests for final compilation. - - Filters requests based on warmup/cooldown settings and categorizes by - completion status for comprehensive benchmark analysis. - - :param agg_state: Current aggregation state to update. - :param response: Generation response data. - :param request: The processed generation request. - :param request_info: Scheduling metadata and timing information. - :param scheduler_state: Current scheduler execution state. - :return: None, as this aggregator only collects for final compilation. - """ - # Skip invalid requests - if request_info.status not in {"completed", "canceled", "errored"} or ( - request_info.status == "canceled" - and safe_getattr(request_info.scheduler_timings, "resolve_start") is None - # Canceled requests that never started should not be kept - ): - return None - - status = { - "updated_generative_requests": True, - "requests_in_warmup": False, - "requests_in_cooldown": False, - } - - if self._is_in_warmup(request_info, scheduler_state): - status["requests_in_warmup"] = True - return status - - if self._is_in_cooldown(request_info, scheduler_state): - status["requests_in_cooldown"] = True - return status - - if "completed" not in state: - state["completed"] = [] - state["errored"] = [] - state["incomplete"] = [] - - # Categorize request by status - if request_info.status == "completed": - state["completed"].append((response, request, request_info)) - elif request_info.status == "canceled": - state["incomplete"].append((response, request, request_info)) - else: - state["errored"].append((response, request, request_info)) - - return status - - def compile( - self, - state: AggregatorState, - scheduler_state: SchedulerState, # noqa: ARG002 - ) -> dict[str, Any]: - """ - Compile aggregated requests into comprehensive benchmark results. - - Transforms collected request data into detailed metrics including timing - distributions, token statistics, throughput measurements, and status breakdowns. - - :param agg_state: Accumulated request data categorized by completion status. - :param scheduler_state: Final scheduler execution state. - :return: Complete benchmark results with metrics and request statistics. 
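The warmup/cooldown thresholds consumed by _is_in_warmup/_is_in_cooldown further below accept either a fraction of the run (values in (0, 1)) or an absolute request count / number of seconds (values >= 1). A minimal construction sketch with illustrative settings:

    aggregator = GenerativeRequestsAggregator(
        sample_requests=20,  # keep at most 20 sampled requests per status bucket
        warmup=0.1,          # drop requests from the first 10% of the run
        cooldown=60,         # drop requests from the final 60 requests/seconds
    )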
- """ - successful: list[GenerativeRequestStats] = [ - self._create_generative_request_stats(response, request, request_info) - for (response, request, request_info) in state.get("completed", []) - ] - incomplete: list[GenerativeRequestStats] = [ - self._create_generative_request_stats(response, request, request_info) - for (response, request, request_info) in state.get("incomplete", []) - ] - errored: list[GenerativeRequestStats] = [ - self._create_generative_request_stats(response, request, request_info) - for (response, request, request_info) in state.get("errored", []) - ] - - # Use all requests for metrics calculations (not sampled) - total: list[GenerativeRequestStats] = successful + incomplete + errored - total_types: list[Literal["successful", "incomplete", "error"]] = [ - *["successful"] * len(successful), - *["incomplete"] * len(incomplete), - *["error"] * len(errored), - ] - start_time = min( - [math.inf] - + [ - req.scheduler_info.request_timings.request_start - for req in total - if req.scheduler_info.request_timings.request_start is not None - ] - ) - end_time = max( - [-1 * math.inf] - + [ - req.scheduler_info.request_timings.request_end - for req in total - if req.scheduler_info.request_timings.request_end is not None - ] - ) - - return { - "start_time": start_time, - "end_time": end_time, - "request_totals": StatusBreakdown[int, int, int, int]( - successful=len(successful), - incomplete=len(incomplete), - errored=len(errored), - total=len(total), - ), - "requests": StatusBreakdown[ - list[GenerativeRequestStats], - list[GenerativeRequestStats], - list[GenerativeRequestStats], - list[GenerativeRequestStats], - ]( - successful=self._sample_request_stats(successful, self.sample_requests), - incomplete=self._sample_request_stats(incomplete, self.sample_requests), - errored=self._sample_request_stats(errored, self.sample_requests), - ), - "metrics": GenerativeMetrics( - requests_per_second=self._calculate_requests_per_second( - statuses=total_types, requests=total - ), - request_concurrency=self._calculate_request_concurrency( - statuses=total_types, requests=total - ), - request_latency=self._calculate_request_latency( - statuses=total_types, requests=total - ), - prompt_token_count=self._calculate_prompt_token_count( - statuses=total_types, requests=total - ), - output_token_count=self._calculate_output_token_count( - statuses=total_types, requests=total - ), - total_token_count=self._calculate_total_token_count( - statuses=total_types, requests=total - ), - time_to_first_token_ms=self._calculate_time_to_first_token_ms( - statuses=total_types, requests=total - ), - time_per_output_token_ms=self._calculate_time_per_output_token_ms( - statuses=total_types, requests=total - ), - inter_token_latency_ms=self._calculate_inter_token_latency_ms( - statuses=total_types, requests=total - ), - output_tokens_per_second=self._calculate_output_tokens_per_second( - statuses=total_types, requests=total - ), - tokens_per_second=self._calculate_tokens_per_second( - statuses=total_types, requests=total - ), - ), - } - - def _is_in_warmup( - self, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> bool: - """Check if the current request is within the warmup period.""" - if self.warmup is None: - return False - - if 0 < self.warmup < 1: # Percentage-based warmup - return ( - scheduler_state.remaining_fraction is not None - and scheduler_state.remaining_fraction > (1 - self.warmup) - ) - - if self.warmup >= 1: # Count/time-based warmup - if 
scheduler_state.processed_requests < self.warmup: - return True - - current_time = request_info.scheduler_timings.targeted_start - return ( - current_time is not None - and (current_time - scheduler_state.start_time) < self.warmup - ) - - return False - - def _is_in_cooldown( - self, - request_info: ScheduledRequestInfo, - scheduler_state: SchedulerState, - ) -> bool: - """Check if the current request is within the cooldown period.""" - if self.cooldown is None: - return False - - if 0 < self.cooldown < 1: # Percentage-based cooldown - return ( - scheduler_state.remaining_fraction is not None - and scheduler_state.remaining_fraction < self.cooldown - ) - - if self.cooldown >= 1: # Count/time-based cooldown - if scheduler_state.remaining_requests <= self.cooldown: - return True - - current_time = ( - request_info.scheduler_timings.resolve_end - or request_info.scheduler_timings.targeted_start - ) - return ( - current_time is not None - and scheduler_state.remaining_duration is not None - and scheduler_state.remaining_duration < self.cooldown - ) - - return False - - @classmethod - def _create_generative_request_stats( - cls, - response: GenerationResponse | None, - request: GenerationRequest, - request_info: ScheduledRequestInfo, - ) -> GenerativeRequestStats: - return GenerativeRequestStats( - request_id=request.request_id, - request_type=request.request_type, - request_args=str(request.arguments), - output=response.text if response else None, - iterations=response.iterations if response else 0, - prompt_tokens=( - response.prompt_stats.value(settings.preferred_prompt_tokens_source) - if response - else None - ), - output_tokens=( - response.output_stats.value(settings.preferred_output_tokens_source) - if response - else None - ), - total_tokens=( - response.total_tokens(settings.preferred_output_tokens_source) - if response - else None - ), - scheduler_info=request_info, - ) - - @classmethod - def _sample_request_stats( - cls, stats: list[GenerativeRequestStats], sample_size: int | None - ) -> list[GenerativeRequestStats]: - if sample_size is None or sample_size <= 0 or not stats: - return stats - - return random.sample(stats, min(sample_size, len(stats))) - - @classmethod - def _calculate_requests_per_second( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_times = [] - - for status, request in zip(statuses, requests): - if not all_defined( - safe_getattr(request.scheduler_info.request_timings, "request_start"), - safe_getattr(request.scheduler_info.request_timings, "request_end"), - ): - continue - - filtered_statuses.append(status) - filtered_times.append( - ( - request.scheduler_info.request_timings.request_start, - request.scheduler_info.request_timings.request_end, - ) - ) - - return StatusDistributionSummary.from_request_times( - request_types=filtered_statuses, - requests=filtered_times, - distribution_type="rate", - ) - - @classmethod - def _calculate_request_concurrency( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_times = [] - - for status, request in zip(statuses, requests): - if not all_defined( - safe_getattr(request.scheduler_info.request_timings, "request_start"), - safe_getattr(request.scheduler_info.request_timings, "request_end"), - ): - continue - - filtered_statuses.append(status) - 
filtered_times.append( - ( - request.scheduler_info.request_timings.request_start, - request.scheduler_info.request_timings.request_end, - ) - ) - - return StatusDistributionSummary.from_request_times( - request_types=filtered_statuses, - requests=filtered_times, - distribution_type="concurrency", - ) - - @classmethod - def _calculate_request_latency( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_values = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.request_latency): - continue - - filtered_statuses.append(status) - filtered_values.append(request.request_latency) - - return StatusDistributionSummary.from_values( - value_types=filtered_statuses, - values=filtered_values, - ) - - @classmethod - def _calculate_prompt_token_count( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_values = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.prompt_tokens): - continue - - filtered_statuses.append(status) - filtered_values.append(request.prompt_tokens) - - return StatusDistributionSummary.from_values( - value_types=filtered_statuses, - values=filtered_values, - ) - - @classmethod - def _calculate_output_token_count( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_values = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.output_tokens): - continue - - filtered_statuses.append(status) - filtered_values.append(request.output_tokens) - - return StatusDistributionSummary.from_values( - value_types=filtered_statuses, - values=filtered_values, - ) - - @classmethod - def _calculate_total_token_count( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_values = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.total_tokens): - continue - - filtered_statuses.append(status) - filtered_values.append(request.total_tokens) - - return StatusDistributionSummary.from_values( - value_types=filtered_statuses, - values=filtered_values, - ) - - @classmethod - def _calculate_time_to_first_token_ms( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_values = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.time_to_first_token_ms): - continue - - filtered_statuses.append(status) - filtered_values.append(request.time_to_first_token_ms) - - return StatusDistributionSummary.from_values( - value_types=filtered_statuses, - values=filtered_values, - ) - - @classmethod - def _calculate_time_per_output_token_ms( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_values = [] - filtered_weights = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.time_to_first_token_ms): - continue - - # Add time to first token separately to better reflect 
in distribution - filtered_statuses.append(status) - filtered_values.append(request.time_to_first_token_ms) - filtered_weights.append(1) - - if not all_defined(request.inter_token_latency_ms): - continue - - # Add tokens after the first token to get the full distribution - filtered_statuses.append(status) - filtered_values.append(request.inter_token_latency_ms) - filtered_weights.append(request.output_tokens - 1) - - return StatusDistributionSummary.from_values( - value_types=filtered_statuses, - values=filtered_values, - weights=filtered_weights, - ) - - @classmethod - def _calculate_inter_token_latency_ms( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_values = [] - filtered_weights = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.inter_token_latency_ms): - continue - - filtered_statuses.append(status) - filtered_values.append(request.inter_token_latency_ms) - filtered_weights.append(request.output_tokens - 1) - - return StatusDistributionSummary.from_values( - value_types=filtered_statuses, - values=filtered_values, - weights=filtered_weights, - ) - - @classmethod - def _calculate_output_tokens_per_second( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_request_times = [] - filtered_first_iter_times = [] - filtered_iter_counts = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.output_tokens_per_second): - continue - - filtered_statuses.append(status) - filtered_request_times.append( - ( - request.scheduler_info.request_timings.request_start, - request.scheduler_info.request_timings.request_end, - ) - ) - filtered_first_iter_times.append( - request.scheduler_info.request_timings.first_iteration - ) - filtered_iter_counts.append(request.output_tokens) - - return StatusDistributionSummary.from_iterable_request_times( - request_types=filtered_statuses, - requests=filtered_request_times, - first_iter_times=filtered_first_iter_times, - iter_counts=filtered_iter_counts, - ) - - @classmethod - def _calculate_tokens_per_second( - cls, - statuses: list[Literal["successful", "incomplete", "error"]], - requests: list[GenerativeRequestStats], - ) -> StatusDistributionSummary: - filtered_statuses = [] - filtered_request_times = [] - filtered_first_iter_times = [] - filtered_iter_counts = [] - filtered_first_iter_counts = [] - - for status, request in zip(statuses, requests): - if not all_defined(request.tokens_per_second): - continue - - filtered_statuses.append(status) - filtered_request_times.append( - ( - request.scheduler_info.request_timings.request_start, - request.scheduler_info.request_timings.request_end, - ) - ) - filtered_first_iter_times.append( - request.scheduler_info.request_timings.first_iteration - ) - filtered_iter_counts.append(request.output_tokens - 1) - filtered_first_iter_counts.append(request.prompt_tokens + 1) - - return StatusDistributionSummary.from_iterable_request_times( - request_types=filtered_statuses, - requests=filtered_request_times, - first_iter_times=filtered_first_iter_times, - iter_counts=filtered_iter_counts, - first_iter_counts=filtered_first_iter_counts, - ) diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index ae591c23..fd8c1aa8 100644 --- a/src/guidellm/benchmark/benchmarker.py 
+++ b/src/guidellm/benchmark/benchmarker.py @@ -20,21 +20,16 @@ import uuid from abc import ABC from collections.abc import AsyncIterator, Iterable -from typing import ( - Any, - Generic, -) +from typing import Generic -from guidellm.benchmark.aggregator import ( - Aggregator, - AggregatorState, - CompilableAggregator, -) -from guidellm.benchmark.objects import BenchmarkerDict, BenchmarkT, SchedulerDict from guidellm.benchmark.profile import Profile +from guidellm.benchmark.schemas import ( + BenchmarkArgs, + BenchmarkT, + EstimatedBenchmarkState, +) from guidellm.scheduler import ( BackendInterface, - Constraint, Environment, NonDistributedEnvironment, RequestT, @@ -43,8 +38,7 @@ SchedulerState, SchedulingStrategy, ) -from guidellm.utils import InfoMixin, ThreadSafeSingletonMixin -from guidellm.utils.pydantic_utils import StandardBaseDict +from guidellm.utils import ThreadSafeSingletonMixin __all__ = ["Benchmarker"] @@ -67,18 +61,18 @@ class Benchmarker( async def run( self, + benchmark_class: type[BenchmarkT], requests: Iterable[RequestT | Iterable[RequestT | tuple[RequestT, float]]], backend: BackendInterface[RequestT, ResponseT], profile: Profile, - benchmark_class: type[BenchmarkT], - benchmark_aggregators: dict[ - str, - Aggregator[ResponseT, RequestT] | CompilableAggregator[ResponseT, RequestT], - ], environment: Environment | None = None, + sample_requests: int | None = 20, + warmup: float | None = None, + cooldown: float | None = None, + prefer_response_metrics: bool = True, ) -> AsyncIterator[ tuple[ - AggregatorState | None, + EstimatedBenchmarkState | None, BenchmarkT | None, SchedulingStrategy, SchedulerState | None, @@ -110,9 +104,16 @@ async def run( while strategy is not None: yield None, None, strategy, None - aggregators_state = { - key: AggregatorState() for key in benchmark_aggregators - } + args = BenchmarkArgs( + run_id=run_id, + run_index=len(profile.completed_strategies), + sample_requests=sample_requests, + warmup=warmup, + cooldown=cooldown, + prefer_response_metrics=prefer_response_metrics, + ) + estimated_state = EstimatedBenchmarkState() + scheduler_state = None async for ( response, @@ -126,33 +127,30 @@ async def run( env=environment, **constraints, ): - aggregators_update = AggregatorState() - for key, aggregator in benchmark_aggregators.items(): - update = aggregator( - aggregators_state[key], - response, - request, - request_info, - scheduler_state, - ) - if update: - aggregators_update.update(update) - yield aggregators_update, None, strategy, scheduler_state + benchmark_class.update_estimate( + args, + estimated_state, + response, + request, + request_info, + scheduler_state, + ) + yield estimated_state, None, strategy, scheduler_state - benchmark_kwargs = self._compile_benchmark_kwargs( - run_id=run_id, - run_index=len(profile.completed_strategies), + if scheduler_state is None: + raise RuntimeError("Scheduler state is None after execution.") + + benchmark = benchmark_class.compile( + args=args, + estimated_state=estimated_state, + scheduler_state=scheduler_state, profile=profile, requests=requests, backend=backend, environment=environment, - aggregators=benchmark_aggregators, - aggregators_state=aggregators_state, strategy=strategy, constraints=constraints, - scheduler_state=scheduler_state, ) - benchmark = benchmark_class(**benchmark_kwargs) yield None, benchmark, strategy, None try: @@ -160,107 +158,3 @@ async def run( except StopIteration: strategy = None constraints = None - - @classmethod - def _compile_benchmark_kwargs( - cls, - run_id: str, - 
run_index: int, - profile: Profile, - requests: Iterable[RequestT | Iterable[RequestT | tuple[RequestT, float]]], - backend: BackendInterface[RequestT, ResponseT], - environment: Environment, - aggregators: dict[ - str, - Aggregator[ResponseT, RequestT] | CompilableAggregator[ResponseT, RequestT], - ], - aggregators_state: dict[str, dict[str, Any]], - strategy: SchedulingStrategy, - constraints: dict[str, Any | dict[str, Any] | Constraint], - scheduler_state: SchedulerState | None, - ) -> dict[str, Any]: - """ - Compile benchmark construction parameters from execution results. - - Aggregates metadata from scheduler execution and compiles it into - structured parameters for benchmark object construction. - - :param run_id: Unique identifier for the benchmark run. - :param run_index: Index of this strategy in the benchmark profile. - :param profile: Benchmark profile containing strategy configuration. - :param requests: Request datasets used for the benchmark. - :param backend: Backend interface used for request processing. - :param environment: Execution environment for coordination. - :param aggregators: Metric aggregation functions by name. - :param aggregators_state: Current state of metric aggregators. - :param strategy: Scheduling strategy that was executed. - :param constraints: Runtime constraints applied during execution. - :param scheduler_state: Final state of scheduler execution. - :return: Dictionary of parameters for benchmark object construction. - :raises ValueError: If aggregator output conflicts with existing keys. - """ - benchmark_kwargs = { - "run_id": run_id, - "run_index": run_index, - "scheduler": SchedulerDict( - strategy=strategy, - constraints={ - key: InfoMixin.extract_from_obj(val) - for key, val in constraints.items() - }, - state=scheduler_state, - ), - "benchmarker": BenchmarkerDict( - profile=profile, - requests=InfoMixin.extract_from_obj(requests), - backend=backend.info, - environment=environment.info, - aggregators={ - key: InfoMixin.extract_from_obj(aggregator) - for key, aggregator in aggregators.items() - }, - ), - "env_args": StandardBaseDict(), - "extras": StandardBaseDict(), - } - - def _combine( - existing: dict[str, Any] | StandardBaseDict, - addition: dict[str, Any] | StandardBaseDict, - ) -> dict[str, Any] | StandardBaseDict: - if not isinstance(existing, (dict, StandardBaseDict)): - raise ValueError( - f"Existing value {existing} (type: {type(existing).__name__}) " - f"is not a valid type for merging." - ) - if not isinstance(addition, (dict, StandardBaseDict)): - raise ValueError( - f"Addition value {addition} (type: {type(addition).__name__}) " - f"is not a valid type for merging." 
- ) - - add_kwargs = ( - addition if isinstance(addition, dict) else addition.model_dump() - ) - - if isinstance(existing, dict): - return {**add_kwargs, **existing} - - return existing.__class__(**{**add_kwargs, **existing.model_dump()}) - - for key, aggregator in aggregators.items(): - if not isinstance(aggregator, CompilableAggregator): - continue - - compiled = aggregator.compile(aggregators_state[key], scheduler_state) - - for field_name, field_val in compiled.items(): - if field_name in benchmark_kwargs: - # If the key already exists, merge the values - benchmark_kwargs[field_name] = _combine( - benchmark_kwargs[field_name], field_val - ) - else: - benchmark_kwargs[field_name] = field_val - - return benchmark_kwargs diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index e400907a..a94c6282 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -4,35 +4,17 @@ from typing import Any, Callable, Literal from torch.utils.data import Sampler -from transformers import ( # type: ignore[import] - PreTrainedTokenizerBase, -) +from transformers import PreTrainedTokenizerBase # type: ignore[import] -from guidellm.backends import ( - Backend, - BackendType, - GenerationRequest, - GenerationResponse, -) -from guidellm.benchmark.aggregator import ( - Aggregator, - CompilableAggregator, - GenerativeRequestsAggregator, - GenerativeStatsProgressAggregator, - SchedulerStatsAggregator, - SerializableAggregator, -) +from guidellm.backends import Backend, BackendType from guidellm.benchmark.benchmarker import Benchmarker -from guidellm.benchmark.objects import GenerativeBenchmark, GenerativeBenchmarksReport from guidellm.benchmark.output import ( GenerativeBenchmarkerConsole, GenerativeBenchmarkerOutput, ) from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.progress import ( - BenchmarkerProgress, - BenchmarkerProgressGroup, -) +from guidellm.benchmark.progress import BenchmarkerProgress, BenchmarkerProgressGroup +from guidellm.benchmark.schemas import GenerativeBenchmark, GenerativeBenchmarksReport from guidellm.data import ( DataLoader, DatasetPreprocessor, @@ -46,6 +28,7 @@ NonDistributedEnvironment, StrategyType, ) +from guidellm.schemas import GenerationRequest, GenerationResponse from guidellm.utils import Console, InfoMixin __all__ = [ @@ -96,13 +79,11 @@ async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 # Updates configuration progress: tuple[str, ...] 
| list[str] | list[BenchmarkerProgress] | None = None, print_updates: bool = False, - # Aggregators configuration - add_aggregators: ( - dict[str, str | dict[str, Any] | Aggregator | CompilableAggregator] | None - ) = None, + # Benchmarker configuration + benchmark_cls: type[GenerativeBenchmark] = GenerativeBenchmark, + sample_requests: int | None = 10, warmup: float | None = None, cooldown: float | None = None, - sample_requests: int | None = 10, # Constraints configuration max_seconds: int | float | None = None, max_requests: int | None = None, @@ -241,25 +222,6 @@ async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 status_level="success", ) - with console.print_update_step( - title="Creating benchmark aggregators" - ) as console_step: - aggregators = { - "scheduler_stats": SchedulerStatsAggregator(), - "requests_progress": GenerativeStatsProgressAggregator(), - "requests": GenerativeRequestsAggregator( - request_samples=sample_requests, - warmup=warmup, - cooldown=cooldown, - ), - **SerializableAggregator.resolve(add_aggregators or {}), - } - console_step.finish( - title="Benchmark aggregators created", - details={key: str(val) for key, val in aggregators.items()}, - status_level="success", - ) - with console.print_update_step(title="Resolving output formats") as console_step: output_formats = GenerativeBenchmarkerOutput.resolve( output_formats=(output_formats or {}), output_path=output_path @@ -291,12 +253,15 @@ async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 GenerationRequest, GenerationResponse, ]().run( + benchmark_class=benchmark_cls, requests=request_loader, backend=backend, profile=profile, environment=NonDistributedEnvironment(), - benchmark_aggregators=aggregators, - benchmark_class=GenerativeBenchmark, + sample_requests=sample_requests, + warmup=warmup, + cooldown=cooldown, + prefer_response_metrics=True, ), ): if benchmark: diff --git a/src/guidellm/benchmark/objects.py b/src/guidellm/benchmark/objects.py deleted file mode 100644 index c3481303..00000000 --- a/src/guidellm/benchmark/objects.py +++ /dev/null @@ -1,475 +0,0 @@ -""" -Benchmark data models and metrics for performance measurement and analysis. - -Provides comprehensive data structures for capturing, storing, and analyzing -benchmark results from scheduler executions. Includes timing measurements, -token statistics, and performance metrics for generative AI workloads. - -Classes: - BenchmarkSchedulerStats: Scheduler timing and performance statistics. - BenchmarkMetrics: Core benchmark metrics and distributions. - BenchmarkRequestStats: Individual request processing statistics. - Benchmark: Base benchmark result container with generic metrics. - GenerativeRequestStats: Request statistics for generative AI workloads. - GenerativeMetrics: Comprehensive metrics for generative benchmarks. - GenerativeBenchmark: Complete generative benchmark results and analysis. - GenerativeBenchmarksReport: Container for multiple benchmark results. - -Type Variables: - BenchmarkMetricsT: Generic benchmark metrics type. - BenchmarkRequestStatsT: Generic request statistics type. - BenchmarkT: Generic benchmark container type. 
-""" - -from __future__ import annotations - -import json -import uuid -from pathlib import Path -from typing import Any, ClassVar, Generic, Literal, TypeVar - -import yaml -from pydantic import Field, computed_field - -from guidellm.benchmark.profile import ( - Profile, -) -from guidellm.data import ( - GenerativeRequestType, -) -from guidellm.scheduler import ( - ScheduledRequestInfo, - SchedulerState, - SchedulingStrategy, -) -from guidellm.utils import ( - StandardBaseDict, - StandardBaseModel, - StatusBreakdown, - StatusDistributionSummary, -) - -__all__ = [ - "Benchmark", - "BenchmarkMetrics", - "BenchmarkSchedulerStats", - "BenchmarkT", - "GenerativeBenchmark", - "GenerativeBenchmarksReport", - "GenerativeMetrics", - "GenerativeRequestStats", -] - - -class BenchmarkSchedulerStats(StandardBaseDict): - """Scheduler timing and performance statistics.""" - - start_time: float = Field( - description="Unix timestamp when the benchmark run started" - ) - end_time: float = Field(description="Unix timestamp when the benchmark run ended") - requests_made: StatusBreakdown[int, int, int, int] = Field( - description="Request counts by status: successful, incomplete, errored, total" - ) - queued_time_avg: float = Field( - description="Avg time requests spent in the queue (seconds)" - ) - worker_resolve_start_delay_avg: float = Field( - description="Avg delay before worker begins resolving req after dequeue (sec)" - ) - worker_resolve_time_avg: float = Field( - description="Avg time for worker to resolve requests (seconds)" - ) - worker_resolve_end_delay_avg: float = Field( - description="Avg delay after request end till worker resolves (seconds)" - ) - finalized_delay_avg: float = Field( - description="Avg delay after resolve til finalized with in scheduler (sec)" - ) - worker_targeted_start_delay_avg: float = Field( - description="Avg delay from targeted start to actual worker start (seconds)" - ) - request_start_delay_avg: float = Field( - description="Avg delay after resolve til request start (seconds)" - ) - request_time_avg: float = Field(description="Avg request processing time (seconds)") - request_targeted_start_delay_avg: float = Field( - description="Avg delay from targeted start to actual request start" - ) - - -class SchedulerDict(StandardBaseDict): - """Scheduler configuration and execution state dictionary.""" - - strategy: SchedulingStrategy - constraints: dict[str, dict[str, Any]] - state: SchedulerState - - -class BenchmarkerDict(StandardBaseDict): - """Benchmarker configuration and component settings dictionary.""" - - profile: Profile - requests: dict[str, Any] - backend: dict[str, Any] - environment: dict[str, Any] - aggregators: dict[str, dict[str, Any]] - - -class BenchmarkMetrics(StandardBaseDict): - """Core benchmark metrics and statistical distributions.""" - - requests_per_second: StatusDistributionSummary = Field( - description="Distribution of requests per second across benchmark execution" - ) - request_concurrency: StatusDistributionSummary = Field( - description="Distribution of concurrent request counts during execution" - ) - request_latency: StatusDistributionSummary = Field( - description="Distribution of request latencies for completed requests" - ) - - -BenchmarkMetricsT = TypeVar("BenchmarkMetricsT", bound=BenchmarkMetrics) - - -class BenchmarkRequestStats(StandardBaseDict): - """Individual request processing statistics and scheduling metadata.""" - - scheduler_info: ScheduledRequestInfo = Field( - description="Scheduler metadata and timing information for 
the request" - ) - - -BenchmarkRequestStatsT = TypeVar("BenchmarkRequestStatsT", bound=BenchmarkRequestStats) - - -class Benchmark(StandardBaseDict, Generic[BenchmarkMetricsT, BenchmarkRequestStatsT]): - """Base benchmark result container with execution metadata.""" - - type_: Literal["benchmark"] = "benchmark" - id_: str = Field( - default_factory=lambda: str(uuid.uuid4()), - description="Unique identifier for this benchmark execution", - ) - run_id: str = Field( - description="Identifier for the benchmarker run containing this benchmark" - ) - run_index: int = Field( - description="Sequential index of this benchmark within the benchmarker run" - ) - scheduler: SchedulerDict = Field( - description="Scheduler configuration and execution state" - ) - benchmarker: BenchmarkerDict = Field( - description="Benchmarker configuration and component settings" - ) - env_args: StandardBaseDict = Field( - description="Environment arguments and runtime configuration" - ) - extras: StandardBaseDict = Field( - description="Additional metadata and custom benchmark parameters" - ) - run_stats: BenchmarkSchedulerStats = Field( - description="Scheduler timing and performance statistics" - ) - start_time: float = Field( - default=-1.0, description="Unix timestamp when the first request was initiated" - ) - end_time: float = Field( - default=-1.0, description="Unix timestamp when the last request completed" - ) - - @computed_field # type: ignore[misc] - @property - def duration(self) -> float: - """ - Benchmark execution duration in seconds. - - :return: Time elapsed from first request start to last request completion. - """ - return self.end_time - self.start_time - - metrics: BenchmarkMetricsT = Field( - description="Performance metrics and statistical distributions" - ) - request_totals: StatusBreakdown[int, int, int, int] = Field( - description="Request counts by status: successful, incomplete, errored, total" - ) - requests: StatusBreakdown[ - list[BenchmarkRequestStatsT], - list[BenchmarkRequestStatsT], - list[BenchmarkRequestStatsT], - None, - ] = Field( - description="Request details grouped by status: successful, incomplete, errored" - ) - - -BenchmarkT = TypeVar("BenchmarkT", bound=Benchmark) - - -class GenerativeRequestStats(BenchmarkRequestStats): - """Request statistics for generative AI text generation workloads.""" - - type_: Literal["generative_request_stats"] = "generative_request_stats" - request_id: str = Field(description="Unique identifier for the request") - request_type: GenerativeRequestType | str = Field( - description="Type of generative request: text or chat completion" - ) - request_args: str | None = Field( - default=None, description="Arguments passed to the backend for this request" - ) - output: str | None = Field( - description="Generated text output, if request completed successfully" - ) - iterations: int = Field( - description="Number of processing iterations for the request" - ) - prompt_tokens: int | None = Field( - description="Number of tokens in the input prompt" - ) - output_tokens: int | None = Field( - description="Number of tokens in the generated output" - ) - - @computed_field # type: ignore[misc] - @property - def total_tokens(self) -> int | None: - """ - Total token count including prompt and output tokens. - - :return: Sum of prompt and output tokens, or None if either is unavailable. 
- """ - if self.prompt_tokens is None and self.output_tokens is None: - return None - - return (self.prompt_tokens or 0) + (self.output_tokens or 0) - - @computed_field # type: ignore[misc] - @property - def request_latency(self) -> float | None: - """ - End-to-end request processing latency in seconds. - - :return: Duration from request start to completion, or None if unavailable. - """ - if ( - not self.scheduler_info.request_timings.request_end - or not self.scheduler_info.request_timings.request_start - ): - return None - - return ( - self.scheduler_info.request_timings.request_end - - self.scheduler_info.request_timings.request_start - ) - - @computed_field # type: ignore[misc] - @property - def time_to_first_token_ms(self) -> float | None: - """ - Time to first token generation in milliseconds. - - :return: Latency from request start to first token, or None if unavailable. - """ - if ( - not self.scheduler_info.request_timings.first_iteration - or not self.scheduler_info.request_timings.request_start - ): - return None - - return 1000 * ( - self.scheduler_info.request_timings.first_iteration - - self.scheduler_info.request_timings.request_start - ) - - @computed_field # type: ignore[misc] - @property - def time_per_output_token_ms(self) -> float | None: - """ - Average time per output token in milliseconds. - - Includes time for first token and all subsequent tokens. - - :return: Average milliseconds per output token, or None if unavailable. - """ - if ( - not self.scheduler_info.request_timings.request_start - or not self.scheduler_info.request_timings.last_iteration - or not self.output_tokens - ): - return None - - return ( - 1000 - * ( - self.scheduler_info.request_timings.last_iteration - - self.scheduler_info.request_timings.request_start - ) - / self.output_tokens - ) - - @computed_field # type: ignore[misc] - @property - def inter_token_latency_ms(self) -> float | None: - """ - Average inter-token latency in milliseconds. - - Measures time between token generations, excluding first token. - - :return: Average milliseconds between tokens, or None if unavailable. - """ - if ( - not self.scheduler_info.request_timings.first_iteration - or not self.scheduler_info.request_timings.last_iteration - or not self.output_tokens - or self.output_tokens <= 1 - ): - return None - - return ( - 1000 - * ( - self.scheduler_info.request_timings.last_iteration - - self.scheduler_info.request_timings.first_iteration - ) - / (self.output_tokens - 1) - ) - - @computed_field # type: ignore[misc] - @property - def tokens_per_second(self) -> float | None: - """ - Overall token throughput including prompt and output tokens. - - :return: Total tokens per second, or None if unavailable. - """ - if not (latency := self.request_latency) or not (tokens := self.total_tokens): - return None - - return tokens / latency - - @computed_field # type: ignore[misc] - @property - def output_tokens_per_second(self) -> float | None: - """ - Output token generation throughput. - - :return: Output tokens per second, or None if unavailable. 
- """ - if not (latency := self.request_latency) or not self.output_tokens: - return None - - return self.output_tokens / latency - - -class GenerativeMetrics(BenchmarkMetrics): - """Comprehensive metrics for generative AI benchmarks.""" - - prompt_token_count: StatusDistributionSummary = Field( - description="Distribution of prompt token counts by request status" - ) - output_token_count: StatusDistributionSummary = Field( - description="Distribution of output token counts by request status" - ) - total_token_count: StatusDistributionSummary = Field( - description="Distribution of total token counts by request status" - ) - time_to_first_token_ms: StatusDistributionSummary = Field( - description="Distribution of first token latencies in milliseconds" - ) - time_per_output_token_ms: StatusDistributionSummary = Field( - description="Distribution of average time per output token in milliseconds" - ) - inter_token_latency_ms: StatusDistributionSummary = Field( - description="Distribution of inter-token latencies in milliseconds" - ) - output_tokens_per_second: StatusDistributionSummary = Field( - description="Distribution of output token generation rates" - ) - tokens_per_second: StatusDistributionSummary = Field( - description="Distribution of total token throughput including prompt and output" - ) - - -class GenerativeBenchmark(Benchmark[GenerativeMetrics, GenerativeRequestStats]): - """Complete generative AI benchmark results with specialized metrics.""" - - type_: Literal["generative_benchmark"] = "generative_benchmark" # type: ignore[assignment] - - -class GenerativeBenchmarksReport(StandardBaseModel): - """Container for multiple benchmark results with load/save functionality.""" - - DEFAULT_FILE: ClassVar[str] = "benchmarks.json" - - @staticmethod - def load_file( - path: str | Path, type_: Literal["json", "yaml"] | None = None - ) -> GenerativeBenchmarksReport: - """ - Load a report from a file. - - :param path: The path to load the report from. - :param type_: File type override, auto-detected from extension if None. - :return: The loaded report. - :raises ValueError: If file type is unsupported. - """ - path = Path(path) if not isinstance(path, Path) else path - - if path.is_dir(): - path = path / GenerativeBenchmarksReport.DEFAULT_FILE - - path.parent.mkdir(parents=True, exist_ok=True) - path_suffix = path.suffix.lower()[1:] - - with path.open("r") as file: - if (type_ or path_suffix) == "json": - model_dict = json.loads(file.read()) - elif (type_ or path_suffix) in ["yaml", "yml"]: - model_dict = yaml.safe_load(file) - else: - raise ValueError(f"Unsupported file type: {type_} for {path}.") - - return GenerativeBenchmarksReport.model_validate(model_dict) - - benchmarks: list[GenerativeBenchmark] = Field( - description="The list of completed benchmarks contained within the report.", - default_factory=list, - ) - - def save_file( - self, path: str | Path | None, type_: Literal["json", "yaml"] | None = None - ) -> Path: - """ - Save the report to a file. - - :param path: The path to save the report to. - :param type_: File type override, auto-detected from extension if None. - :return: The path to the saved report. - :raises ValueError: If file type is unsupported. 
- """ - if path is None: - path = Path.cwd() - elif not isinstance(path, Path): - path = Path(path) - - if path.is_dir(): - path = path / GenerativeBenchmarksReport.DEFAULT_FILE - - path.parent.mkdir(parents=True, exist_ok=True) - path_suffix = path.suffix.lower()[1:] - model_dict = self.model_dump() - - if (type_ or path_suffix) == "json": - save_str = json.dumps(model_dict) - elif (type_ or path_suffix) in ["yaml", "yml"]: - save_str = yaml.dump(model_dict) - else: - raise ValueError(f"Unsupported file type: {type_} for {path}.") - - with path.open("w") as file: - file.write(save_str) - - return path diff --git a/src/guidellm/benchmark/output.py b/src/guidellm/benchmark/output.py index 95b51d70..8b8213ca 100644 --- a/src/guidellm/benchmark/output.py +++ b/src/guidellm/benchmark/output.py @@ -14,17 +14,17 @@ from rich.padding import Padding from rich.text import Text -from guidellm.benchmark.objects import ( - GenerativeBenchmark, - GenerativeBenchmarksReport, - GenerativeMetrics, -) from guidellm.benchmark.profile import ( AsyncProfile, ConcurrentProfile, SweepProfile, ThroughputProfile, ) +from guidellm.benchmark.schemas import ( + GenerativeBenchmark, + GenerativeBenchmarksReport, + GenerativeMetrics, +) from guidellm.presentation import UIDataBuilder from guidellm.presentation.injector import create_report from guidellm.settings import settings diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index fd2a3850..93e86cba 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -46,7 +46,7 @@ from guidellm.utils import PydanticClassRegistryMixin if TYPE_CHECKING: - from guidellm.benchmark.objects import Benchmark + from guidellm.benchmark.schemas import Benchmark __all__ = [ "AsyncProfile", @@ -667,9 +667,9 @@ def next_strategy( return SynchronousStrategy() if prev_strategy.type_ == "synchronous": - self.synchronous_rate = ( - prev_benchmark.metrics.requests_per_second.successful.mean - ) + self.synchronous_rate = prev_benchmark.get_request_metrics_sample()[ + "request_throughput" + ] return ThroughputStrategy( max_concurrency=self.max_concurrency, @@ -677,9 +677,9 @@ def next_strategy( ) if prev_strategy.type_ == "throughput": - self.throughput_rate = ( - prev_benchmark.metrics.requests_per_second.successful.mean - ) + self.throughput_rate = prev_benchmark.get_request_metrics_sample()[ + "request_throughput" + ] if self.synchronous_rate <= 0 and self.throughput_rate <= 0: raise RuntimeError( "Invalid rates in sweep; aborting. 
" diff --git a/src/guidellm/benchmark/progress.py b/src/guidellm/benchmark/progress.py index f93b3a83..9389c742 100644 --- a/src/guidellm/benchmark/progress.py +++ b/src/guidellm/benchmark/progress.py @@ -37,14 +37,10 @@ TimeRemainingColumn, ) -from guidellm.benchmark.aggregator import AggregatorState -from guidellm.benchmark.objects import BenchmarkT, GenerativeBenchmark +from guidellm.benchmark.aggregator import EstimatedBenchmarkState from guidellm.benchmark.profile import Profile -from guidellm.scheduler import ( - SchedulerState, - SchedulingStrategy, - StrategyType, -) +from guidellm.benchmark.schemas import BenchmarkT, GenerativeBenchmark +from guidellm.scheduler import SchedulerState, SchedulingStrategy, StrategyType from guidellm.utils import Colors, format_value_display __all__ = [ @@ -98,7 +94,7 @@ def __call__( profile: Profile, agen: AsyncIterable[ tuple[ - AggregatorState | None, + EstimatedBenchmarkState | None, BenchmarkT | None, SchedulingStrategy, SchedulerState | None, @@ -106,7 +102,7 @@ def __call__( ], ) -> AsyncIterator[ tuple[ - AggregatorState | None, + EstimatedBenchmarkState | None, BenchmarkT | None, SchedulingStrategy, SchedulerState | None, @@ -125,7 +121,7 @@ def __call__( async def aiterator() -> AsyncIterator[ tuple[ - AggregatorState | None, + EstimatedBenchmarkState | None, BenchmarkT | None, SchedulingStrategy, SchedulerState | None, @@ -181,7 +177,9 @@ async def on_benchmark_start(self, strategy: SchedulingStrategy): @abstractmethod async def on_benchmark_update( - self, aggregator_update: AggregatorState, scheduler_state: SchedulerState + self, + aggregator_update: EstimatedBenchmarkState, + scheduler_state: SchedulerState, ): """ Handle benchmark execution progress update. @@ -205,7 +203,7 @@ async def on_finalize(self): async def on_raw_update( self, profile: Profile, - aggregator_update: AggregatorState | None, + aggregator_update: EstimatedBenchmarkState | None, benchmark: BenchmarkT | None, strategy: SchedulingStrategy, scheduler_state: SchedulerState | None, @@ -290,7 +288,9 @@ async def on_benchmark_start(self, strategy: SchedulingStrategy): ) async def on_benchmark_update( - self, aggregator_update: AggregatorState, scheduler_state: SchedulerState + self, + aggregator_update: EstimatedBenchmarkState, + scheduler_state: SchedulerState, ): """ Distribute benchmark updates to all handlers. @@ -322,7 +322,7 @@ async def on_finalize(self): async def on_raw_update( self, profile: Profile, - aggregator_update: AggregatorState | None, + aggregator_update: EstimatedBenchmarkState | None, benchmark: BenchmarkT | None, strategy: SchedulingStrategy, scheduler_state: SchedulerState | None, @@ -432,7 +432,9 @@ async def on_benchmark_start(self, strategy: SchedulingStrategy): self._sync_run_progress() async def on_benchmark_update( - self, aggregator_update: AggregatorState | None, scheduler_state: SchedulerState + self, + aggregator_update: EstimatedBenchmarkState | None, + scheduler_state: SchedulerState, ): """ Update display with current benchmark progress. 
@@ -545,7 +547,9 @@ def start_benchmark(self, strategy: SchedulingStrategy):
         )
 
     def update_benchmark(
-        self, aggregator_update: AggregatorState, scheduler_state: SchedulerState
+        self,
+        aggregator_update: EstimatedBenchmarkState,
+        scheduler_state: SchedulerState,
     ):
         self.benchmark_task_states[self.current_index].update(
             aggregator_update, scheduler_state
@@ -800,7 +804,9 @@ def start(self, strategy: SchedulingStrategy):
         self.strategy_type = strategy.type_
 
     def update(
-        self, aggregator_update: AggregatorState, scheduler_state: SchedulerState
+        self,
+        aggregator_update: EstimatedBenchmarkState,
+        scheduler_state: SchedulerState,
     ):
         self.progress = (
             (1.0 - scheduler_state.remaining_fraction)
diff --git a/src/guidellm/benchmark/schemas.py b/src/guidellm/benchmark/schemas.py
new file mode 100644
index 00000000..1b11aae6
--- /dev/null
+++ b/src/guidellm/benchmark/schemas.py
@@ -0,0 +1,1379 @@
+"""
+Benchmark data models and metrics for performance measurement and analysis.
+
+Provides comprehensive data structures for capturing, storing, and analyzing
+benchmark results from scheduler executions. Includes timing measurements,
+token statistics, and performance metrics for generative AI workloads.
+
+Classes:
+    EstimatedBenchmarkState: Mutable accumulator for in-flight benchmark estimates.
+    BenchmarkArgs: Run configuration for warmup, cooldown, and request sampling.
+    BenchmarkSchedulerStats: Scheduler timing and performance statistics.
+    Benchmark: Abstract base for benchmark result containers.
+    GenerativeMetricsSummary: Input, output, and total distributions for one measure.
+    GenerativeTextMetricsSummary: Text token, word, character, and byte summaries.
+    GenerativeImageMetricsSummary: Image token, count, pixel, and byte summaries.
+    GenerativeVideoMetricsSummary: Video token, frame, second, and byte summaries.
+    GenerativeAudioMetricsSummary: Audio token, sample, second, and byte summaries.
+    GenerativeMetrics: Comprehensive metrics for generative benchmarks.
+    SchedulerDict: Scheduler configuration and execution state.
+    BenchmarkerDict: Benchmarker configuration and component settings.
+    GenerativeBenchmark: Complete generative benchmark results and analysis.
+    GenerativeBenchmarksReport: Container for multiple benchmark results.
+
+Type Variables:
+    BenchmarkT: Generic benchmark container type.
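+
+Example:
+    A rough usage sketch of the grouped-metric helpers (illustrative only; the
+    real call sites live in the benchmark aggregation and progress code):
+
+        state = EstimatedBenchmarkState()
+        state.set_metric(group="benchmark_state", key="updated", value=True)
+        state.add_avg_metric(
+            group="benchmark_metrics", key="request_latency", value=0.42
+        )
+        state.get_metric(group="benchmark_metrics", key="request_latency")  # 0.42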
+"""
+
+from __future__ import annotations
+
+import json
+import random
+import time
+import uuid
+from abc import ABC, abstractmethod
+from collections.abc import Iterable
+from pathlib import Path
+from typing import Any, ClassVar, Literal, TypeVar, cast
+
+import yaml
+from pydantic import Field, computed_field
+
+from guidellm.benchmark.profile import Profile
+from guidellm.scheduler import (
+    BackendInterface,
+    Environment,
+    SchedulerState,
+    SchedulingStrategy,
+)
+from guidellm.schemas import (
+    GenerationRequest,
+    GenerationResponse,
+    GenerativeRequestStats,
+    RequestInfo,
+)
+from guidellm.schemas.request import UsageMetrics
+from guidellm.utils import (
+    InfoMixin,
+    StandardBaseDict,
+    StandardBaseModel,
+    StatusBreakdown,
+    StatusDistributionSummary,
+)
+
+__all__ = [
+    "Benchmark",
+    "BenchmarkArgs",
+    "BenchmarkSchedulerStats",
+    "BenchmarkT",
+    "BenchmarkerDict",
+    "EstimatedBenchmarkState",
+    "GenerativeAudioMetricsSummary",
+    "GenerativeBenchmark",
+    "GenerativeBenchmarksReport",
+    "GenerativeImageMetricsSummary",
+    "GenerativeMetrics",
+    "GenerativeMetricsSummary",
+    "GenerativeTextMetricsSummary",
+    "GenerativeVideoMetricsSummary",
+    "SchedulerDict",
+]
+
+
+class EstimatedBenchmarkState(dict[str, Any]):
+    benchmark_state_group: ClassVar[Literal["benchmark_state"]] = "benchmark_state"
+    benchmark_metrics_group: ClassVar[Literal["benchmark_metrics"]] = (
+        "benchmark_metrics"
+    )
+    scheduler_state_group: ClassVar[Literal["scheduler_state"]] = "scheduler_state"
+
+    def get_metric(
+        self,
+        group: str,
+        key: str,
+        default: int | float | None = None,
+    ) -> int | float | None:
+        return self.get(f"{group}_{key}", default)
+
+    def set_metric(
+        self,
+        group: str,
+        key: str,
+        value: bool | int | float | None,
+        start_val: bool | int | float | None = None,
+    ) -> bool | int | float | None:
+        if value is None:
+            return None
+
+        if start_val is not None:
+            value -= start_val
+        self[f"{group}_{key}"] = value
+
+        return value
+
+    def add_avg_metric(
+        self,
+        group: str,
+        key: str,
+        value: bool | int | float | None,
+        start_val: bool | int | float | None = 0.0,
+        count: int | None = 1,
+    ):
+        if value is None or count is None:
+            return
+
+        if start_val is not None:
+            value -= start_val
+
+        total_key = f"{group}_{key}_total"
+        count_key = f"{group}_{key}_count"
+        self[total_key] = self.get(total_key, 0) + value
+        self[count_key] = self.get(count_key, 0) + count
+
+        average = self[total_key] / self[count_key]
+        self.set_metric(
+            group=group,
+            key=key,
+            value=average,
+        )
+
+    def add_avg_rate_metric(
+        self,
+        group: str,
+        key: str,
+        value: bool | int | float | None,
+        start_val: bool | int | float | None = 0.0,
+        start_time: float | None = None,
+        end_time: float | None = None,
+        numerator_type: Literal["avg", "total", "count"] = "total",
+    ):
+        if value is None:
+            return
+
+        self.add_avg_metric(
+            group=group,
+            key=key,
+            value=value,
+            start_val=start_val,
+        )
+        start_time_key = f"{group}_{key}_start_time"
+        if self.get(start_time_key) is None:
+            if start_time is None:
+                start_time = time.time()
+            self[start_time_key] = start_time
+        else:
+            self[start_time_key] = start_time or self[start_time_key]
+
+        end_time = end_time or time.time()
+        elapsed_time = end_time - self[start_time_key]
+
+        if elapsed_time > 0:
+            numerator_key = (
+                f"{group}_{key}_{numerator_type}"
+                if numerator_type != "avg"
+                else f"{group}_{key}"
+            )
+            rate = self[numerator_key] / elapsed_time
+            
self.set_metric( + group=group, + key=f"{key}_per_second", + value=rate, + ) + + def add_time_averaged_metric( + self, + group: str, + key: str, + value: bool | int | float | None, + recorded_time: float | None = None, + ): + if value is None: + return + + if recorded_time is None: + recorded_time = time.time() + + time_avg_numerator_key = f"{group}_{key}_time_avg_numerator" + time_avg_denominator_key = f"{group}_{key}_time_avg_denominator" + last_recorded_time_key = f"{group}_{key}_last_recorded_time" + + if last_recorded_time_key not in self: + self[last_recorded_time_key] = recorded_time + self[time_avg_numerator_key] = value + self[time_avg_denominator_key] = 0.0 + else: + time_delta = recorded_time - self[last_recorded_time_key] + self[time_avg_numerator_key] += value * time_delta + self[time_avg_denominator_key] += time_delta + self[last_recorded_time_key] = recorded_time + + if self[time_avg_denominator_key] > 0: + average = self[time_avg_numerator_key] / self[time_avg_denominator_key] + else: + average = value + + self.set_metric( + group=group, + key=key, + value=average, + ) + + +class BenchmarkArgs(StandardBaseDict): + run_id: str = Field( + default_factory=lambda: str(uuid.uuid4()), + description="Unique identifier for the benchmark run", + ) + run_index: int = Field(default=0, description="Index of the benchmark run") + sample_requests: int | None = Field( + default=20, + description="Number of requests to sample and keep in the final benchmark for metrics", + ) + warmup: int | float | None = Field( + default=None, description="Warmup time before benchmarking starts" + ) + cooldown: int | float | None = Field( + default=None, description="Cooldown time after benchmarking ends" + ) + prefer_response_metrics: bool = Field( + default=True, + description="Whether to prefer response metrics over request metrics", + ) + + def is_in_warmup( + self, request_info: RequestInfo, scheduler_state: SchedulerState + ) -> bool: + if self.warmup is not None and 0 < self.warmup < 1: + # Percentage-based warmup + return ( + scheduler_state.remaining_fraction is not None + and scheduler_state.remaining_fraction > (1 - self.warmup) + ) + + if self.warmup is not None and self.warmup > 1: + # Count/time-based warmup + if scheduler_state.processed_requests < self.warmup: + return True + + current_time = request_info.timings.targeted_start + return ( + current_time is not None + and (current_time - scheduler_state.start_time) < self.warmup + ) + + return False + + def is_in_cooldown( + self, request_info: RequestInfo, scheduler_state: SchedulerState + ) -> bool: + if self.cooldown is not None and 0 < self.cooldown < 1: + # Percentage-based cooldown + return ( + scheduler_state.remaining_fraction is not None + and scheduler_state.remaining_fraction < self.cooldown + ) + + if self.cooldown is not None and self.cooldown > 1: + # Count/time-based cooldown + if ( + scheduler_state.remaining_requests is not None + and scheduler_state.remaining_requests <= self.cooldown + ): + return True + + current_time = ( + request_info.timings.resolve_end or request_info.timings.targeted_start + ) + return ( + current_time is not None + and scheduler_state.remaining_duration is not None + and scheduler_state.remaining_duration < self.cooldown + ) + + return False + + +class Benchmark(ABC): + @abstractmethod + def get_run_metrics_sample( + self, + ) -> dict[Literal["start_time", "end_time", "duration"], float]: ... 
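+
+    # Subclasses implement two lightweight sampling getters plus a two-phase
+    # lifecycle: update_estimate() is expected to fold each finished request into
+    # the shared EstimatedBenchmarkState during a run, and compile() then builds
+    # the final benchmark object from that accumulated state.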
+
+    @abstractmethod
+    def get_request_metrics_sample(
+        self,
+    ) -> dict[
+        Literal[
+            "request_count",
+            "request_latency",
+            "request_throughput",
+            "request_concurrency",
+        ],
+        float,
+    ]: ...
+
+    @classmethod
+    @abstractmethod
+    def update_estimate(
+        cls,
+        args: BenchmarkArgs,
+        state: EstimatedBenchmarkState,
+        response: Any,
+        request: Any,
+        request_info: RequestInfo,
+        scheduler_state: SchedulerState,
+    ): ...
+
+    @classmethod
+    @abstractmethod
+    def compile(
+        cls,
+        args: BenchmarkArgs,
+        estimated_state: EstimatedBenchmarkState,
+        scheduler_state: SchedulerState,
+        profile: Profile,
+        requests: Iterable,
+        backend: BackendInterface,
+        environment: Environment,
+        strategy: SchedulingStrategy,
+        constraints: dict[str, dict[str, Any]],
+    ) -> Any: ...
+
+
+BenchmarkT = TypeVar("BenchmarkT", bound=Benchmark)
+
+
+class BenchmarkSchedulerStats(StandardBaseDict):
+    """Scheduler timing and performance statistics."""
+
+    group_name: ClassVar[Literal["scheduler_stats"]] = "scheduler_stats"
+
+    start_time: float = Field(
+        description="Unix timestamp when the benchmark run started"
+    )
+    end_time: float = Field(description="Unix timestamp when the benchmark run ended")
+    requests_made: StatusBreakdown[int, int, int, int] = Field(
+        description="Request counts by status: successful, incomplete, errored, total"
+    )
+    queued_time_avg: float = Field(
+        description="Avg time requests spent in the queue (seconds)"
+    )
+    worker_resolve_start_delay_avg: float = Field(
+        description="Avg delay before worker begins resolving req after dequeue (sec)"
+    )
+    worker_resolve_time_avg: float = Field(
+        description="Avg time for worker to resolve requests (seconds)"
+    )
+    worker_resolve_end_delay_avg: float = Field(
+        description="Avg delay after request end until worker resolves (seconds)"
+    )
+    finalized_delay_avg: float = Field(
+        description="Avg delay after resolve until finalized within scheduler (sec)"
+    )
+    worker_targeted_start_delay_avg: float = Field(
+        description="Avg delay from targeted start to actual worker start (seconds)"
+    )
+    request_start_delay_avg: float = Field(
+        description="Avg delay after resolve until request start (seconds)"
+    )
+    request_time_avg: float = Field(description="Avg request processing time (seconds)")
+    request_targeted_start_delay_avg: float = Field(
+        description="Avg delay from targeted start to actual request start (seconds)"
+    )
+
+    @classmethod
+    def update_estimate(cls, state: EstimatedBenchmarkState, request_info: RequestInfo):
+        state.set_metric(group=cls.group_name, key="updated", value=True)
+        state.add_avg_metric(
+            group=cls.group_name,
+            key="queued_time",
+            value=request_info.timings.dequeued,
+            start_val=request_info.timings.queued,
+        )
+        state.add_avg_metric(
+            group=cls.group_name,
+            key="worker_resolve_start_delay",
+            value=request_info.timings.resolve_start,
+            start_val=request_info.timings.scheduled_at,
+        )
+        state.add_avg_metric(
+            group=cls.group_name,
+            key="worker_resolve_time",
+            value=request_info.timings.resolve_end,
+            start_val=request_info.timings.resolve_start,
+        )
+        state.add_avg_metric(
+            group=cls.group_name,
+            key="worker_resolve_end_delay",
+            value=request_info.timings.request_end,
+            start_val=request_info.timings.resolve_end,
+        )
+        state.add_avg_metric(
+            group=cls.group_name,
+            key="finalized_delay",
+            value=request_info.timings.finalized,
+            start_val=request_info.timings.resolve_end,
+        )
+        state.add_avg_metric(
+            group=cls.group_name,
+            key="worker_targeted_start_delay",
+            value=request_info.timings.resolve_start,
+            
start_val=request_info.timings.targeted_start, + ) + state.add_avg_metric( + group=cls.group_name, + key="request_start_delay", + value=request_info.timings.request_start, + start_val=request_info.timings.resolve_start, + ) + state.add_avg_metric( + group=cls.group_name, + key="request_time", + value=request_info.timings.request_end, + start_val=request_info.timings.request_start, + ) + state.add_avg_metric( + group=cls.group_name, + key="request_targeted_start_delay", + value=request_info.timings.request_start, + start_val=request_info.timings.targeted_start, + ) + + @classmethod + def compile( + cls, estimated_state: EstimatedBenchmarkState, scheduler_state: SchedulerState + ) -> BenchmarkSchedulerStats: + return BenchmarkSchedulerStats( + start_time=scheduler_state.start_time, + end_time=scheduler_state.end_time or scheduler_state.start_time, + requests_made=StatusBreakdown[int, int, int, int]( + successful=scheduler_state.successful_requests, + incomplete=scheduler_state.cancelled_requests, + errored=scheduler_state.errored_requests, + total=( + scheduler_state.successful_requests + + scheduler_state.cancelled_requests + + scheduler_state.errored_requests + ), + ), + queued_time_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, key="queued_time", default=-1.0 + ), + ), + worker_resolve_start_delay_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, key="worker_resolve_start_delay", default=-1.0 + ), + ), + worker_resolve_time_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, key="worker_resolve_time", default=-1.0 + ), + ), + worker_resolve_end_delay_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, key="worker_resolve_end_delay", default=-1.0 + ), + ), + finalized_delay_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, key="finalized_delay", default=-1.0 + ), + ), + worker_targeted_start_delay_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, + key="worker_targeted_start_delay", + default=-1.0, + ), + ), + request_start_delay_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, key="request_start_delay", default=-1.0 + ), + ), + request_time_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, key="request_time", default=-1.0 + ), + ), + request_targeted_start_delay_avg=cast( + "float", + estimated_state.get_metric( + group=cls.group_name, + key="request_targeted_start_delay", + default=-1.0, + ), + ), + ) + + +class GenerativeMetricsSummary(StandardBaseDict): + input: StatusDistributionSummary = Field(description="") + input_per_second: StatusDistributionSummary = Field(description="") + input_concurrency: StatusDistributionSummary = Field(description="") + + output: StatusDistributionSummary = Field(description="") + output_per_second: StatusDistributionSummary = Field(description="") + output_concurrency: StatusDistributionSummary = Field(description="") + + total: StatusDistributionSummary = Field(description="") + total_per_second: StatusDistributionSummary = Field(description="") + total_concurrency: StatusDistributionSummary = Field(description="") + + @classmethod + def compile( + cls, + request_types: list[Literal["successful", "incomplete", "error"]], + request_times: list[tuple[float, float]], + input_values: list[int | float], + output_values: list[int | float], + ) -> GenerativeMetricsSummary: + total_values = [ + input_val + output_val + for input_val, output_val in 
zip(input_values, output_values) + ] + + return GenerativeMetricsSummary( + input=StatusDistributionSummary.from_values( + value_types=request_types, + values=input_values, + ), + input_per_second=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="rate", + weights=input_values, + ), + input_concurrency=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="concurrency", + weights=input_values, + ), + output=StatusDistributionSummary.from_values( + value_types=request_types, + values=output_values, + ), + output_per_second=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="rate", + weights=output_values, + ), + output_concurrency=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="concurrency", + weights=output_values, + ), + total=StatusDistributionSummary.from_values( + value_types=request_types, + values=total_values, + ), + total_per_second=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="rate", + weights=total_values, + ), + total_concurrency=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="concurrency", + weights=total_values, + ), + ) + + +class GenerativeTextMetricsSummary(StandardBaseDict): + tokens: GenerativeMetricsSummary = Field(description="") + words: GenerativeMetricsSummary = Field(description="") + characters: GenerativeMetricsSummary = Field(description="") + bytes: GenerativeMetricsSummary = Field(description="") + + @classmethod + def compile( + cls, + request_types: list[Literal["successful", "incomplete", "error"]], + request_times: list[tuple[float, float]], + input_metrics: list[UsageMetrics], + output_metrics: list[UsageMetrics], + ) -> GenerativeTextMetricsSummary: + return GenerativeTextMetricsSummary( + tokens=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.text_tokens or 0 for metrics in input_metrics], + output_values=[metrics.text_tokens or 0 for metrics in output_metrics], + ), + words=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.text_words or 0 for metrics in input_metrics], + output_values=[metrics.text_words or 0 for metrics in output_metrics], + ), + characters=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[ + metrics.text_characters or 0 for metrics in input_metrics + ], + output_values=[ + metrics.text_characters or 0 for metrics in output_metrics + ], + ), + bytes=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.text_bytes or 0 for metrics in input_metrics], + output_values=[metrics.text_bytes or 0 for metrics in output_metrics], + ), + ) + + +class GenerativeImageMetricsSummary(StandardBaseDict): + tokens: GenerativeMetricsSummary = Field(description="") + images: GenerativeMetricsSummary = Field(description="") + pixels: GenerativeMetricsSummary = Field(description="") + bytes: GenerativeMetricsSummary = Field(description="") + + @classmethod + def compile( + cls, + request_types: list[Literal["successful", "incomplete", "error"]], + 
request_times: list[tuple[float, float]], + input_metrics: list[UsageMetrics], + output_metrics: list[UsageMetrics], + ) -> GenerativeImageMetricsSummary: + return GenerativeImageMetricsSummary( + tokens=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.image_tokens or 0 for metrics in input_metrics], + output_values=[metrics.image_tokens or 0 for metrics in output_metrics], + ), + images=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.image_count or 0 for metrics in input_metrics], + output_values=[metrics.image_count or 0 for metrics in output_metrics], + ), + pixels=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.image_pixels or 0 for metrics in input_metrics], + output_values=[metrics.image_pixels or 0 for metrics in output_metrics], + ), + bytes=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.image_bytes or 0 for metrics in input_metrics], + output_values=[metrics.image_bytes or 0 for metrics in output_metrics], + ), + ) + + +class GenerativeVideoMetricsSummary(StandardBaseDict): + tokens: GenerativeMetricsSummary = Field(description="") + frames: GenerativeMetricsSummary = Field(description="") + seconds: GenerativeMetricsSummary = Field(description="") + bytes: GenerativeMetricsSummary = Field(description="") + + @classmethod + def compile( + cls, + request_types: list[Literal["successful", "incomplete", "error"]], + request_times: list[tuple[float, float]], + input_metrics: list[UsageMetrics], + output_metrics: list[UsageMetrics], + ) -> GenerativeVideoMetricsSummary: + return GenerativeVideoMetricsSummary( + tokens=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.video_tokens or 0 for metrics in input_metrics], + output_values=[metrics.video_tokens or 0 for metrics in output_metrics], + ), + frames=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.video_frames or 0 for metrics in input_metrics], + output_values=[metrics.video_frames or 0 for metrics in output_metrics], + ), + seconds=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.video_seconds or 0 for metrics in input_metrics], + output_values=[ + metrics.video_seconds or 0 for metrics in output_metrics + ], + ), + bytes=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.video_bytes or 0 for metrics in input_metrics], + output_values=[metrics.video_bytes or 0 for metrics in output_metrics], + ), + ) + + +class GenerativeAudioMetricsSummary(StandardBaseDict): + tokens: GenerativeMetricsSummary = Field(description="") + samples: GenerativeMetricsSummary = Field(description="") + seconds: GenerativeMetricsSummary = Field(description="") + bytes: GenerativeMetricsSummary = Field(description="") + + @classmethod + def compile( + cls, + request_types: list[Literal["successful", "incomplete", "error"]], + request_times: list[tuple[float, float]], + input_metrics: list[UsageMetrics], + output_metrics: list[UsageMetrics], + ) -> GenerativeAudioMetricsSummary: + return GenerativeAudioMetricsSummary( + tokens=GenerativeMetricsSummary.compile( + request_types=request_types, + 
request_times=request_times, + input_values=[metrics.audio_tokens or 0 for metrics in input_metrics], + output_values=[metrics.audio_tokens or 0 for metrics in output_metrics], + ), + samples=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.audio_samples or 0 for metrics in input_metrics], + output_values=[ + metrics.audio_samples or 0 for metrics in output_metrics + ], + ), + seconds=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.audio_seconds or 0 for metrics in input_metrics], + output_values=[ + metrics.audio_seconds or 0 for metrics in output_metrics + ], + ), + bytes=GenerativeMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_values=[metrics.audio_bytes or 0 for metrics in input_metrics], + output_values=[metrics.audio_bytes or 0 for metrics in output_metrics], + ), + ) + + +class GenerativeMetrics(StandardBaseDict): + """Comprehensive metrics for generative AI benchmarks.""" + + # Request stats + requests_per_second: StatusDistributionSummary = Field( + description="Distribution of requests per second across benchmark execution" + ) + request_concurrency: StatusDistributionSummary = Field( + description="Distribution of concurrent request counts during execution" + ) + request_latency: StatusDistributionSummary = Field( + description="Distribution of request latencies for completed requests" + ) + + # General token stats + prompt_token_count: StatusDistributionSummary = Field( + description="Distribution of prompt token counts by request status" + ) + output_token_count: StatusDistributionSummary = Field( + description="Distribution of output token counts by request status" + ) + total_token_count: StatusDistributionSummary = Field( + description="Distribution of total token counts by request status" + ) + time_to_first_token_ms: StatusDistributionSummary = Field( + description="Distribution of first token latencies in milliseconds" + ) + time_per_output_token_ms: StatusDistributionSummary = Field( + description="Distribution of average time per output token in milliseconds" + ) + inter_token_latency_ms: StatusDistributionSummary = Field( + description="Distribution of inter-token latencies in milliseconds" + ) + output_tokens_per_second: StatusDistributionSummary = Field( + description="Distribution of output token generation rates" + ) + tokens_per_second: StatusDistributionSummary = Field( + description="Distribution of total token throughput including prompt and output" + ) + + # Domain specific stats + text: GenerativeTextMetricsSummary = Field(description="") + image: GenerativeImageMetricsSummary = Field(description="") + video: GenerativeVideoMetricsSummary = Field(description="") + audio: GenerativeAudioMetricsSummary = Field(description="") + + @classmethod + def update_estimate( + cls, + state: EstimatedBenchmarkState, + response: GenerationResponse | None, + request: GenerationRequest, + request_info: RequestInfo, + scheduler_state: SchedulerState, + ): + # Always track concurrency + state.add_time_averaged_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="concurrency", + value=scheduler_state.processing_requests, + ) + + if request_info.status not in {"completed", "errored", "cancelled"}: + return + + state.set_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="updated", + value=True, + ) + start_time = scheduler_state.start_time + end_time = 
request_info.timings.request_end or request_info.timings.resolve_end + duration = end_time - start_time if end_time else None + + for prefix in (request_info.status, "total"): + requests_count = ( + scheduler_state.successful_requests + if prefix == "successful" + else scheduler_state.errored_requests + if prefix == "errored" + else scheduler_state.cancelled_requests + if prefix == "cancelled" + else scheduler_state.processed_requests + ) + + # Request stats + state.set_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key=f"{prefix}_requests", + value=requests_count, + ) + state.set_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key=f"{prefix}_requests_per_second", + value=requests_count / duration if duration else None, + ) + state.add_avg_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key=f"{prefix}_request_latency", + value=( + request_info.timings.request_end or request_info.timings.resolve_end + ), + start_val=( + request_info.timings.request_start + or request_info.timings.resolve_start + ), + ) + + # Input/output token stats + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_tokens", + value=(response.input_metrics.total_tokens if response else None) + or request.input_metrics.total_tokens, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_text_tokens", + value=(response.input_metrics.text_tokens if response else None) + or request.input_metrics.text_tokens, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_images", + value=(response.input_metrics.image_count if response else None) + or request.input_metrics.image_count, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_video_frames", + value=(response.input_metrics.video_frames if response else None) + or request.input_metrics.video_frames, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_audio_seconds", + value=request.input_metrics.audio_seconds if request else None, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="output_tokens", + value=(response.output_metrics.total_tokens if response else None) + or request.output_metrics.total_tokens, + ) + + # General stats + output_tokens = ( + response.output_metrics.total_tokens if response else None + ) or request.output_metrics.total_tokens + state.add_avg_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key=f"{prefix}_time_to_first_token", + value=request_info.timings.first_iteration, + start_val=request_info.timings.request_start + or request_info.timings.resolve_start, + ) + state.add_avg_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key=f"{prefix}_inter_token_latency", + value=request_info.timings.last_iteration, + start_val=request_info.timings.first_iteration, + count=output_tokens - 1 + if output_tokens and output_tokens > 1 + else None, + ) + state.add_avg_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key=f"{prefix}_time_per_output_token", + value=( + request_info.timings.request_end or request_info.timings.resolve_end + ), + start_val=( + request_info.timings.first_iteration + or request_info.timings.request_start + or request_info.timings.resolve_start + ), + count=output_tokens, + ) + + @classmethod + def compile( + cls, + completed: 
list[GenerativeRequestStats], + errored: list[GenerativeRequestStats], + incomplete: list[GenerativeRequestStats], + ) -> GenerativeMetrics: + requests = completed + errored + incomplete + request_types = cast( + "list[Literal['successful', 'error', 'incomplete']]", + ["successful"] * len(completed) + + ["error"] * len(errored) + + ["incomplete"] * len(incomplete), + ) + request_times = [ + ( + req.info.timings.request_start or req.info.timings.resolve_start or 0, + req.info.timings.request_end or req.info.timings.resolve_end or 0, + ) + for req in requests + ] + input_metrics = [req.input_metrics for req in requests] + output_metrics = [req.output_metrics for req in requests] + + return GenerativeMetrics( + # Request stats + requests_per_second=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="rate", + ), + request_concurrency=StatusDistributionSummary.from_request_times( + request_types=request_types, + requests=request_times, + distribution_type="concurrency", + ), + request_latency=StatusDistributionSummary.from_values( + value_types=request_types, + values=[req.request_latency or 0.0 for req in requests], + ), + # General token stats + prompt_token_count=StatusDistributionSummary.from_values( + value_types=request_types, + values=[float(req.prompt_tokens or 0) for req in requests], + ), + output_token_count=StatusDistributionSummary.from_values( + value_types=request_types, + values=[float(req.output_tokens or 0) for req in requests], + ), + total_token_count=StatusDistributionSummary.from_values( + value_types=request_types, + values=[float(req.total_tokens or 0) for req in requests], + ), + time_to_first_token_ms=StatusDistributionSummary.from_values( + value_types=request_types, + values=[req.time_to_first_token_ms or 0.0 for req in requests], + ), + time_per_output_token_ms=StatusDistributionSummary.from_values( + value_types=request_types, + values=[req.time_per_output_token_ms or 0.0 for req in requests], + ), + inter_token_latency_ms=StatusDistributionSummary.from_values( + value_types=request_types, + values=[req.inter_token_latency_ms or 0.0 for req in requests], + ), + output_tokens_per_second=StatusDistributionSummary.from_values( + value_types=request_types, + values=[req.output_tokens_per_second or 0.0 for req in requests], + ), + tokens_per_second=StatusDistributionSummary.from_values( + value_types=request_types, + values=[req.tokens_per_second or 0.0 for req in requests], + ), + # Domain-specific stats + text=GenerativeTextMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_metrics=input_metrics, + output_metrics=output_metrics, + ), + image=GenerativeImageMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_metrics=input_metrics, + output_metrics=output_metrics, + ), + video=GenerativeVideoMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_metrics=input_metrics, + output_metrics=output_metrics, + ), + audio=GenerativeAudioMetricsSummary.compile( + request_types=request_types, + request_times=request_times, + input_metrics=input_metrics, + output_metrics=output_metrics, + ), + ) + + +class SchedulerDict(StandardBaseDict): + """Scheduler configuration and execution state dictionary.""" + + strategy: SchedulingStrategy + constraints: dict[str, dict[str, Any]] + state: SchedulerState + + +class BenchmarkerDict(StandardBaseDict): + """Benchmarker configuration and component 
settings dictionary.""" + + args: BenchmarkArgs + profile: Profile + requests: dict[str, Any] + backend: dict[str, Any] + environment: dict[str, Any] + + +class GenerativeBenchmark(Benchmark, StandardBaseDict): + """Complete generative AI benchmark results with specialized metrics.""" + + group_name: ClassVar[Literal["generative_benchmark"]] = "generative_benchmark" + + type_: Literal["generative_benchmark"] = "generative_benchmark" # type: ignore[assignment] + id_: str = Field( + default_factory=lambda: str(uuid.uuid4()), + description="Unique identifier for this benchmark execution", + ) + run_id: str = Field( + description="Identifier for the benchmarker run containing this benchmark" + ) + run_index: int = Field( + description="Sequential index of this benchmark within the benchmarker run" + ) + scheduler: SchedulerDict = Field( + description="Scheduler configuration and execution state" + ) + benchmarker: BenchmarkerDict = Field( + description="Benchmarker configuration and component settings" + ) + run_stats: BenchmarkSchedulerStats = Field( + description="Scheduler timing and performance statistics" + ) + start_time: float = Field( + default=-1.0, description="Unix timestamp when the first request was initiated" + ) + end_time: float = Field( + default=-1.0, description="Unix timestamp when the last request completed" + ) + + def get_run_metrics_sample( + self, + ) -> dict[Literal["start_time", "end_time", "duration"], float]: + return { + "start_time": self.start_time, + "end_time": self.end_time, + "duration": self.duration, + } + + def get_request_metrics_sample( + self, + ) -> dict[ + Literal[ + "request_count", + "request_latency", + "request_throughput", + "request_concurrency", + ], + float, + ]: + return { + "request_count": self.request_totals.successful, + "request_latency": self.metrics.request_latency.successful.mean, + "request_throughput": self.metrics.requests_per_second.successful.mean, + "request_concurrency": self.metrics.request_concurrency.successful.mean, + } + + @computed_field # type: ignore[misc] + @property + def duration(self) -> float: + """ + Benchmark execution duration in seconds. + + :return: Time elapsed from first request start to last request completion. 
+ """ + return self.end_time - self.start_time + + metrics: GenerativeMetrics = Field( + description="Performance metrics and statistical distributions" + ) + request_totals: StatusBreakdown[int, int, int, int] = Field( + description="Request counts by status: successful, incomplete, errored, total" + ) + requests: StatusBreakdown[ + list[GenerativeRequestStats], + list[GenerativeRequestStats], + list[GenerativeRequestStats], + None, + ] = Field( + description="Request details grouped by status: successful, incomplete, errored" + ) + + @classmethod + def update_estimate( + cls, + args: BenchmarkArgs, + state: EstimatedBenchmarkState, + response: GenerationResponse | None, + request: GenerationRequest, + request_info: RequestInfo, + scheduler_state: SchedulerState, + ): + # Update child metric groups + BenchmarkSchedulerStats.update_estimate(state, request_info) + GenerativeMetrics.update_estimate( + state, response, request, request_info, scheduler_state + ) + + # Store requests and sampling info, update counts + if "requests_completed" not in state: + state["requests_completed"] = [] + state["samples_completed"] = [] + state["requests_errored"] = [] + state["samples_errored"] = [] + state["requests_incomplete"] = [] + state["samples_incomplete"] = [] + + if request_info.status not in {"completed", "errored", "cancelled"}: + # Must be fully resolved to be added + return + + if ( + request_info.status == "cancelled" + and request_info.timings.resolve_start is None + ): + # Cancelled requests that never started should not be added + return + + state.set_metric(group=cls.group_name, key="updated", value=True) + if state.set_metric( + group=cls.group_name, + key="in_warmup", + value=args.is_in_warmup(request_info, scheduler_state), + ) or state.set_metric( + group=cls.group_name, + key="in_cooldown", + value=args.is_in_cooldown(request_info, scheduler_state), + ): + return + + if response is None: + response = GenerationResponse( + request_id=request.request_id, request_args=str(request.arguments) + ) + + stats = response.compile_stats( + request, request_info, args.prefer_response_metrics + ) + + # Determine status and get corresponding lists + if request_info.status == "completed": + requests_list = state["requests_completed"] + samples_list = state["samples_completed"] + elif request_info.status == "errored": + requests_list = state["requests_errored"] + samples_list = state["samples_errored"] + else: # cancelled (incomplete) + requests_list = state["requests_incomplete"] + samples_list = state["samples_incomplete"] + + # Add to requests list + requests_list.append(stats) + current_index = len(requests_list) - 1 + + # Handle request sampling logic + if args.sample_requests is None: + # No sampling, add index to samples list + samples_list.append(current_index) + elif args.sample_requests > 0 and len(samples_list) < args.sample_requests: + # Space in samples list, add index + samples_list.append(current_index) + elif ( + args.sample_requests > 0 + and (replace_index := random.randrange(len(requests_list))) + < args.sample_requests + ): + # No space, adding based on reservoir sampling + samples_list[replace_index] = current_index + # Sampling set to 0, don't keep any requests + + @classmethod + def compile( + cls, + args: BenchmarkArgs, + estimated_state: EstimatedBenchmarkState, + scheduler_state: SchedulerState, + profile: Profile, + requests: Iterable, + backend: BackendInterface, + environment: Environment, + strategy: SchedulingStrategy, + constraints: dict[str, dict[str, Any]], + ) -> 
GenerativeBenchmark: + return GenerativeBenchmark( + run_id=args.run_id, + run_index=args.run_index, + scheduler=SchedulerDict( + strategy=strategy, + constraints={ + key: InfoMixin.extract_from_obj(val) + for key, val in constraints.items() + }, + state=scheduler_state, + ), + benchmarker=BenchmarkerDict( + args=args, + profile=profile, + requests=InfoMixin.extract_from_obj(requests), + backend=backend.info, + environment=environment.info, + ), + run_stats=BenchmarkSchedulerStats.compile(estimated_state, scheduler_state), + start_time=scheduler_state.start_time or -1.0, + end_time=scheduler_state.end_time or -1.0, + metrics=GenerativeMetrics.compile( + completed=estimated_state.get("requests_completed", []), + errored=estimated_state.get("requests_errored", []), + incomplete=estimated_state.get("requests_incomplete", []), + ), + request_totals=StatusBreakdown[int, int, int, int]( + successful=len(estimated_state.get("requests_completed", [])), + incomplete=len(estimated_state.get("requests_incomplete", [])), + errored=len(estimated_state.get("requests_errored", [])), + total=( + len(estimated_state.get("requests_completed", [])) + + len(estimated_state.get("requests_incomplete", [])) + + len(estimated_state.get("requests_errored", [])) + ), + ), + requests=StatusBreakdown[ + list[GenerativeRequestStats], + list[GenerativeRequestStats], + list[GenerativeRequestStats], + None, + ]( + successful=estimated_state.get("requests_completed", []), + incomplete=estimated_state.get("requests_incomplete", []), + errored=estimated_state.get("requests_errored", []), + total=None, + ), + ) + + +class GenerativeBenchmarksReport(StandardBaseModel): + """Container for multiple benchmark results with load/save functionality.""" + + DEFAULT_FILE: ClassVar[str] = "benchmarks.json" + + @staticmethod + def load_file( + path: str | Path, type_: Literal["json", "yaml"] | None = None + ) -> GenerativeBenchmarksReport: + """ + Load a report from a file. + + :param path: The path to load the report from. + :param type_: File type override, auto-detected from extension if None. + :return: The loaded report. + :raises ValueError: If file type is unsupported. + """ + path = Path(path) if not isinstance(path, Path) else path + + if path.is_dir(): + path = path / GenerativeBenchmarksReport.DEFAULT_FILE + + path.parent.mkdir(parents=True, exist_ok=True) + path_suffix = path.suffix.lower()[1:] + + with path.open("r") as file: + if (type_ or path_suffix) == "json": + model_dict = json.loads(file.read()) + elif (type_ or path_suffix) in ["yaml", "yml"]: + model_dict = yaml.safe_load(file) + else: + raise ValueError(f"Unsupported file type: {type_} for {path}.") + + return GenerativeBenchmarksReport.model_validate(model_dict) + + benchmarks: list[GenerativeBenchmark] = Field( + description="The list of completed benchmarks contained within the report.", + default_factory=list, + ) + + def save_file( + self, path: str | Path | None, type_: Literal["json", "yaml"] | None = None + ) -> Path: + """ + Save the report to a file. + + :param path: The path to save the report to. + :param type_: File type override, auto-detected from extension if None. + :return: The path to the saved report. + :raises ValueError: If file type is unsupported. 
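+
+        Example (a rough usage sketch; the output path is illustrative)::
+
+            report = GenerativeBenchmarksReport(benchmarks=[...])
+            report.save_file("results/benchmarks.yaml")  # YAML chosen by suffix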
+ """ + if path is None: + path = Path.cwd() + elif not isinstance(path, Path): + path = Path(path) + + if path.is_dir(): + path = path / GenerativeBenchmarksReport.DEFAULT_FILE + + path.parent.mkdir(parents=True, exist_ok=True) + path_suffix = path.suffix.lower()[1:] + model_dict = self.model_dump() + + if (type_ or path_suffix) == "json": + save_str = json.dumps(model_dict) + elif (type_ or path_suffix) in ["yaml", "yml"]: + save_str = yaml.dump(model_dict) + else: + raise ValueError(f"Unsupported file type: {type_} for {path}.") + + with path.open("w") as file: + file.write(save_str) + + return path diff --git a/src/guidellm/data/__init__.py b/src/guidellm/data/__init__.py index d25c719a..0bff1b64 100644 --- a/src/guidellm/data/__init__.py +++ b/src/guidellm/data/__init__.py @@ -4,39 +4,25 @@ DatasetDeserializer, DatasetDeserializerFactory, ) -from .loaders import DataLoader -from .objects import ( - GenerationRequest, - GenerationRequestArguments, - GenerationRequestTimings, - GenerativeDatasetColumnType, - GenerativeRequestType, -) +from .loaders import DataLoader, DatasetsIterator from .preprocessors import ( DataDependentPreprocessor, DatasetPreprocessor, PreprocessorRegistry, ) from .processor import ProcessorFactory +from .schemas import GenerativeDatasetColumnType __all__ = [ - "ColumnMapper", - "ColumnMapperRegistry", "DataDependentPreprocessor", "DataLoader", "DataNotSupportedError", "DatasetDeserializer", "DatasetDeserializerFactory", "DatasetPreprocessor", - "GenerationRequest", - "GenerationRequestArguments", - "GenerationRequestTimings", - "GenerativeDatasetArgs", + "DatasetsIterator", "GenerativeDatasetColumnType", "GenerativeRequestCollator", - "GenerativeRequestType", "PreprocessorRegistry", "ProcessorFactory", - "RequestFormatter", - "RequestFormatterRegistry", ] diff --git a/src/guidellm/data/collators.py b/src/guidellm/data/collators.py index 4d12f0c0..f9e1ade4 100644 --- a/src/guidellm/data/collators.py +++ b/src/guidellm/data/collators.py @@ -1,6 +1,6 @@ from __future__ import annotations -from guidellm.data.objects import GenerationRequest +from guidellm.schemas import GenerationRequest __all__ = ["GenerativeRequestCollator"] diff --git a/src/guidellm/data/loaders.py b/src/guidellm/data/loaders.py index 89098964..f397ad51 100644 --- a/src/guidellm/data/loaders.py +++ b/src/guidellm/data/loaders.py @@ -1,42 +1,82 @@ from __future__ import annotations import contextlib -import math from collections.abc import Callable, Iterator from typing import Any, Literal -from datasets import Dataset, IterableDataset +import torch from torch.utils.data import Sampler from torch.utils.data.dataloader import DataLoader as PyTorchDataLoader +from torch.utils.data.dataset import IterableDataset as TorchIterableDataset from transformers import PreTrainedTokenizerBase from guidellm.data.deserializers import DatasetDeserializerFactory -from guidellm.data.objects import GenerationRequest from guidellm.data.preprocessors import DataDependentPreprocessor, DatasetPreprocessor -__all__ = ["DataIterator", "DataLoader"] +__all__ = ["DataLoader", "DatasetsIterator"] -class DataIterator: +class DatasetsIterator(TorchIterableDataset): def __init__( self, - datasets: list[Dataset | IterableDataset], + data: list[Any], + data_args: list[dict[str, Any]] | None, + data_samples: int, + processor_factory: Callable[[], PreTrainedTokenizerBase], preprocessors: list[DatasetPreprocessor | DataDependentPreprocessor], - precache_size: int | None = None, + random_seed: int, ): - self.datasets = datasets + if 
not data or not isinstance(data, list): + raise ValueError(f"Data must be a non-empty list, got {data}.") + + if data_args is None: + data_args = [{} for _ in data] + + if len(data) != len(data_args): + raise ValueError( + f"Length of data ({len(data)}) must match length of data_args " + f"({len(data_args)})." + ) + + self.datasets = [] + for datum, data_kwargs in zip(data, data_args): + self.datasets.append( + DatasetDeserializerFactory.deserialize( + data=datum, + processor_factory=processor_factory, + random_seed=random_seed, + **data_kwargs, + ) + ) self.preprocessors = preprocessors - self.precache = ( - None if not precache_size else list(self.generator(precache_size)) + for preprocessor in self.preprocessors: + if isinstance(preprocessor, DataDependentPreprocessor): + preprocessor.setup_data( + datasets=self.datasets, + data_args=data_args, + ) + self.precache: list[Any] | None = ( + list(self.generator(data_samples)) if data_samples else None ) def __iter__(self): + worker_info = torch.utils.data.get_worker_info() + modulus = worker_info.num_workers if worker_info is not None else 1 + index = worker_info.id if worker_info is not None else 0 + if self.precache is not None: - yield from self.precache + for index, item in enumerate(self.precache): + if index == index % modulus: + yield item else: - yield from self.generator() + yield from self.generator(modulus=modulus, offset=index) - def generator(self, max_items: int | None = None) -> Iterator[Any]: + def generator( + self, + max_items: int | None = None, + modulus: int | None = None, + offset: int | None = None, + ) -> Iterator[Any]: gen_count = 0 with contextlib.suppress(StopIteration): @@ -44,10 +84,18 @@ def generator(self, max_items: int | None = None) -> Iterator[Any]: while max_items is None or gen_count < max_items: row = {"items": [next(dataset_iter) for dataset_iter in dataset_iters]} + gen_count += 1 + + if ( + modulus is not None + and offset is not None + and (gen_count % modulus) != offset + ): + continue + for preprocessor in self.preprocessors: row = preprocessor(row) yield row - gen_count += 1 if max_items is not None and gen_count < max_items: raise ValueError( @@ -56,7 +104,7 @@ def generator(self, max_items: int | None = None) -> Iterator[Any]: ) -class DataLoader(PyTorchDataLoader[GenerationRequest]): +class DataLoader(PyTorchDataLoader): def __init__( self, data: list[Any], @@ -70,50 +118,21 @@ def __init__( random_seed: int = 42, **kwargs: Any, ): - if not data or not isinstance(data, list): - raise ValueError(f"Data must be a non-empty list, got {data}.") - - if data_args is None: - data_args = [{} for _ in data] - - if len(data) != len(data_args): - raise ValueError( - f"Length of data ({len(data)}) must match length of data_args " - f"({len(data_args)})." 
- ) - - datasets = [] - for datum, data_kwargs in zip(data, data_args): - datasets.append( - DatasetDeserializerFactory.deserialize( - data=datum, - processor_factory=processor_factory, - random_seed=random_seed, - **data_kwargs, - ) - ) - for preprocessor in preprocessors: - if isinstance(preprocessor, DataDependentPreprocessor): - preprocessor.setup_data( - datasets=datasets, - data_args=data_args, - ) - - data_iterator = DataIterator( - datasets=datasets, + iterator = DatasetsIterator( + data=data, + data_args=data_args, + data_samples=data_samples, + processor_factory=processor_factory, preprocessors=preprocessors, - precache_size=data_samples - if data_samples != math.inf and data_samples > 0 - else None, + random_seed=random_seed, ) - dataset = IterableDataset.from_generator(data_iterator.__iter__) super().__init__( - dataset=dataset, + dataset=iterator, batch_size=1, shuffle=sampler == "shuffle", sampler=sampler if sampler != "shuffle" else None, collate_fn=collator, - num_workers=num_workers, + num_workers=num_workers or 0, **kwargs, ) diff --git a/src/guidellm/data/objects.py b/src/guidellm/data/objects.py deleted file mode 100644 index 095014d3..00000000 --- a/src/guidellm/data/objects.py +++ /dev/null @@ -1,157 +0,0 @@ -from __future__ import annotations - -import uuid -from typing import Any, Literal - -from pydantic import Field - -from guidellm.scheduler import ( - MeasuredRequestTimings, - SchedulerMessagingPydanticRegistry, -) -from guidellm.utils import StandardBaseDict, StandardBaseModel - -__all__ = [ - "GenerationRequest", - "GenerationRequestArguments", - "GenerationRequestTimings", - "GenerativeDatasetColumnType", - "GenerativeRequestType", -] - - -GenerativeRequestType = Literal[ - "text_completions", - "chat_completions", - "audio_transcriptions", - "audio_translations", -] - -GenerativeDatasetColumnType = Literal[ - "prompt_tokens_count_column", - "output_tokens_count_column", - "prefix_column", - "text_column", - "image_column", - "video_column", - "audio_column", -] - - -class GenerationRequestArguments(StandardBaseDict): - @classmethod - def model_combine_dict( # noqa: C901, PLR0912 - cls, *arguments: GenerationRequestArguments | dict[str, Any] - ) -> dict[str, Any]: - combined = {} - - for args in arguments: - args_dict = args if isinstance(args, dict) else args.model_dump() - combined["url"] = args_dict.get("url", combined.get("url")) - combined["path"] = args_dict.get("path", combined.get("path")) - combined["method"] = args_dict.get("method", combined.get("method")) - combined["stream"] = args_dict.get("stream", combined.get("stream")) - combined["content_body"] = args_dict.get( - "content_body", combined.get("content_body") - ) - - if (json_body := args_dict.get("json_body")) is not None: - combined["json_body"] = combined.get("json_body", {}) + json_body - if (files := args_dict.get("files")) is not None: - combined["files"] = combined.get("files", {}) + files - if (params := args_dict.get("params")) is not None: - combined["params"] = combined.get("params", {}) + params - if (headers := args_dict.get("headers")) is not None: - combined["headers"] = combined.get("headers", {}) + headers - - return combined - - url: str | None = Field( - default=None, - description="The URL endpoint to which the request will be sent.", - ) - path: str | None = Field( - default=None, - description="The path to append to the base URL for the request.", - ) - method: str | None = Field( - default=None, - description="The HTTP method to use for the request (e.g., 'POST', 
'GET').", - ) - stream: bool | None = Field( - default=None, - description="Whether to stream the response, if applicable.", - ) - content_body: Any | None = Field( - default=None, - description="Raw content to send in the request body, if applicable.", - ) - json_body: dict[str, Any] | None = Field( - default=None, - description="JSON content to include in the request body, if applicable.", - ) - files: dict[str, Any] | None = Field( - default=None, - description="Files to include in the request, if applicable.", - ) - params: dict[str, Any] | None = Field( - default=None, - description="Query parameters to include in the request URL, if applicable.", - ) - headers: dict[str, str] | None = Field( - default=None, - description="HTTP headers to include in the request, if applicable.", - ) - - @property - def request_files(self) -> dict[str, Any] | None: - if not self.files: - return None - - return { - key: value if not isinstance(value, list) else tuple(value) - for key, value in self.files.items() - } - - -@SchedulerMessagingPydanticRegistry.register() -class GenerationRequest(StandardBaseModel): - """Request model for backend generation operations.""" - - request_id: str = Field( - default_factory=lambda: str(uuid.uuid4()), - description="Unique identifier for the request.", - ) - request_type: GenerativeRequestType | str = Field( - description=( - "Type of request. If url is not provided in arguments, " - "this will be used to determine the request url." - ), - ) - arguments: GenerationRequestArguments = Field( - description=( - "Payload for the request, structured as a dictionary of arguments to pass " - "to the respective backend method. For example, can contain " - "'json', 'headers', 'files', etc." - ) - ) - stats: dict[Literal["prompt_tokens", "output_tokens"], int] = Field( - default_factory=dict, - description="Request statistics including prompt and output token counts.", - ) - - -@SchedulerMessagingPydanticRegistry.register() -@MeasuredRequestTimings.register("generation_request_timings") -class GenerationRequestTimings(MeasuredRequestTimings): - """Timing model for tracking generation request lifecycle events.""" - - timings_type: Literal["generation_request_timings"] = "generation_request_timings" - first_iteration: float | None = Field( - default=None, - description="Unix timestamp when the first generation iteration began.", - ) - last_iteration: float | None = Field( - default=None, - description="Unix timestamp when the last generation iteration completed.", - ) diff --git a/src/guidellm/data/preprocessors/formatters.py b/src/guidellm/data/preprocessors/formatters.py index 02bb7398..76b0083b 100644 --- a/src/guidellm/data/preprocessors/formatters.py +++ b/src/guidellm/data/preprocessors/formatters.py @@ -2,21 +2,18 @@ from typing import Any -from guidellm.data.objects import ( - GenerationRequest, - GenerationRequestArguments, - GenerativeDatasetColumnType, -) from guidellm.data.preprocessors.preprocessor import ( DatasetPreprocessor, PreprocessorRegistry, ) +from guidellm.data.schemas import GenerativeDatasetColumnType from guidellm.data.utils import ( encode_audio_as_dict, encode_audio_as_file, encode_image, encode_video, ) +from guidellm.schemas import GenerationRequest, GenerationRequestArguments, UsageMetrics __all__ = [ "GenerativeAudioTranscriptionRequestFormatter", @@ -48,49 +45,52 @@ def __init__( def __call__( self, columns: dict[GenerativeDatasetColumnType, list[Any]] ) -> GenerationRequest: - arguments = {"json_body": {}} - stats = {} + body: dict[str, Any] = 
{} + arguments: GenerationRequestArguments = GenerationRequestArguments(body=body) + input_metrics = UsageMetrics() + output_metrics = UsageMetrics() # Add model if self.model is not None: - arguments["json_body"]["model"] = self.model + body["model"] = self.model # Configure streaming if self.stream: - arguments["json_body"].update( - {"stream": True, "stream_options": {"include_usage": True}} - ) - arguments["stream"] = True + arguments.stream = True + body["stream"] = True # Handle output tokens - if output_tokens := columns.get("output_tokens_count_column", []): - output_count = output_tokens[0] - stats["output_tokens"] = output_count - arguments["json_body"].update( - {"max_tokens": output_count, "stop": None, "ignore_eos": True} - ) + if output_tokens := sum( + count for count in columns.get("output_tokens_count_column", []) if count + ): + output_metrics.text_tokens = output_tokens + body["max_tokens"] = output_tokens + body["stop"] = None + body["ignore_eos"] = True elif self.max_tokens is not None: - arguments["json_body"]["max_tokens"] = self.max_tokens + body["max_tokens"] = self.max_tokens # Handle prompt tokens - if prompt_tokens := columns.get("prompt_tokens_count_column", []): - stats["prompt_tokens"] = prompt_tokens[0] + if prompt_tokens := sum( + count for count in columns.get("prompt_tokens_count_column", []) if count + ): + input_metrics.text_tokens = prompt_tokens # Apply extra arguments if self.extras: - arguments = GenerationRequestArguments.model_combine_dict( - arguments, self.extras - ) + arguments.model_combine(self.extras) # Build prompt - arguments["json_body"]["prompt"] = "".join( - columns.get("prefix_column", []) + columns.get("text_column", []) - ) + prefix = "".join(pre for pre in columns.get("prefix_column", []) if pre) + text = "".join(txt for txt in columns.get("text_column", []) if txt) + if prefix or text: + body["prompt"] = prefix + text return GenerationRequest( request_type="text_completions", - arguments=GenerationRequestArguments(**arguments), - stats=stats, + arguments=arguments, + input_metrics=input_metrics, + output_metrics=output_metrics, ) @@ -126,53 +126,56 @@ def __init__( def __call__( self, columns: dict[GenerativeDatasetColumnType, list[Any]] ) -> GenerationRequest: - arguments = {"json_body": {}} - stats = {} + body: dict[str, Any] = {} + arguments = GenerationRequestArguments(body=body) + input_metrics = UsageMetrics() + output_metrics = UsageMetrics() # Add model if self.model is not None: - arguments["json_body"]["model"] = self.model + body["model"] = self.model # Configure streaming if self.stream: - arguments["json_body"].update( - {"stream": True, "stream_options": {"include_usage": True}} - ) - arguments["stream"] = True + arguments.stream = True + body.update({"stream": True, "stream_options": {"include_usage": True}}) # Handle output tokens - if output_tokens := columns.pop("output_tokens_count_column", []): - output_count = output_tokens[0] - stats["output_tokens"] = output_count - arguments["json_body"].update( + if output_tokens := sum( + count for count in columns.get("output_tokens_count_column", []) if count + ): + output_metrics.text_tokens = output_tokens + body.update( { - "max_completion_tokens": output_count, + "max_completion_tokens": output_tokens, "stop": None, "ignore_eos": True, } ) elif self.max_completion_tokens is not None: - arguments["json_body"]["max_completion_tokens"] = self.max_completion_tokens + body["max_completion_tokens"] = self.max_completion_tokens # Handle prompt tokens - if prompt_tokens := 
columns.pop("prompt_tokens_count_column", []): - stats["prompt_tokens"] = prompt_tokens[0] + if prompt_tokens := sum( + count for count in columns.get("prompt_tokens_count_column", []) if count + ): + input_metrics.text_tokens = prompt_tokens # Apply extra arguments if self.extras: - arguments = GenerationRequestArguments.model_combine_dict( - arguments, self.extras - ) + arguments.model_combine(self.extras) # Build messages - arguments["json_body"]["messages"] = ( + body["messages"] = ( [ {"role": "system", "content": prefix} - for prefix in columns.pop("prefix_column", []) + for prefix in columns.get("prefix_column", []) + if prefix ] + [ {"role": "user", "content": [{"type": "text", "text": text}]} - for text in columns.pop("text_column", []) + for text in columns.get("text_column", []) + if text ] + [ { @@ -186,7 +189,8 @@ def __call__( } ], } - for image in columns.pop("image_column", []) + for image in columns.get("image_column", []) + if image ] + [ { @@ -200,7 +204,8 @@ def __call__( } ], } - for video in columns.pop("video_column", []) + for video in columns.get("video_column", []) + if video ] + [ { @@ -214,14 +219,16 @@ def __call__( } ], } - for audio in columns.pop("audio_column", []) + for audio in columns.get("audio_column", []) + if audio ] ) return GenerationRequest( request_type="chat_completions", - arguments=GenerationRequestArguments(**arguments), - stats=stats, + arguments=arguments, + input_metrics=input_metrics, + output_metrics=output_metrics, ) @@ -230,63 +237,72 @@ class GenerativeAudioTranscriptionRequestFormatter(DatasetPreprocessor): def __init__( self, model: str, - extra_args: dict[str, Any] | GenerationRequestArguments | None = None, + extras: dict[str, Any] | GenerationRequestArguments | None = None, stream: bool = True, encode_kwargs: dict[str, Any] | None = None, ): self.model = model - self.extra_args = extra_args + self.extras = ( + GenerationRequestArguments(**extras) + if extras and isinstance(extras, dict) + else extras + ) self.stream = stream self.encode_audio_kwargs = encode_kwargs or {} - def __call__( + def __call__( # noqa: C901 self, columns: dict[GenerativeDatasetColumnType, list[Any]] ) -> GenerationRequest: - arguments = {"json_body": {}, "files": {}} - stats = {} + body: dict[str, Any] = {} + arguments = GenerationRequestArguments(body=body, files={}) + input_metrics = UsageMetrics() + output_metrics = UsageMetrics() # Add model if self.model is not None: - arguments["json_body"]["model"] = self.model + body["model"] = self.model # Configure streaming if self.stream: - arguments["stream"] = True - arguments["json_body"].update( - {"stream": True, "stream_options": {"include_usage": True}} - ) + arguments.stream = True + body.update({"stream": True, "stream_options": {"include_usage": True}}) - # Apply extra arguments - if self.extra_args: - arguments = GenerationRequestArguments.model_combine_dict( - arguments, self.extra_args - ) + # Handle output tokens + if output_tokens := sum( + count for count in columns.get("output_tokens_count_column", []) if count + ): + output_metrics.text_tokens = output_tokens - # Handle stats tokens - if output_tokens := columns.get("output_tokens_count_column", []): - output_count = output_tokens[0] - stats["output_tokens"] = output_count - if prompt_tokens := columns.get("prompt_tokens_count_column", []): - stats["prompt_tokens"] = prompt_tokens[0] + # Handle prompt tokens (for audio duration tracking) + if prompt_tokens := sum( + count for count in columns.get("prompt_tokens_count_column", []) if count + 
): + input_metrics.text_tokens = prompt_tokens + + # Apply extra arguments + if self.extras: + arguments.model_combine(self.extras) # Build audio input - if audio := columns.get("audio_column", []): - arguments["files"]["file"] = encode_audio_as_file( + if audio := [aud for aud in columns.get("audio_column", []) if aud]: + file_name, content, mime_type = encode_audio_as_file( audio[0], **self.encode_audio_kwargs ) + arguments.files = {"file": (file_name, content, mime_type)} else: raise ValueError("No audio column found for audio transcription request.") # Build prompt - if (prefix := columns.get("prefix_column", [])) or ( - text := columns.get("text_column", []) - ): - arguments["json_body"]["prompt"] = "".join(prefix) + "".join(text) + prefix = "".join(pre for pre in columns.get("prefix_column", []) if pre) + text = "".join(txt for txt in columns.get("text_column", []) if txt) + if prefix or text: + body["prompt"] = prefix + text return GenerationRequest( request_type="audio_transcriptions", - arguments=GenerationRequestArguments(**arguments), - stats=stats, + arguments=arguments, + input_metrics=input_metrics, + output_metrics=output_metrics, ) @@ -299,5 +315,4 @@ def __call__( ) -> GenerationRequest: result = super().__call__(columns) result.request_type = "audio_translations" - return result diff --git a/src/guidellm/data/preprocessors/mappers.py b/src/guidellm/data/preprocessors/mappers.py index 5e64b51c..cbfa9c20 100644 --- a/src/guidellm/data/preprocessors/mappers.py +++ b/src/guidellm/data/preprocessors/mappers.py @@ -5,11 +5,11 @@ from datasets import Dataset, IterableDataset -from guidellm.data.objects import GenerativeDatasetColumnType from guidellm.data.preprocessors.preprocessor import ( DataDependentPreprocessor, PreprocessorRegistry, ) +from guidellm.data.schemas import GenerativeDatasetColumnType __all__ = ["GenerativeColumnMapper"] diff --git a/src/guidellm/data/schemas.py b/src/guidellm/data/schemas.py new file mode 100644 index 00000000..c4421e07 --- /dev/null +++ b/src/guidellm/data/schemas.py @@ -0,0 +1,13 @@ +from typing import Literal + +__all__ = ["GenerativeDatasetColumnType"] + +GenerativeDatasetColumnType = Literal[ + "prompt_tokens_count_column", + "output_tokens_count_column", + "prefix_column", + "text_column", + "image_column", + "video_column", + "audio_column", +] diff --git a/src/guidellm/scheduler/__init__.py b/src/guidellm/scheduler/__init__.py index 64647424..2f5eb53f 100644 --- a/src/guidellm/scheduler/__init__.py +++ b/src/guidellm/scheduler/__init__.py @@ -12,21 +12,18 @@ UnserializableConstraintInitializer, ) from .environments import Environment, NonDistributedEnvironment -from .objects import ( +from .scheduler import Scheduler +from .schemas import ( BackendInterface, BackendT, - MeasuredRequestTimings, MultiTurnRequestT, - RequestSchedulerTimings, RequestT, ResponseT, - ScheduledRequestInfo, SchedulerMessagingPydanticRegistry, SchedulerState, SchedulerUpdateAction, SchedulerUpdateActionProgress, ) -from .scheduler import Scheduler from .strategies import ( AsyncConstantStrategy, AsyncPoissonStrategy, @@ -62,16 +59,13 @@ "MaxErrorsConstraint", "MaxGlobalErrorRateConstraint", "MaxNumberConstraint", - "MeasuredRequestTimings", "MultiTurnRequestT", "NoDelayRequestTimings", "NonDistributedEnvironment", "PoissonRateRequestTimings", "PydanticConstraintInitializer", - "RequestSchedulerTimings", "RequestT", "ResponseT", - "ScheduledRequestInfo", "ScheduledRequestTimings", "Scheduler", "SchedulerMessagingPydanticRegistry", diff --git 
a/src/guidellm/scheduler/constraints.py b/src/guidellm/scheduler/constraints.py index c724a74a..94419f38 100644 --- a/src/guidellm/scheduler/constraints.py +++ b/src/guidellm/scheduler/constraints.py @@ -16,12 +16,12 @@ from pydantic import Field, field_validator -from guidellm.scheduler.objects import ( - ScheduledRequestInfo, +from guidellm.scheduler.schemas import ( SchedulerState, SchedulerUpdateAction, SchedulerUpdateActionProgress, ) +from guidellm.schemas import RequestInfo from guidellm.settings import settings from guidellm.utils import InfoMixin, RegistryMixin, StandardBaseModel @@ -46,7 +46,7 @@ class Constraint(Protocol): """Protocol for constraint evaluation functions that control scheduler behavior.""" def __call__( - self, state: SchedulerState, request: ScheduledRequestInfo + self, state: SchedulerState, request: RequestInfo ) -> SchedulerUpdateAction: """ Evaluate constraint against scheduler state and request information. @@ -370,7 +370,7 @@ def create_constraint( def __call__( self, state: SchedulerState, # noqa: ARG002 - request: ScheduledRequestInfo, # noqa: ARG002 + request: RequestInfo, # noqa: ARG002 ) -> SchedulerUpdateAction: """ Raise error since unserializable constraints cannot be invoked. @@ -438,7 +438,7 @@ def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 def __call__( self, state: SchedulerState, - request_info: ScheduledRequestInfo, # noqa: ARG002 + request_info: RequestInfo, # noqa: ARG002 ) -> SchedulerUpdateAction: """ Evaluate constraint against current scheduler state and request count. @@ -556,7 +556,7 @@ def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 def __call__( self, state: SchedulerState, - request_info: ScheduledRequestInfo, # noqa: ARG002 + request_info: RequestInfo, # noqa: ARG002 ) -> SchedulerUpdateAction: """ Evaluate constraint against current scheduler state and elapsed time. @@ -670,7 +670,7 @@ def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 def __call__( self, state: SchedulerState, - request_info: ScheduledRequestInfo, # noqa: ARG002 + request_info: RequestInfo, # noqa: ARG002 ) -> SchedulerUpdateAction: """ Evaluate constraint against current error count. @@ -787,7 +787,7 @@ def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 return self.model_copy() # type: ignore[return-value] def __call__( - self, state: SchedulerState, request_info: ScheduledRequestInfo + self, state: SchedulerState, request_info: RequestInfo ) -> SchedulerUpdateAction: """ Evaluate constraint against sliding window error rate. @@ -928,7 +928,7 @@ def create_constraint(self, **kwargs) -> Constraint: # noqa: ARG002 def __call__( self, state: SchedulerState, - request_info: ScheduledRequestInfo, # noqa: ARG002 + request_info: RequestInfo, # noqa: ARG002 ) -> SchedulerUpdateAction: """ Evaluate constraint against global error rate. 
@@ -1007,7 +1007,7 @@ def info(self) -> dict[str, Any]: def __call__( self, state: SchedulerState, - request_info: ScheduledRequestInfo, # noqa: ARG002 + request_info: RequestInfo, # noqa: ARG002 ) -> SchedulerUpdateAction: create_exceeded = state.created_requests >= self.num_requests processed_exceeded = state.processed_requests >= self.num_requests diff --git a/src/guidellm/scheduler/environments.py b/src/guidellm/scheduler/environments.py index 6234f8f6..ed756a06 100644 --- a/src/guidellm/scheduler/environments.py +++ b/src/guidellm/scheduler/environments.py @@ -25,14 +25,14 @@ ) from guidellm.scheduler.constraints import Constraint -from guidellm.scheduler.objects import ( +from guidellm.scheduler.schemas import ( MultiTurnRequestT, RequestT, ResponseT, - ScheduledRequestInfo, SchedulerState, ) from guidellm.scheduler.strategies import SchedulingStrategy +from guidellm.schemas import RequestInfo from guidellm.settings import settings from guidellm.utils import InfoMixin @@ -93,7 +93,7 @@ async def update_run_iteration( self, response: ResponseT | None, request: RequestT, - request_info: ScheduledRequestInfo, + request_info: RequestInfo, state: SchedulerState, ): """ @@ -131,7 +131,7 @@ async def sync_run_end( tuple[ ResponseT, RequestT | MultiTurnRequestT[RequestT], - ScheduledRequestInfo, + RequestInfo, SchedulerState, ] ]: @@ -162,7 +162,7 @@ class NonDistributedEnvironment(Environment): from guidellm.scheduler import ( MaxNumberConstraint, NonDistributedEnvironment, - ScheduledRequestInfo, + RequestInfo, SchedulerState, SynchronousStrategy, ) @@ -182,7 +182,7 @@ class NonDistributedEnvironment(Environment): for req in local_req: state.processed_requests += 1 await env.update_run_iteration( - f"resp_{req}", req, ScheduledRequestInfo(), state + f"resp_{req}", req, RequestInfo(), state ) async for nonlocal_req in env.sync_run_end(): state.processed_requests += 1 @@ -224,7 +224,7 @@ async def update_run_iteration( self, response: ResponseT | None, request: RequestT, - request_info: ScheduledRequestInfo, + request_info: RequestInfo, state: SchedulerState, ): """ @@ -251,7 +251,7 @@ async def sync_run_end( tuple[ ResponseT, RequestT | MultiTurnRequestT[RequestT], - ScheduledRequestInfo, + RequestInfo, SchedulerState, ] ]: diff --git a/src/guidellm/scheduler/scheduler.py b/src/guidellm/scheduler/scheduler.py index d9bb7c23..e03d6161 100644 --- a/src/guidellm/scheduler/scheduler.py +++ b/src/guidellm/scheduler/scheduler.py @@ -18,16 +18,16 @@ ConstraintsInitializerFactory, ) from guidellm.scheduler.environments import Environment, NonDistributedEnvironment -from guidellm.scheduler.objects import ( +from guidellm.scheduler.schemas import ( BackendInterface, MultiTurnRequestT, RequestT, ResponseT, - ScheduledRequestInfo, SchedulerState, ) from guidellm.scheduler.strategies import SchedulingStrategy from guidellm.scheduler.worker_group import WorkerProcessGroup +from guidellm.schemas import RequestInfo from guidellm.utils.singleton import ThreadSafeSingletonMixin __all__ = ["Scheduler"] @@ -75,7 +75,7 @@ async def run( tuple[ ResponseT | None, RequestT, - ScheduledRequestInfo, + RequestInfo, SchedulerState, ] ]: @@ -146,6 +146,7 @@ async def run( ) yield response, request, request_info, state except Exception as err: # noqa: BLE001 + raise err await env.sync_run_error(err) finally: # Ensure all worker processes are cleaned up for error or completion diff --git a/src/guidellm/scheduler/objects.py b/src/guidellm/scheduler/schemas.py similarity index 61% rename from 
src/guidellm/scheduler/objects.py rename to src/guidellm/scheduler/schemas.py index b7f2efc3..eab50f14 100644 --- a/src/guidellm/scheduler/objects.py +++ b/src/guidellm/scheduler/schemas.py @@ -10,11 +10,9 @@ from __future__ import annotations import time -import uuid from collections.abc import AsyncIterator from typing import ( Any, - ClassVar, Generic, Literal, Protocol, @@ -22,11 +20,11 @@ Union, ) -from pydantic import Field, computed_field +from pydantic import Field from typing_extensions import TypeAliasType, TypedDict +from guidellm.schemas import RequestInfo from guidellm.utils import ( - PydanticClassRegistryMixin, RegistryMixin, StandardBaseModel, ) @@ -35,12 +33,9 @@ __all__ = [ "BackendInterface", "BackendT", - "MeasuredRequestTimings", "MultiTurnRequestT", - "RequestSchedulerTimings", "RequestT", "ResponseT", - "ScheduledRequestInfo", "SchedulerMessagingPydanticRegistry", "SchedulerState", "SchedulerUpdateAction", @@ -71,167 +66,6 @@ class SchedulerMessagingPydanticRegistry(RegistryMixin[RegistryObjT]): """ -@SchedulerMessagingPydanticRegistry.register() -class RequestSchedulerTimings(StandardBaseModel): - """ - Scheduler-level timing measurements for request lifecycle tracking. - All timestamps are expected to be in Unix time (seconds since epoch). - """ - - targeted_start: float | None = Field( - default=None, - description="When the request was initially targeted for execution", - ) - queued: float | None = Field( - default=None, - description="When the request was placed into the processing queue", - ) - dequeued: float | None = Field( - default=None, - description="When the request was removed from the queue for processing", - ) - scheduled_at: float | None = Field( - default=None, description="When the request was scheduled for processing" - ) - resolve_start: float | None = Field( - default=None, description="When backend resolution of the request began" - ) - resolve_end: float | None = Field( - default=None, description="When backend resolution of the request completed" - ) - finalized: float | None = Field( - default=None, - description="When the request was processed/acknowledged by the scheduler", - ) - - -@SchedulerMessagingPydanticRegistry.register() -class MeasuredRequestTimings(PydanticClassRegistryMixin["MeasuredRequestTimings"]): - """ - Base timing measurements for backend request processing. - All timestamps are expected to be in Unix time (seconds since epoch). - """ - - @classmethod - def __pydantic_schema_base_type__(cls) -> type[MeasuredRequestTimings]: - if cls.__name__ == "MeasuredRequestTimings": - return cls - - return MeasuredRequestTimings - - schema_discriminator: ClassVar[str] = "timings_type" - - timings_type: Literal["measured_request_timings"] = Field( - default="measured_request_timings", - description="Type identifier for the timing measurement", - ) - request_start: float | None = Field( - default=None, description="When the backend began processing the request" - ) - request_end: float | None = Field( - default=None, description="When the backend completed processing the request" - ) - - -@SchedulerMessagingPydanticRegistry.register() -class ScheduledRequestInfo(StandardBaseModel): - """ - Complete request information including status, timings, and metadata. - - Central data structure for tracking request lifecycle from creation through - completion, containing scheduling metadata, timing measurements, and processing - status. Used by scheduler components to coordinate request processing across - distributed worker processes. 
- - Example: - :: - from guidellm.scheduler.objects import ScheduledRequestInfo - - # Create request info with automatic ID generation - request_info = ScheduledRequestInfo() - request_info.status = "in_progress" - request_info.scheduler_timings.queued = time.time() - - # Check processing completion - if request_info.completed_at: - duration = request_info.completed_at - request_info.started_at - """ - - request_id: str = Field( - description="Unique identifier for the request", - default_factory=lambda: str(uuid.uuid4()), - ) - status: Literal[ - "queued", "pending", "in_progress", "completed", "errored", "cancelled" - ] = Field(description="Current processing status of the request", default="queued") - scheduler_node_id: int = Field( - description="ID/rank of the scheduler node handling the request", - default=-1, - ) - scheduler_process_id: int = Field( - description="ID/rank of the node's scheduler process handling the request", - default=-1, - ) - scheduler_start_time: float = Field( - description="Unix timestamp for the local time when scheduler processing began", - default=-1, - ) - - error: str | None = Field( - default=None, description="Error message if the request.status is 'errored'" - ) - scheduler_timings: RequestSchedulerTimings = Field( - default_factory=RequestSchedulerTimings, - description="Scheduler-level timing measurements for request lifecycle", - ) - request_timings: MeasuredRequestTimings | None = Field( - default=None, - description="Backend-specific timing measurements for request processing", - ) - - @computed_field # type: ignore[misc] - @property - def started_at(self) -> float | None: - """ - Get the effective request processing start time. - - :return: Unix timestamp when processing began, or None if not started. - """ - request_start = ( - self.request_timings.request_start if self.request_timings else None - ) - - return request_start or self.scheduler_timings.resolve_start - - @computed_field # type: ignore[misc] - @property - def completed_at(self) -> float | None: - """ - Get the effective request processing completion time. - - :return: Unix timestamp when processing completed, or None if not completed. - """ - request_end = self.request_timings.request_end if self.request_timings else None - - return request_end or self.scheduler_timings.resolve_end - - def model_copy(self, **kwargs) -> ScheduledRequestInfo: # type: ignore[override] # noqa: ARG002 - """ - Create a deep copy of the request info with copied timing objects. - - :return: New ScheduledRequestInfo instance with independent timing objects - """ - return super().model_copy( - update={ - "scheduler_timings": self.scheduler_timings.model_copy(), - "request_timings": ( - self.request_timings.model_copy() if self.request_timings else None - ), - }, - deep=False, - ) - - class BackendInterface(Protocol, Generic[RequestT, ResponseT]): """ Abstract interface for request processing backends. @@ -297,9 +131,9 @@ async def process_shutdown(self) -> None: async def resolve( self, request: RequestT, - request_info: ScheduledRequestInfo, + request_info: RequestInfo, history: list[tuple[RequestT, ResponseT]] | None = None, - ) -> AsyncIterator[tuple[ResponseT, ScheduledRequestInfo]]: + ) -> AsyncIterator[tuple[ResponseT, RequestInfo]]: """ Process a request and yield incremental response updates. 
diff --git a/src/guidellm/scheduler/strategies.py b/src/guidellm/scheduler/strategies.py index 8c791671..267001e5 100644 --- a/src/guidellm/scheduler/strategies.py +++ b/src/guidellm/scheduler/strategies.py @@ -17,7 +17,7 @@ from pydantic import Field, PrivateAttr -from guidellm.scheduler.objects import ScheduledRequestInfo +from guidellm.schemas import RequestInfo from guidellm.utils import InfoMixin, PydanticClassRegistryMixin, StandardBaseModel __all__ = [ @@ -83,7 +83,7 @@ def next_offset(self) -> float: """ @abstractmethod - def request_completed(self, request_info: ScheduledRequestInfo): + def request_completed(self, request_info: RequestInfo): """ Handle request completion and update internal timing state. @@ -129,7 +129,7 @@ def next_offset(self) -> float: return self.offset - def request_completed(self, request_info: ScheduledRequestInfo): + def request_completed(self, request_info: RequestInfo): """ Update timing state based on the completed request. @@ -197,7 +197,7 @@ def next_offset(self) -> float: return self.offset + startup_percent * self.startup_duration - def request_completed(self, request_info: ScheduledRequestInfo): + def request_completed(self, request_info: RequestInfo): """ Handle request completion (no action needed for throughput strategy). @@ -236,7 +236,7 @@ def next_offset(self) -> float: return self.offset + interval * num_requests - def request_completed(self, request_info: ScheduledRequestInfo): + def request_completed(self, request_info: RequestInfo): """ Handle request completion (no action needed for constant rate strategy). @@ -283,7 +283,7 @@ def next_offset(self) -> float: return self.offset - def request_completed(self, request_info: ScheduledRequestInfo): + def request_completed(self, request_info: RequestInfo): """ Handle request completion (no action needed for Poisson rate strategy). 
diff --git a/src/guidellm/scheduler/worker.py b/src/guidellm/scheduler/worker.py index 1832d25f..4b426058 100644 --- a/src/guidellm/scheduler/worker.py +++ b/src/guidellm/scheduler/worker.py @@ -29,15 +29,14 @@ ] = False -from guidellm.scheduler.objects import ( +from guidellm.scheduler.schemas import ( BackendInterface, MultiTurnRequestT, RequestT, ResponseT, - ScheduledRequestInfo, - SchedulerMessagingPydanticRegistry, ) from guidellm.scheduler.strategies import ScheduledRequestTimings +from guidellm.schemas import RequestInfo from guidellm.utils import ( InterProcessMessaging, wait_for_sync_barrier, @@ -77,7 +76,7 @@ def __init__( tuple[ ResponseT | None, RequestT | MultiTurnRequestT[RequestT], - ScheduledRequestInfo, + RequestInfo, ], ], backend: BackendInterface[RequestT, ResponseT], @@ -241,8 +240,7 @@ async def _processing_startup(self): # Get messaging system ready await self.messaging.start( - receive_stop_criteria=[self.requests_generated_event], - pydantic_models=list(SchedulerMessagingPydanticRegistry.registry.values()), + receive_stop_criteria=[self.requests_generated_event] ) self.messaging_started = True @@ -289,56 +287,59 @@ async def _cancel_requests_loop(self): while True: try: request: RequestT - request_info: ScheduledRequestInfo + request_info: RequestInfo request, request_info = await self.messaging.get( timeout=self.messaging.poll_interval ) except asyncio.TimeoutError: continue - request_info.scheduler_node_id = self.messaging.worker_index + request_info.scheduler_node_id = self.messaging.worker_index or -1 request_info.error = "Request was cancelled" - request_info.scheduler_timings.resolve_end = time.time() + request_info.timings.resolve_end = time.time() self._send_update("cancelled", None, request, request_info) async def _process_next_request(self): request: RequestT | MultiTurnRequestT[RequestT] | None = None - request_info: ScheduledRequestInfo | None = None + request_info: RequestInfo | None = None response: ResponseT | None = None try: # Pull request from the queue request, request_info = await self.messaging.get() + if request is None or request_info is None: + raise RuntimeError("Received invalid request or request info") + if isinstance(request, (list, tuple)): raise NotImplementedError("Multi-turn requests are not yet supported") # Calculate targeted start and set pending state for request - request_info.scheduler_node_id = self.messaging.worker_index - request_info.scheduler_timings.dequeued = time.time() + request_info.scheduler_node_id = self.messaging.worker_index or -1 + request_info.timings.dequeued = time.time() target_start = ( request_info.scheduler_start_time + self.request_timings.next_offset() ) - request_info.scheduler_timings.targeted_start = target_start + request_info.timings.targeted_start = target_start self._send_update("pending", response, request, request_info) # Schedule the request current_time = time.time() - request_info.scheduler_timings.scheduled_at = current_time + request_info.timings.scheduled_at = current_time if target_start > current_time: await asyncio.sleep(target_start - current_time) # Adapt delay so that scheduled at reflects the sleep time - request_info.scheduler_timings.scheduled_at = target_start + request_info.timings.scheduled_at = target_start # Process the request with the backend - request_info.scheduler_timings.resolve_start = time.time() + request_info.timings.resolve_start = time.time() self._send_update("in_progress", response, request, request_info) async for resp, info in self.backend.resolve(request, 
request_info, None): response = resp request_info = info # Complete the request - request_info.scheduler_timings.resolve_end = time.time() + request_info.timings.resolve_end = time.time() self._send_update("completed", response, request, request_info) response = request = request_info = None @@ -346,13 +347,13 @@ async def _process_next_request(self): # Handle cancellation if request is not None and request_info is not None: request_info.error = "Request was cancelled" - request_info.scheduler_timings.resolve_end = time.time() + request_info.timings.resolve_end = time.time() self._send_update("cancelled", response, request, request_info) raise except Exception as exc: # noqa: BLE001 if request is not None and request_info is not None: request_info.error = str(exc) - request_info.scheduler_timings.resolve_end = time.time() + request_info.timings.resolve_end = time.time() self._send_update("errored", response, request, request_info) def _send_update( @@ -362,7 +363,7 @@ def _send_update( ], response: ResponseT | None, request: RequestT | MultiTurnRequestT[RequestT], - request_info: ScheduledRequestInfo, + request_info: RequestInfo, ): prev_status = request_info.status diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index 278fb44d..0f3a1acb 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -23,18 +23,17 @@ from typing import Generic, NamedTuple from guidellm.scheduler.constraints import Constraint, RequestsExhaustedConstraint -from guidellm.scheduler.objects import ( +from guidellm.scheduler.schemas import ( BackendInterface, MultiTurnRequestT, RequestT, ResponseT, - ScheduledRequestInfo, - SchedulerMessagingPydanticRegistry, SchedulerState, SchedulerUpdateAction, ) from guidellm.scheduler.strategies import SchedulingStrategy from guidellm.scheduler.worker import WorkerProcess +from guidellm.schemas import RequestInfo from guidellm.settings import settings from guidellm.utils import ( InterProcessMessaging, @@ -130,12 +129,12 @@ def __init__( self.messaging: InterProcessMessaging[ tuple[ RequestT | MultiTurnRequestT[RequestT], - ScheduledRequestInfo, + RequestInfo, ], tuple[ ResponseT | None, RequestT | MultiTurnRequestT[RequestT], - ScheduledRequestInfo, + RequestInfo, SchedulerState, ], ] = None @@ -303,7 +302,6 @@ async def start(self, start_time: float): send_stopped_event=send_requests_stopped_event, send_stop_criteria=[stop_send_requests_event], receive_stop_criteria=[self.shutdown_event], - pydantic_models=list(SchedulerMessagingPydanticRegistry.registry.values()), ) if (wait_time := start_time - time.time()) > 0: @@ -320,7 +318,7 @@ async def request_updates( tuple[ ResponseT | None, RequestT, - ScheduledRequestInfo, + RequestInfo, SchedulerState, ] ]: @@ -485,7 +483,7 @@ def requests_generator( :return: Generator yielding (request, request_info) tuples """ - def _iter(): + def _iter() -> Iterator[RequestT | MultiTurnRequestT[RequestT]]: if requests is not None: yield from requests @@ -494,7 +492,7 @@ def _iter(): yield from cycle_requests count = 0 - request_info: ScheduledRequestInfo = None + request_info: RequestInfo = None for request in _iter(): count += 1 @@ -505,14 +503,14 @@ def _iter(): request_id = request.id else: request_id = str(uuid.uuid4()) - request_info: ScheduledRequestInfo = ScheduledRequestInfo( + request_info: RequestInfo = RequestInfo( request_id=request_id, status="queued", scheduler_process_id=0, scheduler_start_time=self.start_time, ) state_update = 
self._locked_update(request_info) - request_info.scheduler_timings.queued = time.time() + request_info.timings.queued = time.time() yield (request, request_info) @@ -532,12 +530,12 @@ def received_callback( update: tuple[ ResponseT | None, RequestT | MultiTurnRequestT, - ScheduledRequestInfo, + RequestInfo, ], ) -> tuple[ ResponseT | None, RequestT | MultiTurnRequestT, - ScheduledRequestInfo, + RequestInfo, SchedulerState, ]: """ @@ -585,7 +583,7 @@ def received_callback( def _locked_update( self, - info: ScheduledRequestInfo | None = None, + info: RequestInfo | None = None, **add_constraints: dict[str, Constraint], ) -> _StateUpdate: with self._update_lock: @@ -605,7 +603,7 @@ def _locked_update( state_copy.end_processing_time is not None, ) - def _update_state_request_counts(self, info: ScheduledRequestInfo): + def _update_state_request_counts(self, info: RequestInfo): if info.status == "queued": self._queued_requests.add(info.request_id) self._state.queued_requests = len(self._queued_requests) @@ -642,7 +640,7 @@ def _update_state_request_counts(self, info: ScheduledRequestInfo): else: raise ValueError(f"Unknown request_info status {info.status} for {info}") - def _update_with_constraints(self, info: ScheduledRequestInfo): + def _update_with_constraints(self, info: RequestInfo): actions: dict[str, SchedulerUpdateAction] = { name: const(self._state, info) for name, const in self.constraints.items() } diff --git a/src/guidellm/schemas/__init__.py b/src/guidellm/schemas/__init__.py new file mode 100644 index 00000000..d49cd952 --- /dev/null +++ b/src/guidellm/schemas/__init__.py @@ -0,0 +1,20 @@ +from .info import RequestInfo, RequestTimings +from .request import ( + GenerationRequest, + GenerationRequestArguments, + GenerativeRequestType, + UsageMetrics, +) +from .response import GenerationResponse +from .stats import GenerativeRequestStats + +__all__ = [ + "GenerationRequest", + "GenerationRequestArguments", + "GenerationResponse", + "GenerativeRequestStats", + "GenerativeRequestType", + "RequestInfo", + "RequestTimings", + "UsageMetrics", +] diff --git a/src/guidellm/schemas/info.py b/src/guidellm/schemas/info.py new file mode 100644 index 00000000..3d5e61b7 --- /dev/null +++ b/src/guidellm/schemas/info.py @@ -0,0 +1,132 @@ +""" +Core data structures and interfaces for the GuideLLM scheduler system. + +Provides type-safe abstractions for distributed request processing, timing +measurements, and backend interfaces for benchmarking operations. Central to +the scheduler architecture, enabling request lifecycle tracking, backend +coordination, and state management across distributed worker processes. 
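+
+Example:
+    ::
+        # Illustrative sketch using the RequestInfo model defined below
+        import time
+
+        from guidellm.schemas import RequestInfo
+
+        info = RequestInfo()
+        info.status = "in_progress"
+        info.timings.queued = time.time()
+
+        if info.completed_at is not None:
+            duration = info.completed_at - info.started_at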
+""" + +from __future__ import annotations + +import uuid +from typing import Literal + +from pydantic import Field, computed_field + +from guidellm.utils import StandardBaseDict, StandardBaseModel + +__all__ = ["RequestInfo", "RequestTimings"] + + +class RequestTimings(StandardBaseDict): + targeted_start: float | None = Field( + default=None, + description="When the request was initially targeted for execution", + ) + queued: float | None = Field( + default=None, + description="When the request was placed into the processing queue", + ) + dequeued: float | None = Field( + default=None, + description="When the request was removed from the queue for processing", + ) + scheduled_at: float | None = Field( + default=None, description="When the request was scheduled for processing" + ) + resolve_start: float | None = Field( + default=None, description="When backend resolution of the request began" + ) + request_start: float | None = Field( + default=None, description="When the backend began processing the request" + ) + first_iteration: float | None = Field( + default=None, + description="Unix timestamp when the first generation iteration began.", + ) + last_iteration: float | None = Field( + default=None, + description="Unix timestamp when the last generation iteration completed.", + ) + iterations: int | None = Field( + default=None, + description="Total number of streaming update iterations performed.", + ) + request_end: float | None = Field( + default=None, description="When the backend completed processing the request" + ) + resolve_end: float | None = Field( + default=None, description="When backend resolution of the request completed" + ) + finalized: float | None = Field( + default=None, + description="When the request was processed/acknowledged by the scheduler", + ) + + +class RequestInfo(StandardBaseModel): + request_id: str = Field( + description="Unique identifier for the request", + default_factory=lambda: str(uuid.uuid4()), + ) + status: Literal[ + "queued", "pending", "in_progress", "completed", "errored", "cancelled" + ] = Field(description="Current processing status of the request", default="queued") + scheduler_node_id: int = Field( + description="ID/rank of the scheduler node handling the request", + default=-1, + ) + scheduler_process_id: int = Field( + description="ID/rank of the node's scheduler process handling the request", + default=-1, + ) + scheduler_start_time: float = Field( + description="Unix timestamp for the local time when scheduler processing began", + default=-1, + ) + timings: RequestTimings = Field( + default_factory=RequestTimings, + description="Timing measurements for the request lifecycle", + ) + + error: str | None = Field( + default=None, description="Error message if the request.status is 'errored'" + ) + + @computed_field # type: ignore[misc] + @property + def started_at(self) -> float | None: + """ + Get the effective request processing start time. + + :return: Unix timestamp when processing began, or None if not started. + """ + request_start = self.timings.request_start if self.timings else None + + return request_start or self.timings.resolve_start + + @computed_field # type: ignore[misc] + @property + def completed_at(self) -> float | None: + """ + Get the effective request processing completion time. + + :return: Unix timestamp when processing completed, or None if not completed. 
+        """
+        request_end = self.timings.request_end if self.timings else None
+
+        return request_end or self.timings.resolve_end
+
+    def model_copy(self, **kwargs) -> RequestInfo:  # type: ignore[override] # noqa: ARG002
+        """
+        Create a deep copy of the request info with copied timing objects.
+
+        :return: New RequestInfo instance with independent timing objects
+        """
+        return super().model_copy(
+            update={
+                "timings": self.timings.model_copy(),
+            },
+            deep=False,
+        )
diff --git a/src/guidellm/schemas/request.py b/src/guidellm/schemas/request.py
new file mode 100644
index 00000000..de1f838a
--- /dev/null
+++ b/src/guidellm/schemas/request.py
@@ -0,0 +1,164 @@
+from __future__ import annotations
+
+import uuid
+from typing import Any, Literal
+
+from pydantic import Field, computed_field
+
+from guidellm.utils import StandardBaseDict, StandardBaseModel
+
+__all__ = [
+    "GenerationRequest",
+    "GenerationRequestArguments",
+    "GenerativeRequestType",
+    "UsageMetrics",
+]
+
+
+GenerativeRequestType = Literal[
+    "text_completions",
+    "chat_completions",
+    "audio_transcriptions",
+    "audio_translations",
+]
+
+
+class GenerationRequestArguments(StandardBaseDict):
+    method: str | None = Field(
+        default=None,
+        description="The HTTP method to use for the request (e.g., 'POST', 'GET').",
+    )
+    stream: bool | None = Field(
+        default=None,
+        description="Whether to stream the response, if applicable.",
+    )
+    headers: dict[str, str] | None = Field(
+        default=None,
+        description="HTTP headers to include in the request, if applicable.",
+    )
+    params: dict[str, Any] | None = Field(
+        default=None,
+        description="Query parameters to include in the request URL, if applicable.",
+    )
+    body: dict[str, Any] | None = Field(
+        default=None,
+        description="Content to include in the main request body.",
+    )
+    files: dict[str, Any] | None = Field(
+        default=None,
+        description="Files to include in the request, if applicable.",
+    )
+
+    def model_combine(
+        self, additional: GenerationRequestArguments | dict[str, Any]
+    ) -> GenerationRequestArguments:
+        additional_dict = (
+            additional.model_dump()
+            if isinstance(additional, GenerationRequestArguments)
+            else additional
+        )
+
+        for overwrite in ("method", "stream"):
+            if (val := additional_dict.get(overwrite)) is not None:
+                setattr(self, overwrite, val)
+
+        for combine in ("headers", "params", "body", "files"):
+            if (val := additional_dict.get(combine)) is not None:
+                setattr(self, combine, {**(getattr(self, combine) or {}), **val})
+
+        return self
+
+
+class UsageMetrics(StandardBaseDict):
+    # Text stats
+    text_tokens: int | None = Field(
+        default=None, description="Number of text tokens processed/generated."
+    )
+    text_words: int | None = Field(
+        default=None, description="Number of text words processed/generated."
+    )
+    text_characters: int | None = Field(
+        default=None, description="Number of text characters processed/generated."
+    )
+    text_bytes: int | None = Field(
+        default=None, description="Number of text bytes processed/generated."
+    )
+
+    # Vision image stats
+    image_tokens: int | None = Field(
+        default=None, description="Number of image tokens processed/generated."
+    )
+    image_count: int | None = Field(
+        default=None, description="Number of images processed/generated."
+    )
+    image_pixels: int | None = Field(
+        default=None, description="Number of image pixels processed/generated."
+    )
+    image_bytes: int | None = Field(
+        default=None, description="Number of image bytes processed/generated."
+ ) + + # Vision video stats + video_tokens: int | None = Field( + default=None, description="Number of video tokens processed/generated." + ) + video_frames: int | None = Field( + default=None, description="Number of video frames processed/generated." + ) + video_seconds: float | None = Field( + default=None, description="Duration of video processed/generated in seconds." + ) + video_bytes: int | None = Field( + default=None, description="Number of video bytes processed/generated." + ) + + # Audio stats + audio_tokens: int | None = Field( + default=None, description="Number of audio tokens processed/generated." + ) + audio_samples: int | None = Field( + default=None, description="Number of audio samples processed/generated." + ) + audio_seconds: float | None = Field( + default=None, description="Duration of audio processed/generated in seconds." + ) + audio_bytes: int | None = Field( + default=None, description="Number of audio bytes processed/generated." + ) + + @computed_field # type: ignore[misc] + @property + def total_tokens(self) -> int | None: + return (self.text_tokens or 0) + (self.image_tokens or 0) + ( + self.video_tokens or 0 + ) + (self.audio_tokens or 0) or None + + +class GenerationRequest(StandardBaseModel): + """Request model for backend generation operations.""" + + request_id: str = Field( + default_factory=lambda: str(uuid.uuid4()), + description="Unique identifier for the request.", + ) + request_type: GenerativeRequestType | str = Field( + description=( + "Type of request. If url is not provided in arguments, " + "this will be used to determine the request url." + ), + ) + arguments: GenerationRequestArguments = Field( + description=( + "Payload for the request, structured as a dictionary of arguments to pass " + "to the respective backend method. For example, can contain " + "'json', 'headers', 'files', etc." + ) + ) + input_metrics: UsageMetrics = Field( + default_factory=UsageMetrics, + description="Input statistics including token counts and audio duration.", + ) + output_metrics: UsageMetrics = Field( + default_factory=UsageMetrics, + description="Output statistics including token counts and audio duration.", + ) diff --git a/src/guidellm/schemas/response.py b/src/guidellm/schemas/response.py new file mode 100644 index 00000000..779d5c88 --- /dev/null +++ b/src/guidellm/schemas/response.py @@ -0,0 +1,97 @@ +""" +Backend object models for request and response handling. + +Provides standardized models for generation requests, responses, and timing +information to ensure consistent data handling across different backend +implementations. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from pydantic import Field + +from guidellm.schemas.info import RequestInfo +from guidellm.schemas.request import GenerationRequest, UsageMetrics +from guidellm.utils import StandardBaseModel + +if TYPE_CHECKING: + from guidellm.schemas.stats import GenerativeRequestStats + +__all__ = ["GenerationResponse"] + + +class GenerationResponse(StandardBaseModel): + """Response model for backend generation operations.""" + + request_id: str = Field( + description="Unique identifier matching the original GenerationRequest." + ) + request_args: str | None = Field( + description="Arguments passed to the backend for this request." 
+ ) + text: str | None = Field( + default=None, + description="The generated response text.", + ) + input_metrics: UsageMetrics = Field( + default_factory=UsageMetrics, + description="Token statistics from the input.", + ) + output_metrics: UsageMetrics = Field( + default_factory=UsageMetrics, + description="Token statistics from the generated output.", + ) + + def compile_stats( + self, + request: GenerationRequest, + info: RequestInfo, + prefer_response: bool = True, + ) -> GenerativeRequestStats: + """Compile and return request statistics. + + :param request: The original generation request. + :param info: Metadata and timing information for the request. + :return: A GenerativeRequestStats object containing detailed statistics. + """ + if request.request_id != self.request_id: + raise ValueError("Mismatched request IDs between request and response.") + + if info.request_id != self.request_id: + raise ValueError("Mismatched request IDs between info and response.") + + if info.status != "completed": + # clear out request output metrics if the request failed since those are not valid + request.output_metrics = UsageMetrics() + + base_input = request.input_metrics if prefer_response else self.input_metrics + override_input = ( + self.input_metrics if prefer_response else request.input_metrics + ) + base_output = request.output_metrics if prefer_response else self.output_metrics + override_output = ( + self.output_metrics if prefer_response else request.output_metrics + ) + + input_metrics_dict = base_input.model_dump() + for key, value in override_input.model_dump().items(): + if value is not None: + input_metrics_dict[key] = value + output_metrics_dict = base_output.model_dump() + for key, value in override_output.model_dump().items(): + if value is not None: + output_metrics_dict[key] = value + + return GenerativeRequestStats( + request_id=self.request_id, + request_type=request.request_type, + request_args=str( + request.arguments.model_dump() if request.arguments else {} + ), + output=self.text, + info=info, + input_metrics=UsageMetrics(**input_metrics_dict), + output_metrics=UsageMetrics(**output_metrics_dict), + ) diff --git a/src/guidellm/schemas/stats.py b/src/guidellm/schemas/stats.py new file mode 100644 index 00000000..3ed5de6f --- /dev/null +++ b/src/guidellm/schemas/stats.py @@ -0,0 +1,213 @@ +""" +Benchmark data models and metrics for performance measurement and analysis. + +Provides comprehensive data structures for capturing, storing, and analyzing +benchmark results from scheduler executions. Includes timing measurements, +token statistics, and performance metrics for generative AI workloads. + +Classes: + BenchmarkSchedulerStats: Scheduler timing and performance statistics. + BenchmarkMetrics: Core benchmark metrics and distributions. + BenchmarkRequestStats: Individual request processing statistics. + Benchmark: Base benchmark result container with generic metrics. + GenerativeRequestStats: Request statistics for generative AI workloads. + GenerativeMetrics: Comprehensive metrics for generative benchmarks. + GenerativeBenchmark: Complete generative benchmark results and analysis. + GenerativeBenchmarksReport: Container for multiple benchmark results. + +Type Variables: + BenchmarkMetricsT: Generic benchmark metrics type. + BenchmarkRequestStatsT: Generic request statistics type. + BenchmarkT: Generic benchmark container type. 
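+
+GenerativeRequestStats, defined below, captures per-request usage metrics along
+with derived latency and throughput properties such as time_to_first_token_ms,
+inter_token_latency_ms, and output_tokens_per_second.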
+""" + +from __future__ import annotations + +from typing import Literal + +from pydantic import Field, computed_field + +from guidellm.schemas.info import RequestInfo +from guidellm.schemas.request import GenerativeRequestType, UsageMetrics +from guidellm.utils import StandardBaseDict + +__all__ = ["GenerativeRequestStats"] + + +class GenerativeRequestStats(StandardBaseDict): + """Request statistics for generative AI text generation workloads.""" + + type_: Literal["generative_request_stats"] = "generative_request_stats" + request_id: str = Field(description="Unique identifier for the request") + request_type: GenerativeRequestType | str = Field( + description="Type of generative request: text or chat completion" + ) + request_args: str | None = Field( + default=None, description="Arguments passed to the backend for this request" + ) + output: str | None = Field( + description="Generated text output, if request completed successfully" + ) + info: RequestInfo = Field( + description="Metadata and timing information for the request" + ) + input_metrics: UsageMetrics = Field( + description="Usage statistics for the input prompt" + ) + output_metrics: UsageMetrics = Field( + description="Usage statistics for the generated output" + ) + + # Request stats + @computed_field # type: ignore[misc] + @property + def request_latency(self) -> float | None: + """ + End-to-end request processing latency in seconds. + + :return: Duration from request start to completion, or None if unavailable. + """ + if not self.info.timings.request_end or not self.info.timings.request_start: + return None + + return self.info.timings.request_end - self.info.timings.request_start + + # Genral token stats + @computed_field # type: ignore[misc] + @property + def prompt_tokens(self) -> int | None: + """Number of tokens in the input prompt.""" + return self.input_metrics.text_tokens + + @computed_field # type: ignore[misc] + @property + def input_tokens(self) -> int | None: + """Number of tokens in the input prompt.""" + return self.input_metrics.total_tokens + + @computed_field # type: ignore[misc] + @property + def output_tokens(self) -> int | None: + """Number of tokens in the generated output.""" + return self.output_metrics.total_tokens + + @computed_field # type: ignore[misc] + @property + def total_tokens(self) -> int | None: + """ + Total token count including prompt and output tokens. + + :return: Sum of prompt and output tokens, or None if either is unavailable. + """ + input_tokens = self.input_metrics.total_tokens + output_tokens = self.output_metrics.total_tokens + + if input_tokens is None and output_tokens is None: + return None + + return (input_tokens or 0) + (output_tokens or 0) + + @computed_field # type: ignore[misc] + @property + def time_to_first_token_ms(self) -> float | None: + """ + Time to first token generation in milliseconds. + + :return: Latency from request start to first token, or None if unavailable. + """ + if ( + not self.info.timings.first_iteration + or not self.info.timings.request_start + or self.info.timings.first_iteration == self.info.timings.last_iteration + ): + return None + + return 1000 * ( + self.info.timings.first_iteration - self.info.timings.request_start + ) + + @computed_field # type: ignore[misc] + @property + def time_per_output_token_ms(self) -> float | None: + """ + Average time per output token in milliseconds. + + Includes time for first token and all subsequent tokens. + + :return: Average milliseconds per output token, or None if unavailable. 
+ """ + if ( + not self.info.timings.request_start + or not self.info.timings.last_iteration + or not self.output_metrics.total_tokens + ): + return None + + return ( + 1000 + * (self.info.timings.last_iteration - self.info.timings.request_start) + / self.output_metrics.total_tokens + ) + + @computed_field # type: ignore[misc] + @property + def inter_token_latency_ms(self) -> float | None: + """ + Average inter-token latency in milliseconds. + + Measures time between token generations, excluding first token. + + :return: Average milliseconds between tokens, or None if unavailable. + """ + if ( + not self.info.timings.first_iteration + or not self.info.timings.last_iteration + or not self.output_metrics.total_tokens + or self.output_metrics.total_tokens <= 1 + ): + return None + + return ( + 1000 + * (self.info.timings.last_iteration - self.info.timings.first_iteration) + / (self.output_metrics.total_tokens - 1) + ) + + @computed_field # type: ignore[misc] + @property + def tokens_per_second(self) -> float | None: + """ + Overall token throughput including prompt and output tokens. + + :return: Total tokens per second, or None if unavailable. + """ + if not (latency := self.request_latency) or self.total_tokens is None: + return None + + return self.total_tokens / latency + + @computed_field # type: ignore[misc] + @property + def output_tokens_per_second(self) -> float | None: + """ + Output token generation throughput. + + :return: Output tokens per second, or None if unavailable. + """ + if not (latency := self.request_latency) or self.output_tokens is None: + return None + + return self.output_tokens / latency + + @computed_field # type: ignore[misc] + @property + def output_tokens_per_iteration(self) -> float | None: + """ + Average output tokens generated per iteration. + + :return: Output tokens per iteration, or None if unavailable. 
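+
+        Computed as output_tokens / timings.iterations, where iterations counts
+        the streaming updates recorded for the request.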
+ """ + if self.output_tokens is None or not self.info.timings.iterations: + return None + + return self.output_tokens / self.info.timings.iterations diff --git a/src/guidellm/utils/statistics.py b/src/guidellm/utils/statistics.py index c820de9d..4dfa7500 100644 --- a/src/guidellm/utils/statistics.py +++ b/src/guidellm/utils/statistics.py @@ -255,6 +255,7 @@ def from_values( def from_request_times( requests: list[tuple[float, float]], distribution_type: Literal["concurrency", "rate"], + weights: list[float] | None = None, include_cdf: bool = False, epsilon: float = 1e-6, ) -> DistributionSummary: @@ -273,66 +274,85 @@ def from_request_times( :return: DistributionSummary with timing-based statistical metrics :raises ValueError: If distribution_type is not "concurrency" or "rate" """ + if not weights: + weights = [1.0] * len(requests) + + if len(requests) != len(weights): + raise ValueError( + "The length of requests and weights must be the same.", + ) + + # First convert to timing events based on type + events: list[tuple[float, float]] = [] + if distribution_type == "concurrency": - # convert to delta changes based on when requests were running - time_deltas: dict[float, int] = defaultdict(int) - for start, end in requests: - time_deltas[start] += 1 - time_deltas[end] -= 1 - - # convert to the events over time measuring concurrency changes - events = [] - active = 0 - - for time, delta in sorted(time_deltas.items()): - active += delta - events.append((time, active)) + # For concurrency, each request adds to concurrency at start + # and subtracts at end + for (start, end), weight in zip(requests, weights): + events.append((start, weight)) + events.append((end, -1 * weight)) elif distribution_type == "rate": - # convert to events for when requests finished - global_start = min(start for start, _ in requests) if requests else 0 - events = [(global_start, 1)] + [(end, 1) for _, end in requests] + # For rate, each request is added at the end time only + events.append((min(0, *(start for start, _ in requests)), 0.0)) + for (_, end), weight in zip(requests, weights): + events.append((end, weight)) else: raise ValueError( f"Invalid distribution_type '{distribution_type}'. " "Must be 'concurrency' or 'rate'." 
) - # combine any events that are very close together - flattened_events: list[tuple[float, float]] = [] - for time, val in sorted(events): - last_time, last_val = ( - flattened_events[-1] if flattened_events else (None, None) - ) + # Combine any events within epsilon of each other for stability + sorted_events = sorted(events, key=lambda event: event[0]) + flattened_events: list[tuple[float, float]] = ( + [sorted_events.pop(0)] if sorted_events else [] + ) + last_time = flattened_events[0][0] if flattened_events else 0.0 - if ( - last_time is not None - and last_val is not None - and abs(last_time - time) <= epsilon - ): + for time, val in sorted_events: + if abs(time - last_time) <= epsilon: + last_val = flattened_events[-1][1] flattened_events[-1] = (last_time, last_val + val) else: + last_time = time flattened_events.append((time, val)) - # convert to value distribution function + # Convert events to value distribution function distribution: dict[float, float] = defaultdict(float) - for ind in range(len(flattened_events) - 1): - start_time, value = flattened_events[ind] - end_time, _ = flattened_events[ind + 1] - duration = end_time - start_time - - if distribution_type == "concurrency": - # weight the concurrency value by the duration + if distribution_type == "concurrency": + # For concurrency, convert to active concurrency over time + active = 0.0 + for ind in range(len(flattened_events)): + time, change = flattened_events[ind] + active += change + flattened_events[ind] = (time, active) + + # Then convert to distribution by weighting each concurrency + # by duration to next event (last event is 0 concurrency) + for ind in range(len(flattened_events) - 1): + time, value = flattened_events[ind] + next_time = flattened_events[ind + 1][0] + duration = next_time - time distribution[value] += duration - elif distribution_type == "rate": - # weight the rate value by the duration - rate = value / duration + elif distribution_type == "rate": + # For rate, convert to distribution by converting each value + # to a rate (value/duration) weighted by duration from previous + # (first event is 0 rate) + for ind in range(1, len(flattened_events)): + time, value = flattened_events[ind] + prev_time = flattened_events[ind - 1][0] + duration = time - prev_time + rate = value / duration if duration > 0 else 0.0 distribution[rate] += duration - - distribution_list: list[tuple[float, float]] = sorted(distribution.items()) + else: + raise ValueError( + f"Invalid distribution_type '{distribution_type}'. " + "Must be 'concurrency' or 'rate'." + ) return DistributionSummary.from_distribution_function( - distribution=distribution_list, + distribution=sorted(distribution.items()), include_cdf=include_cdf, ) @@ -562,6 +582,7 @@ def from_request_times( request_types: list[Literal["successful", "incomplete", "error"]], requests: list[tuple[float, float]], distribution_type: Literal["concurrency", "rate"], + weights: list[float] | None = None, include_cdf: bool = False, epsilon: float = 1e-6, ) -> StatusDistributionSummary: @@ -602,65 +623,78 @@ def from_request_times( f"Got {len(request_types)} and {len(requests)} instead.", ) - _, successful_requests = ( + if weights is None: + weights = [1.0] * len(requests) + + if len(requests) != len(weights): + raise ValueError( + "The length of requests and weights must be the same." 
+ f"Got {len(requests)} and {len(weights)} instead.", + ) + + _, successful_requests, successful_weights = ( zip(*successful) if ( successful := list( filter( lambda val: val[0] == "successful", - zip(request_types, requests), + zip(request_types, requests, weights), ) ) ) - else ([], []) + else ([], [], []) ) - _, incomplete_requests = ( + _, incomplete_requests, incomplete_weights = ( zip(*incomplete) if ( incomplete := list( filter( lambda val: val[0] == "incomplete", - zip(request_types, requests), + zip(request_types, requests, weights), ) ) ) - else ([], []) + else ([], [], []) ) - _, errored_requests = ( + _, errored_requests, errored_weights = ( zip(*errored) if ( errored := list( filter( lambda val: val[0] == "error", - zip(request_types, requests), + zip(request_types, requests, weights), ) ) ) - else ([], []) + else ([], [], []) ) return StatusDistributionSummary( total=DistributionSummary.from_request_times( requests, distribution_type=distribution_type, + weights=weights, include_cdf=include_cdf, epsilon=epsilon, ), successful=DistributionSummary.from_request_times( successful_requests, # type: ignore[arg-type] distribution_type=distribution_type, + weights=successful_weights, # type: ignore[arg-type] include_cdf=include_cdf, epsilon=epsilon, ), incomplete=DistributionSummary.from_request_times( incomplete_requests, # type: ignore[arg-type] distribution_type=distribution_type, + weights=incomplete_weights, # type: ignore[arg-type] include_cdf=include_cdf, epsilon=epsilon, ), errored=DistributionSummary.from_request_times( errored_requests, # type: ignore[arg-type] distribution_type=distribution_type, + weights=errored_weights, # type: ignore[arg-type] include_cdf=include_cdf, epsilon=epsilon, ), diff --git a/src/guidellm/utils/text.py b/src/guidellm/utils/text.py index 8385ec7b..1138c08f 100644 --- a/src/guidellm/utils/text.py +++ b/src/guidellm/utils/text.py @@ -13,7 +13,6 @@ import gzip import re import textwrap -from importlib.resources import as_file, files # type: ignore[attr-defined] from pathlib import Path from typing import Any @@ -21,7 +20,6 @@ import httpx from loguru import logger -from guidellm import data as package_data from guidellm.settings import settings from guidellm.utils.console import Colors @@ -238,15 +236,6 @@ def load_text(data: str | Path, encoding: str | None = None) -> str: response.raise_for_status() return response.text - # check package data - if isinstance(data, str) and data.startswith("data:"): - resource_path = files(package_data).joinpath(data[5:]) - with ( - as_file(resource_path) as resource_file, - gzip.open(resource_file, "rt", encoding=encoding) as file, - ): - return file.read() - # check gzipped files if isinstance(data, str) and data.endswith(".gz"): with gzip.open(data, "rt", encoding=encoding) as file: diff --git a/tests/unit/backend/test_backend.py b/tests/unit/backend/test_backend.py index 49b65077..79c0d932 100644 --- a/tests/unit/backend/test_backend.py +++ b/tests/unit/backend/test_backend.py @@ -13,11 +13,11 @@ import pytest from guidellm.backends.backend import Backend, BackendType -from guidellm.backends.objects import ( +from guidellm.scheduler import BackendInterface, ScheduledRequestInfo +from guidellm.schemas.response import ( GenerationRequest, GenerationRequestTimings, ) -from guidellm.scheduler import BackendInterface, ScheduledRequestInfo from guidellm.utils import RegistryMixin diff --git a/tests/unit/backend/test_objects.py b/tests/unit/backend/test_objects.py index 34a6350c..1831c459 100644 --- 
a/tests/unit/backend/test_objects.py +++ b/tests/unit/backend/test_objects.py @@ -9,12 +9,12 @@ import pytest from pydantic import ValidationError -from guidellm.backends.objects import ( +from guidellm.scheduler import MeasuredRequestTimings +from guidellm.schemas.response import ( GenerationRequest, GenerationRequestTimings, GenerationResponse, ) -from guidellm.scheduler import MeasuredRequestTimings from guidellm.utils import StandardBaseModel diff --git a/tests/unit/backend/test_openai_backend.py b/tests/unit/backend/test_openai_backend.py index 7c7f528d..a83c411a 100644 --- a/tests/unit/backend/test_openai_backend.py +++ b/tests/unit/backend/test_openai_backend.py @@ -15,13 +15,13 @@ from PIL import Image from guidellm.backends.backend import Backend -from guidellm.backends.objects import ( +from guidellm.backends.openai import OpenAIHTTPBackend, UsageStats +from guidellm.scheduler import ScheduledRequestInfo +from guidellm.schemas.response import ( GenerationRequest, GenerationRequestTimings, GenerationResponse, ) -from guidellm.backends.openai import OpenAIHTTPBackend, UsageStats -from guidellm.scheduler import ScheduledRequestInfo def async_timeout(delay): diff --git a/tests/unit/mock_benchmark.py b/tests/unit/mock_benchmark.py index d846767d..7ce73a67 100644 --- a/tests/unit/mock_benchmark.py +++ b/tests/unit/mock_benchmark.py @@ -7,8 +7,8 @@ GenerativeMetrics, GenerativeRequestStats, ) -from guidellm.benchmark.objects import BenchmarkerDict, SchedulerDict from guidellm.benchmark.profile import SynchronousProfile +from guidellm.benchmark.schemas import BenchmarkerDict, SchedulerDict from guidellm.scheduler import ScheduledRequestInfo, SchedulerState, SynchronousStrategy from guidellm.utils import ( DistributionSummary, diff --git a/tests/unit/utils/test_encoding.py b/tests/unit/utils/test_encoding.py index cc4600cf..69587323 100644 --- a/tests/unit/utils/test_encoding.py +++ b/tests/unit/utils/test_encoding.py @@ -6,11 +6,11 @@ import pytest from pydantic import BaseModel, Field -from guidellm.backends.objects import ( +from guidellm.scheduler.schemas import RequestSchedulerTimings, ScheduledRequestInfo +from guidellm.schemas.response import ( GenerationRequest, GenerationResponse, ) -from guidellm.scheduler.objects import RequestSchedulerTimings, ScheduledRequestInfo from guidellm.utils.encoding import Encoder, MessageEncoding, Serializer From b2436641458defda47f17a2335828b6e7816e1b0 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Fri, 10 Oct 2025 16:47:36 -0400 Subject: [PATCH 74/90] Move asyncio timeout to common location Signed-off-by: Samuel Monson --- tests/unit/backends/test_backend.py | 14 +------ tests/unit/backends/test_openai_backend.py | 14 +------ tests/unit/scheduler/test_scheduler.py | 15 +------- tests/unit/scheduler/test_worker.py | 13 +------ tests/unit/scheduler/test_worker_group.py | 13 +------ tests/unit/testing_utils.py | 44 ++++++++++++++++++++++ tests/unit/utils/test_messaging.py | 15 +------- tests/unit/utils/test_synchronous.py | 15 +------- 8 files changed, 51 insertions(+), 92 deletions(-) create mode 100644 tests/unit/testing_utils.py diff --git a/tests/unit/backends/test_backend.py b/tests/unit/backends/test_backend.py index ebd0da87..d5a4b955 100644 --- a/tests/unit/backends/test_backend.py +++ b/tests/unit/backends/test_backend.py @@ -4,9 +4,7 @@ from __future__ import annotations -import asyncio from collections.abc import AsyncIterator -from functools import wraps from typing import Any from unittest.mock import Mock, patch @@ -19,17 +17,7 @@ ) 
from guidellm.scheduler import BackendInterface, ScheduledRequestInfo from guidellm.utils import RegistryMixin - - -def async_timeout(delay): - def decorator(func): - @wraps(func) - async def new_func(*args, **kwargs): - return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) - - return new_func - - return decorator +from tests.unit.testing_utils import async_timeout def test_backend_type(): diff --git a/tests/unit/backends/test_openai_backend.py b/tests/unit/backends/test_openai_backend.py index 2180b501..834cd0e9 100644 --- a/tests/unit/backends/test_openai_backend.py +++ b/tests/unit/backends/test_openai_backend.py @@ -4,9 +4,7 @@ from __future__ import annotations -import asyncio import base64 -from functools import wraps from pathlib import Path from unittest.mock import AsyncMock, Mock, patch @@ -22,17 +20,7 @@ ) from guidellm.backends.openai import OpenAIHTTPBackend, UsageStats from guidellm.scheduler import ScheduledRequestInfo - - -def async_timeout(delay): - def decorator(func): - @wraps(func) - async def new_func(*args, **kwargs): - return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) - - return new_func - - return decorator +from tests.unit.testing_utils import async_timeout def test_usage_stats(): diff --git a/tests/unit/scheduler/test_scheduler.py b/tests/unit/scheduler/test_scheduler.py index 33efc27f..407dab6c 100644 --- a/tests/unit/scheduler/test_scheduler.py +++ b/tests/unit/scheduler/test_scheduler.py @@ -4,7 +4,6 @@ import inspect import random import uuid -from functools import wraps from typing import Any, Generic import pytest @@ -20,19 +19,7 @@ SynchronousStrategy, ) from guidellm.utils.singleton import ThreadSafeSingletonMixin - - -def async_timeout(delay: float): - """Decorator to add timeout to async test functions.""" - - def decorator(func): - @wraps(func) - async def new_func(*args, **kwargs): - return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) - - return new_func - - return decorator +from tests.unit.testing_utils import async_timeout class MockRequest(BaseModel): diff --git a/tests/unit/scheduler/test_worker.py b/tests/unit/scheduler/test_worker.py index b62d66d5..b6624483 100644 --- a/tests/unit/scheduler/test_worker.py +++ b/tests/unit/scheduler/test_worker.py @@ -5,7 +5,6 @@ import random import time from dataclasses import dataclass -from functools import wraps from multiprocessing import Barrier, Event, Process from multiprocessing.synchronize import Barrier as ProcessingBarrier from multiprocessing.synchronize import Event as ProcessingEvent @@ -27,21 +26,11 @@ WorkerProcess, ) from guidellm.utils import InterProcessMessagingQueue +from tests.unit.testing_utils import async_timeout STANDARD_NUM_REQUESTS: int = 200 -def async_timeout(delay): - def decorator(func): - @wraps(func) - async def new_func(*args, **kwargs): - return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) - - return new_func - - return decorator - - @dataclass class TimingsBounds: exact: float | None = None diff --git a/tests/unit/scheduler/test_worker_group.py b/tests/unit/scheduler/test_worker_group.py index 80bb6c23..2b8176e7 100644 --- a/tests/unit/scheduler/test_worker_group.py +++ b/tests/unit/scheduler/test_worker_group.py @@ -3,7 +3,6 @@ import asyncio import inspect import time -from functools import wraps from multiprocessing.context import BaseContext from multiprocessing.managers import BaseManager from multiprocessing.process import BaseProcess @@ -30,17 +29,7 @@ ) from guidellm.scheduler.worker_group import 
WorkerGroupState from guidellm.utils import InterProcessMessaging - - -def async_timeout(delay): - def decorator(func): - @wraps(func) - async def new_func(*args, **kwargs): - return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) - - return new_func - - return decorator +from tests.unit.testing_utils import async_timeout class MockRequestTimings(MeasuredRequestTimings): diff --git a/tests/unit/testing_utils.py b/tests/unit/testing_utils.py new file mode 100644 index 00000000..11b563b5 --- /dev/null +++ b/tests/unit/testing_utils.py @@ -0,0 +1,44 @@ +"""Common test utilities for async testing.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import Any, TypeVar + +import pytest + +# Type variables for proper typing +F = TypeVar("F", bound=Callable[..., Awaitable[Any]]) + + +def async_timeout(delay: float = 10.0, hard_fail: bool = False) -> Callable[[F], F]: + """ + Decorator to add timeout to async test functions. + + Uses a longer default timeout (30s) to reduce intermittent failures + while still catching truly hanging tests. + + Args: + delay: Timeout in seconds (default: 30.0) + + Returns: + Decorated function with timeout applied + """ + + def decorator(func: F) -> F: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) + except asyncio.TimeoutError: + msg = f"Test {func.__name__} timed out after {delay} seconds" + if hard_fail: + pytest.fail(msg) + else: + pytest.xfail(msg) + + return wrapper # type: ignore[return-value] + + return decorator diff --git a/tests/unit/utils/test_messaging.py b/tests/unit/utils/test_messaging.py index d6b3283d..7b021aa6 100644 --- a/tests/unit/utils/test_messaging.py +++ b/tests/unit/utils/test_messaging.py @@ -3,7 +3,6 @@ import asyncio import multiprocessing import threading -from functools import wraps from typing import Any, TypeVar import culsans @@ -22,19 +21,7 @@ InterProcessMessagingQueue, ) from guidellm.utils.messaging import ReceiveMessageT, SendMessageT - - -def async_timeout(delay: float): - """Decorator to add timeout to async test functions.""" - - def decorator(func): - @wraps(func) - async def new_func(*args, **kwargs): - return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) - - return new_func - - return decorator +from tests.unit.testing_utils import async_timeout class MockMessage(BaseModel): diff --git a/tests/unit/utils/test_synchronous.py b/tests/unit/utils/test_synchronous.py index 7acd5b4a..eebd6a52 100644 --- a/tests/unit/utils/test_synchronous.py +++ b/tests/unit/utils/test_synchronous.py @@ -3,7 +3,6 @@ import asyncio import multiprocessing import threading -from functools import wraps from multiprocessing.synchronize import Barrier as ProcessingBarrier from multiprocessing.synchronize import Event as ProcessingEvent from typing import get_args @@ -16,19 +15,7 @@ wait_for_sync_event, wait_for_sync_objects, ) - - -def async_timeout(delay: float): - """Decorator to add timeout to async functions.""" - - def decorator(func): - @wraps(func) - async def new_func(*args, **kwargs): - return await asyncio.wait_for(func(*args, **kwargs), timeout=delay) - - return new_func - - return decorator +from tests.unit.testing_utils import async_timeout def test_sync_object_types_alias(): From cfcbd13342b3e33a72e2a8f8427a3ba7b47ea3f4 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Fri, 10 Oct 2025 16:48:21 -0400 Subject: 
[PATCH 75/90] Fix duplicate timeout in openai backend tests Signed-off-by: Samuel Monson --- tests/unit/backends/test_openai_backend.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/tests/unit/backends/test_openai_backend.py b/tests/unit/backends/test_openai_backend.py index 834cd0e9..724075e8 100644 --- a/tests/unit/backends/test_openai_backend.py +++ b/tests/unit/backends/test_openai_backend.py @@ -218,7 +218,6 @@ def test_header_building(self): @pytest.mark.smoke @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_info(self): """Test info method.""" backend = OpenAIHTTPBackend( @@ -238,7 +237,6 @@ async def test_info(self): @pytest.mark.smoke @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_process_startup(self): """Test process startup.""" backend = OpenAIHTTPBackend(target="http://test") @@ -255,7 +253,6 @@ async def test_process_startup(self): @pytest.mark.smoke @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_process_startup_already_started(self): """Test process startup when already started.""" backend = OpenAIHTTPBackend(target="http://test") @@ -267,7 +264,6 @@ async def test_process_startup_already_started(self): @pytest.mark.smoke @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_process_shutdown(self): """Test process shutdown.""" backend = OpenAIHTTPBackend(target="http://test") @@ -284,7 +280,6 @@ async def test_process_shutdown(self): @pytest.mark.smoke @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_process_shutdown_not_started(self): """Test process shutdown when not started.""" backend = OpenAIHTTPBackend(target="http://test") @@ -295,7 +290,6 @@ async def test_process_shutdown_not_started(self): @pytest.mark.sanity @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_check_in_process(self): """Test _check_in_process method.""" backend = OpenAIHTTPBackend(target="http://test") @@ -313,7 +307,6 @@ async def test_check_in_process(self): @pytest.mark.sanity @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_available_models(self): """Test available_models method.""" backend = OpenAIHTTPBackend(target="http://test") @@ -334,7 +327,6 @@ async def test_available_models(self): @pytest.mark.sanity @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(5.0) async def test_default_model(self): """Test default_model method.""" # Test when model is already set @@ -358,7 +350,6 @@ async def test_default_model(self): @pytest.mark.regression @pytest.mark.asyncio @async_timeout(10.0) - @async_timeout(10.0) async def test_validate_with_model(self): """Test validate method when model is set.""" backend = OpenAIHTTPBackend(target="http://test", model="test-model") From 9ca2dba919f03afd2ade929ac1f3d81b36135f67 Mon Sep 17 00:00:00 2001 From: Jared O'Connell Date: Fri, 10 Oct 2025 15:23:19 -0400 Subject: [PATCH 76/90] Force time zone in tests Signed-off-by: Jared O'Connell --- tests/unit/utils/test_functions.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/unit/utils/test_functions.py b/tests/unit/utils/test_functions.py index 3b353759..3e542ca8 100644 --- a/tests/unit/utils/test_functions.py +++ b/tests/unit/utils/test_functions.py @@ -1,5 +1,6 @@ from __future__ import annotations +import time from datetime import datetime import pytest @@ -180,6 +181,17 @@ def test_single_value(self): assert result == 3.0 +@pytest.fixture(autouse=True) +def 
force_us_eastern_timezone(monkeypatch): + """ + Forces the timezone to US/Eastern for the duration of a test. + This ensures that timestamp formatting is consistent across all environments. + + ## WRITTEN BY AI ## + """ + monkeypatch.setenv("TZ", "America/New_York") + time.tzset() # Propagates the change to the underlying C library + class TestSafeFormatTimestamp: """Test suite for safe_format_timestamp function.""" From 8d20525c4cf4117553961a949b8166c7dc99a347 Mon Sep 17 00:00:00 2001 From: Samuel Monson Date: Fri, 10 Oct 2025 17:16:16 -0400 Subject: [PATCH 77/90] Fix function doc Signed-off-by: Samuel Monson --- tests/unit/testing_utils.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/unit/testing_utils.py b/tests/unit/testing_utils.py index 11b563b5..c6b8c513 100644 --- a/tests/unit/testing_utils.py +++ b/tests/unit/testing_utils.py @@ -17,11 +17,8 @@ def async_timeout(delay: float = 10.0, hard_fail: bool = False) -> Callable[[F], """ Decorator to add timeout to async test functions. - Uses a longer default timeout (30s) to reduce intermittent failures - while still catching truly hanging tests. - Args: - delay: Timeout in seconds (default: 30.0) + delay: Timeout in seconds (default: 10.0) Returns: Decorated function with timeout applied From 3c646d4ceaee24356f569da765c6695a1a6a1bd7 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Mon, 13 Oct 2025 13:02:44 -0400 Subject: [PATCH 78/90] runnable state for multi modal refactor --- src/guidellm/__main__.py | 45 +++++------ src/guidellm/benchmark/progress.py | 78 ++++++++++--------- src/guidellm/benchmark/scenario.py | 11 +-- src/guidellm/benchmark/schemas.py | 69 ++++++++++------ src/guidellm/benchmark/types.py | 2 - src/guidellm/data/deserializers/file.py | 3 +- .../data/deserializers/huggingface.py | 3 +- src/guidellm/data/deserializers/memory.py | 3 +- src/guidellm/data/deserializers/synthetic.py | 6 +- src/guidellm/data/loaders.py | 2 +- src/guidellm/utils/registry.py | 2 +- src/guidellm/utils/statistics.py | 19 ++--- .../unit/data/deserializers/test_synthetic.py | 4 +- 13 files changed, 130 insertions(+), 117 deletions(-) diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 7e37fb70..97f3e436 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -43,7 +43,7 @@ benchmark_generative_text, reimport_benchmarks_report, ) -from guidellm.benchmark.scenario import GenerativeTextScenario, get_builtin_scenarios +from guidellm.benchmark.scenario import GenerativeTextScenario from guidellm.mock_server import MockServer, MockServerConfig from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset from guidellm.scheduler import StrategyType @@ -123,25 +123,25 @@ def benchmark(): help="Run a benchmark against a generative model using the specified arguments.", context_settings={"auto_envvar_prefix": "GUIDELLM"}, ) -@click.option( - "--scenario", - type=cli_tools.Union( - click.Path( - exists=True, - readable=True, - file_okay=True, - dir_okay=False, - path_type=Path, - ), - click.Choice(get_builtin_scenarios()), - ), - default=None, - help=( - "The name of a builtin scenario or path to a config file. " - "Missing values from the config will use defaults. " - "Options specified on the commandline will override the scenario." 
- ), -) +# @click.option( +# "--scenario", +# type=cli_tools.Union( +# click.Path( +# exists=True, +# readable=True, +# file_okay=True, +# dir_okay=False, +# path_type=Path, +# ), +# click.Choice(get_builtin_scenarios()), +# ), +# default=None, +# help=( +# "The name of a builtin scenario or path to a config file. " +# "Missing values from the config will use defaults. " +# "Options specified on the commandline will override the scenario." +# ), +# ) @click.option( "--target", type=str, @@ -347,11 +347,6 @@ def benchmark(): help="Set this flag to display stats for the processes running the benchmarks", ) # Aggregators configuration -@click.option( - "--output-extras", - callback=cli_tools.parse_json, - help="A JSON string of extra data to save with the output benchmarks", -) @click.option( "--warmup", "--warmup-percent", # legacy alias diff --git a/src/guidellm/benchmark/progress.py b/src/guidellm/benchmark/progress.py index f9e29abb..5a88d696 100644 --- a/src/guidellm/benchmark/progress.py +++ b/src/guidellm/benchmark/progress.py @@ -808,7 +808,7 @@ def start(self, strategy: SchedulingStrategy): def update( self, - aggregator_update: EstimatedBenchmarkState, + estimated_state: EstimatedBenchmarkState, scheduler_state: SchedulerState, ): self.progress = ( @@ -816,64 +816,66 @@ def update( if scheduler_state.remaining_fraction is not None else 0.0 ) - status: Literal["in_warmup", "in_progress", "in_cooldown"] | None = ( - "in_progress" # Need to handle requests_in_* isn't in aggregator_update - ) - if aggregator_update.get("requests_in_warmup"): - status = "in_warmup" - elif aggregator_update.get("requests_in_cooldown"): - status = "in_cooldown" self._update_processing_states( - benchmark_status=status, + benchmark_status=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_state_group, + key="status", + default=None, + ), start_time=scheduler_state.start_time, successful_requests=scheduler_state.successful_requests, cancelled_requests=scheduler_state.cancelled_requests, errored_requests=scheduler_state.errored_requests, ) self._update_request_stats( - request_concurrency=aggregator_update.get_metric( - key="requests", type_="avg", prefix="completed" + request_concurrency=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="concurrency_requests", ), - requests_per_second=aggregator_update.get_metric( - key="requests", - type_="rate", - prefix="completed", + requests_per_second=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="completed_requests_per_second", ), - request_latency=aggregator_update.get_metric( - key="request_latency", type_="avg", prefix="completed" + request_latency=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="completed_request_latency", ), ) self._update_token_stats( - output_tokens=aggregator_update.get_metric( - key="output_tokens", type_="avg", prefix="completed" + output_tokens=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="completed_output_tokens_total", ), - output_tokens_rate=aggregator_update.get_metric( - key="output_tokens", type_="rate" + output_tokens_rate=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="completed_output_tokens", ), - prompt_tokens=aggregator_update.get_metric( - key="prompt_tokens", type_="avg", prefix="completed" + prompt_tokens=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + 
key="completed_input_tokens_total", ), - total_tokens_rate=aggregator_update.get_metric( - key="total_tokens", type_="rate" + total_tokens_rate=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="completed_total_tokens", ), - time_to_first_token=( - aggregator_update.get_metric(key="time_to_first_token", type_="avg") + time_to_first_token=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="completed_time_to_first_token", ), - inter_token_latency=( - aggregator_update.get_metric(key="inter_token_latency", type_="avg") + inter_token_latency=estimated_state.get_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="completed_inter_token_latency", ), ) - if aggregator_update.get("updated_scheduler_stats"): + if estimated_state.get("updated_scheduler_stats"): self._update_system_stats( - request_targeted_start_delay=( - aggregator_update.get_metric( - key="request_targeted_start_delay", type_="avg", default=0.0 - ) + request_targeted_start_delay=estimated_state.get_metric( + group=EstimatedBenchmarkState.scheduler_state_group, + key="request_targeted_start_delay", ), - queued_time=( - aggregator_update.get_metric( - key="queued_time", type_="avg", default=0.0 - ) + queued_time=estimated_state.get_metric( + group=EstimatedBenchmarkState.scheduler_state_group, + key="queued_time", ), scheduler_overheads_time=0.0, # Need to add up metrics here ) diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py index 73a9a050..59cdef27 100644 --- a/src/guidellm/benchmark/scenario.py +++ b/src/guidellm/benchmark/scenario.py @@ -9,11 +9,11 @@ import yaml from loguru import logger -from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt, SkipValidation +from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt from guidellm.backends import Backend, BackendType from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.types import AggregatorInputT, DataInputT, ProcessorInputT +from guidellm.benchmark.types import ProcessorInputT from guidellm.scheduler import StrategyType from guidellm.utils import StandardBaseModel @@ -108,11 +108,7 @@ class Config: # types like PreTrainedTokenizerBase arbitrary_types_allowed = True - data: Annotated[ - DataInputT, - # BUG: See https://github.com/pydantic/pydantic/issues/9541 - SkipValidation, - ] + data: Any profile: StrategyType | ProfileType | Profile rate: Annotated[list[PositiveFloat] | None, BeforeValidator(parse_float_list)] = ( None @@ -128,7 +124,6 @@ class Config: data_args: dict[str, Any] | None = None data_sampler: Literal["random"] | None = None # Aggregators configuration - add_aggregators: AggregatorInputT | None = None warmup: Annotated[float | None, Field(gt=0, le=1)] = None cooldown: Annotated[float | None, Field(gt=0, le=1)] = None request_samples: PositiveInt | None = 20 diff --git a/src/guidellm/benchmark/schemas.py b/src/guidellm/benchmark/schemas.py index 1b11aae6..41cb832f 100644 --- a/src/guidellm/benchmark/schemas.py +++ b/src/guidellm/benchmark/schemas.py @@ -36,7 +36,6 @@ from pydantic import Field, computed_field from guidellm.benchmark.profile import Profile -from guidellm.benchmark.schemas import BenchmarkerDict, SchedulerDict from guidellm.scheduler import ( BackendInterface, Environment, @@ -309,8 +308,8 @@ def get_request_metrics_sample( float, ]: ... 
- @abstractmethod @classmethod + @abstractmethod def update_estimate( cls, args: BenchmarkArgs, @@ -321,8 +320,8 @@ def update_estimate( scheduler_state: SchedulerState, ): ... - @abstractmethod @classmethod + @abstractmethod def compile( cls, args: BenchmarkArgs, @@ -537,7 +536,7 @@ def compile( ) -> GenerativeMetricsSummary: total_values = [ input_val + output_val - for input_val, output_val in zip(input_values, output_values) + for input_val, output_val in zip(input_values, output_values, strict=False) ] return GenerativeMetricsSummary( @@ -828,7 +827,7 @@ def update_estimate( # Always track concurrency state.add_time_averaged_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, - key="concurrency", + key="concurrency_requests", value=scheduler_state.processing_requests, ) @@ -847,7 +846,7 @@ def update_estimate( for prefix in (request_info.status, "total"): requests_count = ( scheduler_state.successful_requests - if prefix == "successful" + if prefix == "completed" else scheduler_state.errored_requests if prefix == "errored" else scheduler_state.cancelled_requests @@ -914,11 +913,16 @@ def update_estimate( value=(response.output_metrics.total_tokens if response else None) or request.output_metrics.total_tokens, ) - - # General stats output_tokens = ( response.output_metrics.total_tokens if response else None ) or request.output_metrics.total_tokens + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="total_tokens", + value=output_tokens, + ) + + # General stats state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, key=f"{prefix}_time_to_first_token", @@ -1162,6 +1166,13 @@ def update_estimate( request_info: RequestInfo, scheduler_state: SchedulerState, ): + if ( + request_info.status == "cancelled" + and request_info.timings.resolve_start is None + ): + # Cancelled requests that never started should be ignored + return + # Update child metric groups BenchmarkSchedulerStats.update_estimate(state, request_info) GenerativeMetrics.update_estimate( @@ -1176,30 +1187,38 @@ def update_estimate( state["samples_errored"] = [] state["requests_incomplete"] = [] state["samples_incomplete"] = [] - - if request_info.status not in {"completed", "errored", "cancelled"}: - # Must be fully resolved to be added - return - - if ( - request_info.status == "cancelled" - and request_info.timings.resolve_start is None - ): - # Cancelled requests that never started should not be added - return - - state.set_metric(group=cls.group_name, key="updated", value=True) - if state.set_metric( - group=cls.group_name, + in_warmup = state.set_metric( + group=EstimatedBenchmarkState.benchmark_state_group, key="in_warmup", value=args.is_in_warmup(request_info, scheduler_state), - ) or state.set_metric( - group=cls.group_name, + ) + in_cooldown = state.set_metric( + group=EstimatedBenchmarkState.benchmark_state_group, key="in_cooldown", value=args.is_in_cooldown(request_info, scheduler_state), + ) + state[f"{EstimatedBenchmarkState.benchmark_state_group}_status"] = ( + "in_cooldown" + if in_cooldown + else "in_warmup" + if in_warmup + else "in_progress" + ) + + if ( + request_info.status not in {"completed", "errored", "cancelled"} + or in_warmup + or in_cooldown ): + # Must be fully resolved to be added return + state.set_metric( + group=EstimatedBenchmarkState.benchmark_state_group, + key="updated", + value=True, + ) + if response is None: response = GenerationResponse( request_id=request.request_id, request_args=str(request.arguments) diff --git 
a/src/guidellm/benchmark/types.py b/src/guidellm/benchmark/types.py index 2e861678..94df4b8e 100644 --- a/src/guidellm/benchmark/types.py +++ b/src/guidellm/benchmark/types.py @@ -1,10 +1,8 @@ from __future__ import annotations -from collections.abc import Iterable from pathlib import Path from typing import Any -from datasets import Dataset, DatasetDict, IterableDataset, IterableDatasetDict from transformers import PreTrainedTokenizerBase # type: ignore[import] from typing_extensions import TypeAliasType diff --git a/src/guidellm/data/deserializers/file.py b/src/guidellm/data/deserializers/file.py index 54b18edb..d57403db 100644 --- a/src/guidellm/data/deserializers/file.py +++ b/src/guidellm/data/deserializers/file.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Callable from pathlib import Path -from typing import Any, Callable +from typing import Any import pandas as pd from datasets import Dataset, load_dataset diff --git a/src/guidellm/data/deserializers/huggingface.py b/src/guidellm/data/deserializers/huggingface.py index 69f7d506..e356043a 100644 --- a/src/guidellm/data/deserializers/huggingface.py +++ b/src/guidellm/data/deserializers/huggingface.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Callable from pathlib import Path -from typing import Any, Callable +from typing import Any from datasets import ( Dataset, diff --git a/src/guidellm/data/deserializers/memory.py b/src/guidellm/data/deserializers/memory.py index ddca64a9..6f8888ec 100644 --- a/src/guidellm/data/deserializers/memory.py +++ b/src/guidellm/data/deserializers/memory.py @@ -3,8 +3,9 @@ import contextlib import csv import json +from collections.abc import Callable from io import StringIO -from typing import Any, Callable, cast +from typing import Any, cast from datasets import Dataset from transformers import PreTrainedTokenizerBase diff --git a/src/guidellm/data/deserializers/synthetic.py b/src/guidellm/data/deserializers/synthetic.py index c2078f1a..92e8fc14 100644 --- a/src/guidellm/data/deserializers/synthetic.py +++ b/src/guidellm/data/deserializers/synthetic.py @@ -1,10 +1,10 @@ from __future__ import annotations import math -from collections.abc import Iterator +from collections.abc import Callable, Iterator from pathlib import Path from random import Random -from typing import Any, Callable +from typing import Any import yaml from datasets import Features, IterableDataset, Value @@ -209,7 +209,7 @@ def _create_prefix_iter(self, faker: Faker, rand: Random) -> Iterator[str]: # Create prefix list maintaining the correct distribution prefixes = [] - for bucket, weight in zip(self.config.prefix_buckets, unnorm_weights): + for bucket, weight in zip(self.config.prefix_buckets, unnorm_weights, strict=False): bucket_prefixes = [ self._create_prompt(bucket.prefix_tokens, faker) for _ in range(bucket.prefix_count) diff --git a/src/guidellm/data/loaders.py b/src/guidellm/data/loaders.py index f397ad51..0d83d726 100644 --- a/src/guidellm/data/loaders.py +++ b/src/guidellm/data/loaders.py @@ -39,7 +39,7 @@ def __init__( ) self.datasets = [] - for datum, data_kwargs in zip(data, data_args): + for datum, data_kwargs in zip(data, data_args, strict=False): self.datasets.append( DatasetDeserializerFactory.deserialize( data=datum, diff --git a/src/guidellm/utils/registry.py b/src/guidellm/utils/registry.py index f341dfdd..1a1a213f 100644 --- a/src/guidellm/utils/registry.py +++ b/src/guidellm/utils/registry.py @@ -11,7 +11,7 @@ from __future__ import 
annotations from collections.abc import Callable -from typing import Any, ClassVar, Generic, TypeVar, cast +from typing import ClassVar, Generic, TypeVar, cast from guidellm.utils.auto_importer import AutoImporterMixin diff --git a/src/guidellm/utils/statistics.py b/src/guidellm/utils/statistics.py index 350d8311..0529cb0c 100644 --- a/src/guidellm/utils/statistics.py +++ b/src/guidellm/utils/statistics.py @@ -288,13 +288,14 @@ def from_request_times( if distribution_type == "concurrency": # For concurrency, each request adds to concurrency at start # and subtracts at end - for (start, end), weight in zip(requests, weights): + for (start, end), weight in zip(requests, weights, strict=False): events.append((start, weight)) events.append((end, -1 * weight)) elif distribution_type == "rate": # For rate, each request is added at the end time only - events.append((min(0, *(start for start, _ in requests)), 0.0)) - for (_, end), weight in zip(requests, weights): + global_start = min(start for start, _ in requests) if requests else 0.0 + events.append((global_start, 0.0)) + for (_, end), weight in zip(requests, weights, strict=False): events.append((end, weight)) else: raise ValueError( @@ -633,36 +634,36 @@ def from_request_times( ) _, successful_requests, successful_weights = ( - zip(*successful) + zip(*successful, strict=False) if ( successful := list( filter( lambda val: val[0] == "successful", - zip(request_types, requests, weights), + zip(request_types, requests, weights, strict=False), ) ) ) else ([], [], []) ) _, incomplete_requests, incomplete_weights = ( - zip(*incomplete) + zip(*incomplete, strict=False) if ( incomplete := list( filter( lambda val: val[0] == "incomplete", - zip(request_types, requests, weights), + zip(request_types, requests, weights, strict=False), ) ) ) else ([], [], []) ) _, errored_requests, errored_weights = ( - zip(*errored) + zip(*errored, strict=False) if ( errored := list( filter( lambda val: val[0] == "error", - zip(request_types, requests, weights), + zip(request_types, requests, weights, strict=False), ) ) ) diff --git a/tests/unit/data/deserializers/test_synthetic.py b/tests/unit/data/deserializers/test_synthetic.py index 58b76aee..de95227a 100644 --- a/tests/unit/data/deserializers/test_synthetic.py +++ b/tests/unit/data/deserializers/test_synthetic.py @@ -362,7 +362,7 @@ def test_random_seeding_consistency(self, simple_config, mock_tokenizer): items1 = [] items2 = [] - for i, (item1, item2) in enumerate(zip(generator1, generator2)): + for i, (item1, item2) in enumerate(zip(generator1, generator2, strict=False)): items1.append(item1) items2.append(item2) if i >= 2: # Only get 3 items @@ -370,7 +370,7 @@ def test_random_seeding_consistency(self, simple_config, mock_tokenizer): # With same seed and deterministic mocks, results should be identical assert len(items1) == len(items2) - for item1, item2 in zip(items1, items2): + for item1, item2 in zip(items1, items2, strict=False): assert item1["prompt_tokens_count"] == item2["prompt_tokens_count"] assert item1["output_tokens_count"] == item2["output_tokens_count"] From 2b0fef8c193e4f1dcee5ede9ea050195ed6bfe12 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Mon, 13 Oct 2025 14:15:58 -0400 Subject: [PATCH 79/90] Update src/guidellm/data/deserializers/synthetic.py Co-authored-by: Samuel Monson Signed-off-by: Mark Kurtz --- src/guidellm/data/deserializers/synthetic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/guidellm/data/deserializers/synthetic.py 
b/src/guidellm/data/deserializers/synthetic.py index 92e8fc14..b518bf73 100644 --- a/src/guidellm/data/deserializers/synthetic.py +++ b/src/guidellm/data/deserializers/synthetic.py @@ -100,7 +100,7 @@ class SyntheticTextDatasetConfig(StandardBaseModel): @model_validator(mode="after") def check_prefix_options(self) -> SyntheticTextDatasetConfig: prefix_count = self.__pydantic_extra__.get("prefix_count", None) # type: ignore[attr-defined] - prefix_tokens = self.__pydantic_extra__.get("prefix_count", None) # type: ignore[attr-defined] + prefix_tokens = self.__pydantic_extra__.get("prefix_tokens", None) # type: ignore[attr-defined] if prefix_count is not None or prefix_tokens is not None: if self.prefix_buckets: raise ValueError( From 24f2ca38f4851e52df50672a7119bfb78dad7181 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Mon, 13 Oct 2025 14:53:36 -0400 Subject: [PATCH 80/90] Update src/guidellm/scheduler/worker_group.py Co-authored-by: Samuel Monson Signed-off-by: Mark Kurtz --- src/guidellm/scheduler/worker_group.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index 1e9db124..41c41f21 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -161,7 +161,7 @@ async def create_processes(self): self.backend.requests_limit or math.inf, ) ) != math.inf: - max_conc = requests_limit # type: ignore[assignment] + max_conc = int(requests_limit) else: # If concurrency not specified, use settings max_conc = settings.max_concurrency From ef36af11195317fd8684389e18a0ba14e5204c7d Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Mon, 13 Oct 2025 14:59:17 -0400 Subject: [PATCH 81/90] Fixes from review --- pyproject.toml | 6 +- src/guidellm/backends/response_handlers.py | 136 +++++++++---------- src/guidellm/benchmark/entrypoints.py | 19 +-- src/guidellm/benchmark/types.py | 8 +- src/guidellm/data/deserializers/synthetic.py | 4 +- src/guidellm/data/preprocessors/mappers.py | 9 +- src/guidellm/scheduler/worker.py | 12 +- src/guidellm/settings.py | 4 +- src/guidellm/utils/__init__.py | 11 +- src/guidellm/utils/encoding.py | 38 ++---- src/guidellm/utils/imports.py | 9 ++ 11 files changed, 109 insertions(+), 147 deletions(-) create mode 100644 src/guidellm/utils/imports.py diff --git a/pyproject.toml b/pyproject.toml index 51a2a695..8fe6d950 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,10 +78,8 @@ dependencies = [ [project.optional-dependencies] perf = ["orjson", "msgpack", "msgspec", "uvloop"] -recommended = [ - "tiktoken>=0.11.0", # For OpenAI tokenizer - "blobfile>=3.1.0", # For OpenAI tokenizer -] +openai = ["tiktoken>=0.11.0", "blobfile>=3.1.0"] +recommended = ["guidellm[perf,openai]"] dev = [ # build "build>=1.0.0", diff --git a/src/guidellm/backends/response_handlers.py b/src/guidellm/backends/response_handlers.py index 37952428..44c949e6 100644 --- a/src/guidellm/backends/response_handlers.py +++ b/src/guidellm/backends/response_handlers.py @@ -10,16 +10,10 @@ from __future__ import annotations -import json -from typing import Any, Protocol, cast +from typing import Any, Protocol from guidellm.schemas import GenerationRequest, GenerationResponse, UsageMetrics -from guidellm.utils import RegistryMixin - -try: - import orjson -except ImportError: - orjson = None # type: ignore[assignment] +from guidellm.utils import RegistryMixin, json __all__ = [ "AudioResponseHandler", @@ -115,8 +109,7 @@ def compile_non_streaming( :param response: Complete API response 
containing choices and usage data :return: Standardized GenerationResponse with extracted text and metrics """ - choices = cast("list[dict]", response.get("choices", [])) - usage = cast("dict[str, int | dict[str, int]]", response.get("usage", {})) + choices, usage = self.extract_choices_and_usage(response) input_metrics, output_metrics = self.extract_metrics(usage) return GenerationResponse( @@ -139,26 +132,17 @@ def add_streaming_line(self, line: str) -> int | None: :param line: Raw SSE line from the streaming response :return: 1 if text content was extracted, 0 if line ignored, None if done """ - if line == "data: [DONE]": - return None + if not (data := self.extract_line_data(line)): + return None if data is None else 0 - if not line or not (line := line.strip()) or not line.startswith("data:"): - return 0 - - line = line[len("data:") :].strip() - data = cast( - "dict[str, Any]", - json.loads(line) if orjson is None else orjson.loads(line), - ) updated = False + choices, usage = self.extract_choices_and_usage(data) - if (choices := cast("list[dict]", data.get("choices"))) and ( - text := choices[0].get("text") - ): + if text := choices[0].get("text"): self.streaming_texts.append(text) updated = True - if usage := cast("dict[str, int | dict[str, int]]", data.get("usage")): + if usage: self.streaming_usage = usage return 1 if updated else 0 @@ -182,6 +166,34 @@ def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: output_metrics=output_metrics, ) + def extract_line_data(self, line: str) -> dict[str, Any] | None: + """ + Extract JSON data from a streaming response line. + + :param line: Raw line from the streaming response + :return: Parsed JSON data as a dictionary, or None if line is invalid + """ + if line == "data: [DONE]": + return None + + if not line or not (line := line.strip()) or not line.startswith("data:"): + return {} + + line = line[len("data:") :].strip() + + return json.loads(line) + + def extract_choices_and_usage( + self, response: dict + ) -> tuple[list[dict], dict[str, int | dict[str, int]]]: + """ + Extract choices and usage data from the API response. 
+ + :param response: Complete API response containing choices and usage data + :return: Tuple of (choices list, usage dictionary) + """ + return response.get("choices", []), response.get("usage", {}) + def extract_metrics( self, usage: dict[str, int | dict[str, int]] | None ) -> tuple[UsageMetrics, UsageMetrics]: @@ -194,15 +206,14 @@ def extract_metrics( if not usage: return UsageMetrics(), UsageMetrics() - input_details = cast("dict[str, int]", usage.get("prompt_tokens_details", {})) - output_details = cast( - "dict[str, int]", usage.get("completion_tokens_details", {}) + input_details: dict[str, int] = usage.get("prompt_tokens_details", {}) or {} + output_details: dict[str, int] = ( + usage.get("completion_tokens_details", {}) or {} ) return UsageMetrics( text_tokens=( - input_details.get("prompt_tokens") - or cast("int", usage.get("prompt_tokens")) + input_details.get("prompt_tokens") or usage.get("prompt_tokens") ), image_tokens=input_details.get("image_tokens"), video_tokens=input_details.get("video_tokens"), @@ -211,7 +222,7 @@ def extract_metrics( ), UsageMetrics( text_tokens=( output_details.get("completion_tokens") - or cast("int", usage.get("completion_tokens")) + or usage.get("completion_tokens") ), image_tokens=output_details.get("image_tokens"), video_tokens=output_details.get("video_tokens"), @@ -243,8 +254,7 @@ def compile_non_streaming( :param response: Complete API response containing choices and usage data :return: Standardized GenerationResponse with extracted content and metrics """ - choices = cast("list[dict]", response.get("choices", [])) - usage = cast("dict[str, int | dict[str, int]]", response.get("usage", {})) + choices, usage = self.extract_choices_and_usage(response) input_metrics, output_metrics = self.extract_metrics(usage) return GenerationResponse( @@ -252,9 +262,7 @@ def compile_non_streaming( request_args=str( request.arguments.model_dump() if request.arguments else None ), - text=cast("dict", choices[0].get("message", {})).get("content", "") - if choices - else "", + text=(choices[0].get("message", {}).get("content", "") if choices else ""), input_metrics=input_metrics, output_metrics=output_metrics, ) @@ -269,27 +277,17 @@ def add_streaming_line(self, line: str) -> int | None: :param line: Raw SSE line from the streaming response :return: 1 if content was extracted, 0 if line ignored, None if done """ - if line == "data: [DONE]": - return None + if not (data := self.extract_line_data(line)): + return None if data is None else 0 - if not line or not (line := line.strip()) or not line.startswith("data:"): - return 0 - - line = line[len("data:") :].strip() - data = cast( - "dict[str, Any]", - json.loads(line) if orjson is None else orjson.loads(line), - ) updated = False + choices, usage = self.extract_choices_and_usage(data) - # Extract delta content for chat completion chunks - if choices := cast("list[dict]", data.get("choices")): - delta = choices[0].get("delta", {}) - if content := delta.get("content"): - self.streaming_texts.append(content) + if choices and (content := choices[0].get("delta", {}).get("content")): + self.streaming_texts.append(content) updated = True - if usage := cast("dict[str, int | dict[str, int]]", data.get("usage")): + if usage: self.streaming_usage = usage return 1 if updated else 0 @@ -355,10 +353,10 @@ def compile_non_streaming( :param response: Complete API response containing text and usage data :return: Standardized GenerationResponse with extracted text and metrics """ - usage = cast("dict[str, int]", 
response.get("usage", {})) - input_details = cast("dict[str, int]", usage.get("input_token_details", {})) - output_details = cast("dict[str, int]", usage.get("output_token_details", {})) - text = response.get("text", "") + usage: dict[str, int | dict[str, int]] = response.get("usage", {}) + input_details: dict[str, int] = usage.get("input_token_details", {}) or {} + output_details: dict[str, int] = usage.get("output_token_details", {}) or {} + text: str = response.get("text", "") return GenerationResponse( request_id=request.request_id, @@ -396,17 +394,16 @@ def add_streaming_line(self, line: str) -> int | None: if not line or not (line := line.strip()) or not line.startswith("{"): return 0 - data = cast( - "dict[str, Any]", - json.loads(line) if orjson is None else orjson.loads(line), - ) + data: dict[str, Any] = json.loads(line) + text: str + usage: dict[str, int | dict[str, int]] updated = False if text := data.get("text"): self.streaming_texts.append(text) updated = True - if usage := cast("dict[str, int | dict[str, int]]", data.get("usage")): + if usage := data.get("usage"): self.streaming_usage = usage return 1 if updated else 0 @@ -445,22 +442,15 @@ def extract_metrics( if not usage: return UsageMetrics(), UsageMetrics() - input_details = cast("dict[str, int]", usage.get("input_token_details", {})) - output_details = cast("dict[str, int]", usage.get("output_token_details", {})) + input_details: dict[str, int] = usage.get("input_token_details", {}) or {} + output_details: dict[str, int] = usage.get("output_token_details", {}) or {} return UsageMetrics( - text_tokens=( - input_details.get("text_tokens") - or cast("int", usage.get("input_tokens")) - ), + text_tokens=(input_details.get("text_tokens") or usage.get("input_tokens")), audio_tokens=( - input_details.get("audio_tokens") - or cast("int", usage.get("audio_tokens")) - ), - audio_seconds=( - input_details.get("seconds") or cast("int", usage.get("seconds")) + input_details.get("audio_tokens") or usage.get("audio_tokens") ), + audio_seconds=(input_details.get("seconds") or usage.get("seconds")), ), UsageMetrics( - text_tokens=output_details.get("text_tokens") - or cast("int", usage.get("output_tokens")), + text_tokens=output_details.get("text_tokens") or usage.get("output_tokens"), ) diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 422355aa..f711dbfb 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -5,15 +5,14 @@ from typing import Any, Literal from torch.utils.data import Sampler -from transformers import PreTrainedTokenizerBase -from typing_extensions import TypeAliasType from guidellm.backends import Backend, BackendType from guidellm.benchmark.benchmarker import Benchmarker from guidellm.benchmark.output import GenerativeBenchmarkerOutput from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.progress import BenchmarkerProgress, BenchmarkerProgressGroup +from guidellm.benchmark.progress import BenchmarkerProgressGroup from guidellm.benchmark.schemas import GenerativeBenchmark, GenerativeBenchmarksReport +from guidellm.benchmark.types import OutputFormatT, ProcessorInputT, ProgressInputT from guidellm.data import ( DataLoader, DatasetPreprocessor, @@ -40,20 +39,6 @@ _CURRENT_WORKING_DIR = Path.cwd() -OutputFormatT = TypeAliasType( - "OutputFormatT", - tuple[str, ...] 
- | list[str] - | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] - | None, -) - -ProcessorInputT = TypeAliasType("ProcessorInputT", str | Path | PreTrainedTokenizerBase) - -ProgressInputT = TypeAliasType( - "ProgressInputT", tuple[str, ...] | list[str] | list[BenchmarkerProgress] -) - # Helper Functions diff --git a/src/guidellm/benchmark/types.py b/src/guidellm/benchmark/types.py index 94df4b8e..8fa2dbfb 100644 --- a/src/guidellm/benchmark/types.py +++ b/src/guidellm/benchmark/types.py @@ -9,13 +9,7 @@ from guidellm.benchmark.output import GenerativeBenchmarkerOutput from guidellm.benchmark.progress import BenchmarkerProgress -__all__ = [ - "AggregatorInputT", - "DataInputT", - "OutputFormatT", - "ProcessorInputT", - "ProgressInputT", -] +__all__ = ["OutputFormatT", "ProcessorInputT", "ProgressInputT"] OutputFormatT = TypeAliasType( diff --git a/src/guidellm/data/deserializers/synthetic.py b/src/guidellm/data/deserializers/synthetic.py index b518bf73..d9e415c6 100644 --- a/src/guidellm/data/deserializers/synthetic.py +++ b/src/guidellm/data/deserializers/synthetic.py @@ -209,7 +209,9 @@ def _create_prefix_iter(self, faker: Faker, rand: Random) -> Iterator[str]: # Create prefix list maintaining the correct distribution prefixes = [] - for bucket, weight in zip(self.config.prefix_buckets, unnorm_weights, strict=False): + for bucket, weight in zip( + self.config.prefix_buckets, unnorm_weights, strict=False + ): bucket_prefixes = [ self._create_prompt(bucket.prefix_tokens, faker) for _ in range(bucket.prefix_count) diff --git a/src/guidellm/data/preprocessors/mappers.py b/src/guidellm/data/preprocessors/mappers.py index cbfa9c20..0783103b 100644 --- a/src/guidellm/data/preprocessors/mappers.py +++ b/src/guidellm/data/preprocessors/mappers.py @@ -120,9 +120,16 @@ def datasets_mappings( for index, dataset in enumerate(datasets) } + # Parse out user mappings that were passed in and validate them + # Must be in the format of: + # {: []} + # where can be a single string or list of strings + # and each string can be any of: + # - a column name (assumes the first dataset was intended) + # - . where is the dataset index + # - . where is the dataset name for column_type, names in input_mappings.items(): mappings[column_type] = [] - for name in names if isinstance(names, list) else [names]: if "." 
in name: dataset, column_name = name.split(".", 1) diff --git a/src/guidellm/scheduler/worker.py b/src/guidellm/scheduler/worker.py index 4b426058..45716b78 100644 --- a/src/guidellm/scheduler/worker.py +++ b/src/guidellm/scheduler/worker.py @@ -232,18 +232,18 @@ async def _processing_startup(self): self.backend_started = True await self.backend.validate() - # Wait for all processes to be ready - await wait_for_sync_barrier( - self.startup_barrier, - poll_interval=self.messaging.poll_interval, - ) - # Get messaging system ready await self.messaging.start( receive_stop_criteria=[self.requests_generated_event] ) self.messaging_started = True + # Wait for all processes to be ready + await wait_for_sync_barrier( + self.startup_barrier, + poll_interval=self.messaging.poll_interval, + ) + self.startup_completed = True async def _processing_shutdown(self): diff --git a/src/guidellm/settings.py b/src/guidellm/settings.py index 222d85f9..20d9ff96 100644 --- a/src/guidellm/settings.py +++ b/src/guidellm/settings.py @@ -46,7 +46,7 @@ class LoggingSettings(BaseModel): disabled: bool = False clear_loggers: bool = True - console_log_level: str = "DEBUG" + console_log_level: str = "WARNING" log_file: str | None = None log_file_level: str | None = None @@ -145,7 +145,7 @@ class Settings(BaseSettings): mp_max_pending_buffer_percent: float = 0.5 mp_max_worker_buffer_percent: float = 0.2 max_concurrency: int = 512 - max_worker_processes: int = 2 + max_worker_processes: int = 10 scheduler_start_delay_non_distributed: float = 1.0 constraint_error_window_size: float = 30 constraint_error_min_processed: float = 30 diff --git a/src/guidellm/utils/__init__.py b/src/guidellm/utils/__init__.py index 702b2a9d..89312771 100644 --- a/src/guidellm/utils/__init__.py +++ b/src/guidellm/utils/__init__.py @@ -17,13 +17,9 @@ safe_getattr, safe_multiply, ) -from .hf_datasets import ( - SUPPORTED_TYPES, - save_dataset_to_file, -) -from .hf_transformers import ( - check_load_processor, -) +from .hf_datasets import SUPPORTED_TYPES, save_dataset_to_file +from .hf_transformers import check_load_processor +from .imports import json from .messaging import ( InterProcessMessaging, InterProcessMessagingManagerQueue, @@ -113,6 +109,7 @@ "format_value_display", "get_literal_vals", "is_punctuation", + "json", "load_text", "recursive_key_update", "safe_add", diff --git a/src/guidellm/utils/encoding.py b/src/guidellm/utils/encoding.py index 2931e98a..7ececef5 100644 --- a/src/guidellm/utils/encoding.py +++ b/src/guidellm/utils/encoding.py @@ -10,7 +10,6 @@ from __future__ import annotations -import json from collections.abc import Mapping from typing import Any, ClassVar, Generic, Literal, TypeVar, cast @@ -24,11 +23,11 @@ HAS_MSGPACK = False try: - from msgspec.msgpack import ( # type: ignore[import-not-found] # Optional dependency - Decoder as MsgspecDecoder, + from msgspec.msgpack import ( + Decoder as MsgspecDecoder, # type: ignore[import-not-found] # Optional dependency ) - from msgspec.msgpack import ( # type: ignore[import-not-found] # Optional dependency - Encoder as MsgspecEncoder, + from msgspec.msgpack import ( + Encoder as MsgspecEncoder, # type: ignore[import-not-found] # Optional dependency ) HAS_MSGSPEC = True @@ -36,16 +35,11 @@ MsgspecDecoder = MsgspecEncoder = None HAS_MSGSPEC = False -try: - import orjson # type: ignore[import-not-found] # Optional dependency - - HAS_ORJSON = True -except ImportError: - orjson = None - HAS_ORJSON = False from pydantic import BaseModel +from guidellm.utils.imports import json + __all__ = 
[ "Encoder", "EncodingTypesAlias", @@ -510,7 +504,7 @@ def to_sequence(self, obj: Any) -> str | Any: ): payload_type = "collection_mapping" keys = ",".join(str(key) for key in obj) - payload = keys.encode() + b"|" if HAS_ORJSON else keys + "|" + payload = keys.encode() + b"|" for item in obj.values(): is_pydantic = isinstance(item, BaseModel) payload = self.pack_next_sequence( @@ -601,15 +595,7 @@ def to_sequence_pydantic(self, obj: BaseModel) -> str | bytes: class_module: str = obj.__class__.__module__ json_data = obj.__pydantic_serializer__.to_json(obj) - return ( - (class_name.encode() + b"|" + class_module.encode() + b"|" + json_data) - if HAS_ORJSON - else ( - class_name + "|" + class_module + "|" + json_data.decode() - if isinstance(json_data, bytes) - else json_data - ) - ) + return class_name.encode() + b"|" + class_module.encode() + b"|" + json_data def from_sequence_pydantic(self, data: str | bytes) -> BaseModel: """ @@ -643,7 +629,7 @@ def to_sequence_python(self, obj: Any) -> str | bytes: :param obj: Python object to serialize :return: JSON string or bytes representation """ - return orjson.dumps(obj) if HAS_ORJSON else json.dumps(obj) + return json.dumps(obj) def from_sequence_python(self, data: str | bytes) -> Any: """ @@ -651,13 +637,7 @@ def from_sequence_python(self, data: str | bytes) -> Any: :param data: JSON string or bytes to deserialize :return: Reconstructed Python object - :raises ImportError: If orjson is required but not available """ - if isinstance(data, bytes): - if not HAS_ORJSON: - raise ImportError("orjson is not available, cannot deserialize bytes") - return orjson.loads(data) - return json.loads(data) def pack_next_sequence( # noqa: C901, PLR0912 diff --git a/src/guidellm/utils/imports.py b/src/guidellm/utils/imports.py new file mode 100644 index 00000000..9a6b82d1 --- /dev/null +++ b/src/guidellm/utils/imports.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +try: + import orjson as json +except ImportError: + import json + + +__all__ = ["json"] From fcc1114a9c79e61b1c0ebe0bbae546499fea1545 Mon Sep 17 00:00:00 2001 From: Benjamin Blue Date: Mon, 13 Oct 2025 17:12:36 -0400 Subject: [PATCH 82/90] Features/add tooltip to line chart (#392) ## Summary The Metrics summary section now includes the data point dots on the line and a tooltip including the strategy label, e.g.: synchronous, constant@32.32, concurrent@5 ## Test Plan - Unit test for backend change - No tests written to check on tooltip addition, seems overflow complex to try and hover the dots on the chart but I didn't look into it. ## Related Issues - Resolves #277 --- - [x] "I certify that all code in this PR is my own." 
--------- Signed-off-by: dalthecow --- src/guidellm/presentation/data_models.py | 24 ++++++++++++++-- .../MetricLine/MetricLine.component.tsx | 28 +++++++++++++++++-- .../MetricsSummary.component.tsx | 2 +- src/ui/lib/store/benchmarksWindowData.ts | 10 +++++++ .../benchmarks/benchmarks.interfaces.ts | 1 + .../slices/benchmarks/benchmarks.selectors.ts | 25 ++++++++++++++--- tests/unit/presentation/test_data_models.py | 10 ++++++- 7 files changed, 89 insertions(+), 11 deletions(-) diff --git a/src/guidellm/presentation/data_models.py b/src/guidellm/presentation/data_models.py index ff2863b4..2401b3ef 100644 --- a/src/guidellm/presentation/data_models.py +++ b/src/guidellm/presentation/data_models.py @@ -5,6 +5,8 @@ from pydantic import BaseModel, computed_field +from guidellm.scheduler.strategy import SchedulingStrategy + if TYPE_CHECKING: from guidellm.benchmark import GenerativeBenchmark @@ -212,12 +214,30 @@ class BenchmarkDatum(BaseModel): ttft: TabularDistributionSummary throughput: TabularDistributionSummary time_per_request: TabularDistributionSummary + strategy_display_str: str + + @classmethod + def get_strategy_display_str(cls, strategy: SchedulingStrategy): + strategy_type = strategy if isinstance(strategy, str) else strategy.type_ + strategy_instance = ( + strategy if isinstance(strategy, SchedulingStrategy) else None + ) + + if strategy_type == "concurrent": + rate = f"@{strategy.streams}" if strategy_instance else "@##" # type: ignore[attr-defined] + elif strategy_type in ("constant", "poisson"): + rate = f"@{strategy.rate:.2f}" if strategy_instance else "@#.##" # type: ignore[attr-defined] + else: + rate = "" + return f"{strategy_type}{rate}" @classmethod def from_benchmark(cls, bm: "GenerativeBenchmark"): + rps = bm.metrics.requests_per_second.successful.mean return cls( - requests_per_second=bm.metrics.requests_per_second.successful.mean, - tpot=TabularDistributionSummary.from_distribution_summary( + strategy_display_str=cls.get_strategy_display_str(bm.args.strategy), + requests_per_second=rps, + itl=TabularDistributionSummary.from_distribution_summary( bm.metrics.inter_token_latency_ms.successful ), ttft=TabularDistributionSummary.from_distribution_summary( diff --git a/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx b/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx index 8b1b4df2..eb123593 100644 --- a/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx +++ b/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx @@ -1,5 +1,6 @@ -import { useTheme } from '@mui/material'; -import { ResponsiveLine } from '@nivo/line'; +import { Typography, useTheme } from '@mui/material'; +import { PointTooltipProps, ResponsiveLine } from '@nivo/line'; +import { BasicTooltip } from '@nivo/tooltip'; import React, { FC } from 'react'; import { useColor } from '@/lib/hooks/useColor'; @@ -49,11 +50,30 @@ export const Component: FC = ({ reverse: false, }; } + type PointTooltipPropsWithLabel = PointTooltipProps & { + point: { + data: { + label: string; + }; + }; + }; return ( ( + + {(point as PointTooltipPropsWithLabel).point.data.label} + + } + color={point.point.color} + enableChip={true} + /> + )} + pointSize={10} colors={[selectedColor]} margin={{ top: 20, right: 10, bottom: 20, left: 35.5 }} xScale={{ type: 'linear', min: minX }} @@ -92,7 +112,6 @@ export const Component: FC = ({ }} enableGridX={false} enableGridY={false} - pointSize={0} useMesh={true} layers={[ CustomAxes, @@ -115,6 +134,9 @@ export const Component: FC = ({ ), 'axes', 
'lines', + 'points', + 'markers', + 'mesh', ]} theme={lineTheme} /> diff --git a/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx b/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx index 0d804f5c..9530d9e7 100644 --- a/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx +++ b/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx @@ -102,7 +102,7 @@ export const Component = () => { return ( <> - + diff --git a/src/ui/lib/store/benchmarksWindowData.ts b/src/ui/lib/store/benchmarksWindowData.ts index a589e8ed..b4af5063 100644 --- a/src/ui/lib/store/benchmarksWindowData.ts +++ b/src/ui/lib/store/benchmarksWindowData.ts @@ -1,5 +1,6 @@ export const benchmarksScript = `window.benchmarks = [ { + strategyDisplayStr: "synchronous", requestsPerSecond: 11.411616848282272, tpot: { mean: 8.758024845683707, @@ -171,6 +172,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@36.28", requestsPerSecond: 36.289181300710815, tpot: { mean: 588.0161376137819, @@ -342,6 +344,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@20.75", requestsPerSecond: 20.752070927855794, tpot: { mean: 116.28360712595156, @@ -513,6 +516,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@26.81", requestsPerSecond: 26.81917480361788, tpot: { mean: 299.7306064613554, @@ -684,6 +688,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@26.82", requestsPerSecond: 26.823988819498975, tpot: { mean: 683.8011571339198, @@ -855,6 +860,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@24.50", requestsPerSecond: 24.50047903792646, tpot: { mean: 742.9258901891964, @@ -1026,6 +1032,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@25.61", requestsPerSecond: 25.617829792196602, tpot: { mean: 663.3098317044122, @@ -1197,6 +1204,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@37.02", requestsPerSecond: 37.02892550982192, tpot: { mean: 606.4144710877113, @@ -1368,6 +1376,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "constant@37.29", requestsPerSecond: 37.29183354201869, tpot: { mean: 603.3237551205925, @@ -1539,6 +1548,7 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { + strategyDisplayStr: "throughput", requestsPerSecond: 37.45318312972309, tpot: { mean: 600.7204526769262, diff --git a/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts b/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts index 602ae17e..6c01d5e2 100644 --- a/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts +++ b/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts @@ -27,6 +27,7 @@ export interface BenchmarkMetrics { export interface Benchmark extends BenchmarkMetrics { requestsPerSecond: number; + strategyDisplayStr: string; } export type Benchmarks = Benchmark[]; diff --git a/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts b/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts index 53d54f40..d3da9bf9 100644 --- a/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts +++ b/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts @@ -11,6 +11,18 @@ import { selectSloState } from '../slo/slo.selectors'; export const selectBenchmarks = (state: RootState) => 
state.benchmarks.data; +const getUnitsByMetric = (metric: string) => { + switch (metric) { + case 'ttft': + case 'tpot': + return 'ms'; + case 'timePerRequest': + return 'sec'; + case 'throughput': + return 'tok/s'; + } +}; + export const selectMetricsSummaryLineData = createSelector( [selectBenchmarks, selectSloState], (benchmarks, sloState) => { @@ -18,8 +30,10 @@ export const selectMetricsSummaryLineData = createSelector( ?.slice() ?.sort((bm1, bm2) => (bm1.requestsPerSecond > bm2.requestsPerSecond ? 1 : -1)); const selectedPercentile = sloState.enforcedPercentile; - - const lineData: { [K in keyof BenchmarkMetrics]: Point[] } = { + interface PointWithLabel extends Point { + label: string; + } + const lineData: { [K in keyof BenchmarkMetrics]: PointWithLabel[] } = { ttft: [], tpot: [], timePerRequest: [], @@ -32,14 +46,17 @@ export const selectMetricsSummaryLineData = createSelector( 'throughput', ]; metrics.forEach((metric) => { - const data: Point[] = []; + const data: PointWithLabel[] = []; sortedByRPS?.forEach((benchmark) => { const percentile = benchmark[metric].percentileRows.find( (p) => p.percentile === selectedPercentile ); + const yValue = percentile?.value ?? 0; + const units = getUnitsByMetric(metric); data.push({ x: benchmark.requestsPerSecond, - y: percentile?.value ?? 0, + y: yValue, + label: `${benchmark.strategyDisplayStr} ${formatNumber(yValue)} ${units}`, }); }); diff --git a/tests/unit/presentation/test_data_models.py b/tests/unit/presentation/test_data_models.py index c1663c43..e879406d 100644 --- a/tests/unit/presentation/test_data_models.py +++ b/tests/unit/presentation/test_data_models.py @@ -1,6 +1,7 @@ import pytest -from guidellm.presentation.data_models import Bucket +from guidellm.presentation.data_models import BenchmarkDatum, Bucket +from tests.unit.mock_benchmark import mock_generative_benchmark @pytest.mark.smoke @@ -18,3 +19,10 @@ def test_bucket_from_data(): assert buckets[1].value == 8.0 assert buckets[1].count == 5 assert bucket_width == 1 + + +@pytest.mark.smoke +def test_from_benchmark_includes_strategy_display_str(): + mock_bm = mock_generative_benchmark() + bm = BenchmarkDatum.from_benchmark(mock_bm) + assert bm.strategy_display_str == "synchronous" From b0becd566631c111673d3b3552b0d6fbe183121c Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Tue, 14 Oct 2025 15:13:12 -0400 Subject: [PATCH 83/90] Reenablement of flows and fixes --- src/guidellm/__main__.py | 10 +- src/guidellm/benchmark/__init__.py | 7 +- src/guidellm/benchmark/benchmarker.py | 62 +-- src/guidellm/benchmark/entrypoints.py | 48 +-- src/guidellm/benchmark/progress.py | 260 +---------- src/guidellm/benchmark/schemas.py | 8 +- src/guidellm/benchmark/types.py | 7 +- src/guidellm/data/loaders.py | 43 +- src/guidellm/data/preprocessors/formatters.py | 211 ++++++--- src/guidellm/data/utils/__init__.py | 16 +- src/guidellm/data/utils/functions.py | 405 ++++++++---------- src/guidellm/scheduler/worker_group.py | 128 +++--- src/guidellm/schemas/request.py | 3 - 13 files changed, 480 insertions(+), 728 deletions(-) diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 97f3e436..680ac852 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -286,7 +286,7 @@ def benchmark(): ) @click.option( "--data-num-workers", - default=1, + default=None, type=int, help="The number of worker processes to use for data loading.", ) @@ -505,11 +505,9 @@ def run( output_formats=output_formats, # Updates configuration progress=( - [ - GenerativeConsoleBenchmarkerProgress( - 
display_scheduler_stats=display_scheduler_stats - ) - ] + GenerativeConsoleBenchmarkerProgress( + display_scheduler_stats=display_scheduler_stats + ) if not disable_progress else None ), diff --git a/src/guidellm/benchmark/__init__.py b/src/guidellm/benchmark/__init__.py index d987ebb3..4c7cc4a5 100644 --- a/src/guidellm/benchmark/__init__.py +++ b/src/guidellm/benchmark/__init__.py @@ -15,11 +15,7 @@ SynchronousProfile, ThroughputProfile, ) -from .progress import ( - BenchmarkerProgress, - BenchmarkerProgressGroup, - GenerativeConsoleBenchmarkerProgress, -) +from .progress import BenchmarkerProgress, GenerativeConsoleBenchmarkerProgress from .schemas import ( Benchmark, BenchmarkArgs, @@ -44,7 +40,6 @@ "Benchmarker", "BenchmarkerDict", "BenchmarkerProgress", - "BenchmarkerProgressGroup", "ConcurrentProfile", "EstimatedBenchmarkState", "GenerativeAudioMetricsSummary", diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index 2fa1c36e..ed9d789b 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -23,20 +23,19 @@ from typing import Generic from guidellm.benchmark.profile import Profile +from guidellm.benchmark.progress import BenchmarkerProgress from guidellm.benchmark.schemas import ( BenchmarkArgs, BenchmarkT, EstimatedBenchmarkState, ) +from guidellm.logger import logger from guidellm.scheduler import ( BackendInterface, Environment, - NonDistributedEnvironment, RequestT, ResponseT, Scheduler, - SchedulerState, - SchedulingStrategy, ) from guidellm.utils import ThreadSafeSingletonMixin @@ -65,19 +64,13 @@ async def run( requests: Iterable[RequestT | Iterable[RequestT | tuple[RequestT, float]]], backend: BackendInterface[RequestT, ResponseT], profile: Profile, - environment: Environment | None = None, + environment: Environment, + progress: BenchmarkerProgress[BenchmarkT] | None = None, sample_requests: int | None = 20, warmup: float | None = None, cooldown: float | None = None, prefer_response_metrics: bool = True, - ) -> AsyncIterator[ - tuple[ - EstimatedBenchmarkState | None, - BenchmarkT | None, - SchedulingStrategy, - SchedulerState | None, - ] - ]: + ) -> AsyncIterator[BenchmarkT]: """ Execute benchmark runs across multiple scheduling strategies. @@ -95,15 +88,17 @@ async def run( :raises Exception: If benchmark execution or compilation fails. 
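To make the new `progress` hook parameter concrete, here is a minimal sketch of a custom handler. It assumes only the `BenchmarkerProgress` interface and exports visible in this patch (`guidellm.benchmark`); the class below is illustrative, not part of the codebase.

```python
from guidellm.benchmark import BenchmarkerProgress


class PrintProgress(BenchmarkerProgress):
    """Log lifecycle events instead of rendering the Rich console display."""

    async def on_initialize(self, profile):
        print(f"profile resolved: {profile}")

    async def on_benchmark_start(self, strategy):
        print(f"strategy started: {strategy}")

    async def on_benchmark_update(self, estimated_state, scheduler_state):
        pass  # called repeatedly while a strategy is running

    async def on_benchmark_complete(self, benchmark):
        print("strategy finished")

    async def on_finalize(self):
        print("benchmark run complete")
```

An instance would then be passed as `progress=PrintProgress()` to `Benchmarker.run()` or `benchmark_generative_text()`, matching the signatures updated in this patch.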
""" with self.thread_lock: - if environment is None: - environment = NonDistributedEnvironment() + if progress: + await progress.on_initialize(profile) run_id = str(uuid.uuid4()) strategies_generator = profile.strategies_generator() strategy, constraints = next(strategies_generator) while strategy is not None: - yield None, None, strategy, None + if progress: + await progress.on_benchmark_start(strategy) + args = BenchmarkArgs( run_id=run_id, run_index=len(profile.completed_strategies), @@ -127,18 +122,23 @@ async def run( env=environment, **constraints or {}, ): - benchmark_class.update_estimate( - args, - estimated_state, - response, - request, - request_info, - scheduler_state, - ) - yield estimated_state, None, strategy, scheduler_state - - if scheduler_state is None: - raise RuntimeError("Scheduler state is None after execution.") + try: + benchmark_class.update_estimate( + args, + estimated_state, + response, + request, + request_info, + scheduler_state, + ) + if progress: + await progress.on_benchmark_update( + estimated_state, scheduler_state + ) + except Exception as err: + logger.error( + f"Error updating benchmark estimate/progress: {err}" + ) benchmark = benchmark_class.compile( args=args, @@ -151,10 +151,16 @@ async def run( strategy=strategy, constraints=constraints, ) - yield None, benchmark, strategy, None + if progress: + await progress.on_benchmark_complete(benchmark) + + yield benchmark try: strategy, constraints = strategies_generator.send(benchmark) except StopIteration: strategy = None constraints = None + + if progress: + await progress.on_finalize() diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index f711dbfb..18768216 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -10,9 +10,9 @@ from guidellm.benchmark.benchmarker import Benchmarker from guidellm.benchmark.output import GenerativeBenchmarkerOutput from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.progress import BenchmarkerProgressGroup +from guidellm.benchmark.progress import BenchmarkerProgress from guidellm.benchmark.schemas import GenerativeBenchmark, GenerativeBenchmarksReport -from guidellm.benchmark.types import OutputFormatT, ProcessorInputT, ProgressInputT +from guidellm.benchmark.types import OutputFormatT, ProcessorInputT from guidellm.data import ( DataLoader, DatasetPreprocessor, @@ -271,7 +271,6 @@ async def resolve_output_formats( return resolved -# @validate_call(config={"arbitrary_types_allowed": True}) async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 # Required target: str, @@ -296,7 +295,7 @@ async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 ) = "chat_completions", data_collator: Callable | Literal["generative"] | None = "generative", data_sampler: Sampler[int] | Literal["shuffle"] | None = None, - data_num_workers: int | None = 1, + data_num_workers: int | None = None, dataloader_kwargs: dict[str, Any] | None = None, random_seed: int = 42, # Output configuration @@ -308,7 +307,7 @@ async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 | None ) = ("console", "json", "html", "csv"), # Updates configuration - progress: ProgressInputT | None = None, + progress: BenchmarkerProgress | None = None, print_updates: bool = False, # Benchmarker configuration benchmark_cls: type[GenerativeBenchmark] = GenerativeBenchmark, @@ -366,37 +365,26 @@ async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 
output_formats=output_formats, output_path=output_path, console=console ) - progress_group = BenchmarkerProgressGroup( - instances=progress or [], enabled=bool(progress) - ) report = GenerativeBenchmarksReport() console.print_update( title="Setup complete, starting benchmarks...", status="success" ) console.print("\n\n") - async for ( - _aggregator_update, - benchmark, - _strategy, - _scheduler_state, - ) in progress_group( - profile, - Benchmarker[ - GenerativeBenchmark, - GenerationRequest, - GenerationResponse, - ]().run( - benchmark_class=benchmark_cls, - requests=request_loader, - backend=backend, - profile=profile, - environment=NonDistributedEnvironment(), - sample_requests=sample_requests, - warmup=warmup, - cooldown=cooldown, - prefer_response_metrics=True, - ), + benchmarker: Benchmarker[ + GenerativeBenchmark, GenerationRequest, GenerationResponse + ] = Benchmarker() + async for benchmark in benchmarker.run( + benchmark_class=benchmark_cls, + requests=request_loader, + backend=backend, + profile=profile, + environment=NonDistributedEnvironment(), + progress=progress, + sample_requests=sample_requests, + warmup=warmup, + cooldown=cooldown, + prefer_response_metrics=True, ): if benchmark: report.benchmarks.append(benchmark) diff --git a/src/guidellm/benchmark/progress.py b/src/guidellm/benchmark/progress.py index 5a88d696..558def67 100644 --- a/src/guidellm/benchmark/progress.py +++ b/src/guidellm/benchmark/progress.py @@ -16,9 +16,7 @@ from __future__ import annotations -import asyncio from abc import ABC, abstractmethod -from collections.abc import AsyncIterable, AsyncIterator, Iterable from dataclasses import dataclass from datetime import datetime from typing import Any, Generic, Literal @@ -46,11 +44,7 @@ from guidellm.scheduler import SchedulerState, SchedulingStrategy, StrategyType from guidellm.utils import Colors, format_value_display -__all__ = [ - "BenchmarkerProgress", - "BenchmarkerProgressGroup", - "GenerativeConsoleBenchmarkerProgress", -] +__all__ = ["BenchmarkerProgress", "GenerativeConsoleBenchmarkerProgress"] class BenchmarkerProgress(Generic[BenchmarkT], ABC): @@ -62,106 +56,15 @@ class BenchmarkerProgress(Generic[BenchmarkT], ABC): enable/disable functionality for conditional progress tracking. """ - def __init__(self, enabled: bool = True): + def __init__(self): """ Initialize progress tracker. :param enabled: Whether to enable progress tracking and display. """ - self._enabled = enabled self.profile: Profile = None self.current_strategy: SchedulingStrategy = None - @property - def enabled(self) -> bool: - """ - :return: Whether progress tracking is currently enabled. - """ - return self._enabled - - @enabled.setter - def enabled(self, value: bool) -> None: - """ - :param value: True to enable progress tracking, False to disable. - :raises RuntimeError: If called after progress run has started. - """ - if self.profile is not None: - raise RuntimeError( - "Cannot change enabled state after __call__ for progress run" - ) - - self._enabled = value - - def __call__( - self, - profile: Profile, - agen: AsyncIterable[ - tuple[ - EstimatedBenchmarkState | None, - BenchmarkT | None, - SchedulingStrategy, - SchedulerState | None, - ] - ], - ) -> AsyncIterator[ - tuple[ - EstimatedBenchmarkState | None, - BenchmarkT | None, - SchedulingStrategy, - SchedulerState | None, - ] - ]: - """ - Track progress through benchmark execution pipeline. 
- - Wraps the provided async generator to monitor benchmark progress, - calling appropriate lifecycle hooks based on execution state. - - :param profile: Benchmark profile configuration. - :param agen: Async generator yielding benchmark execution updates. - :return: Async iterator forwarding original updates with progress tracking. - """ - - async def aiterator() -> AsyncIterator[ - tuple[ - EstimatedBenchmarkState | None, - BenchmarkT | None, - SchedulingStrategy, - SchedulerState | None, - ] - ]: - self.profile = profile - if self.enabled: - await self.on_initialize(profile) - - async for aggregator_update, benchmark, strategy, scheduler_state in agen: - if self.enabled: - await self.on_raw_update( - profile, - aggregator_update, - benchmark, - strategy, - scheduler_state, - ) - - if self.current_strategy != strategy: - self.current_strategy = strategy - await self.on_benchmark_start(strategy) - elif benchmark is not None: - await self.on_benchmark_complete(benchmark) - self.current_strategy = None - else: - await self.on_benchmark_update( - aggregator_update, scheduler_state - ) - - yield aggregator_update, benchmark, strategy, scheduler_state - - if self.enabled: - await self.on_finalize() - - return aiterator() - @abstractmethod async def on_initialize(self, profile: Profile): """ @@ -180,14 +83,12 @@ async def on_benchmark_start(self, strategy: SchedulingStrategy): @abstractmethod async def on_benchmark_update( - self, - aggregator_update: EstimatedBenchmarkState, - scheduler_state: SchedulerState, + self, estimated_state: EstimatedBenchmarkState, scheduler_state: SchedulerState ): """ Handle benchmark execution progress update. - :param aggregator_update: Current benchmark metrics and statistics. + :param estimated_state: Current benchmark metrics and statistics. :param scheduler_state: Current scheduler execution state. """ @@ -203,155 +104,6 @@ async def on_benchmark_complete(self, benchmark: BenchmarkT): async def on_finalize(self): """Finalize progress tracking and cleanup resources.""" - async def on_raw_update( - self, - profile: Profile, - aggregator_update: EstimatedBenchmarkState | None, - benchmark: BenchmarkT | None, - strategy: SchedulingStrategy, - scheduler_state: SchedulerState | None, - ): - """ - Handle raw benchmark execution update. - - Optional hook for accessing all execution state updates. Default - implementation does nothing. - - :param profile: Benchmark profile configuration. - :param aggregator_update: Current benchmark metrics and statistics. - :param benchmark: Completed benchmark if available. - :param strategy: Current scheduling strategy. - :param scheduler_state: Current scheduler execution state. - """ - - -class BenchmarkerProgressGroup(BenchmarkerProgress[BenchmarkT]): - """ - Composite progress handler that manages multiple progress instances. - - Distributes progress events to all contained progress instances, enabling - parallel progress tracking through multiple channels (e.g., console display - and file logging). - - :param instances: Collection of progress handlers to manage. - :param enabled: Whether the group is active. - """ - - def __init__( - self, - instances: ( - Iterable[BenchmarkerProgress[BenchmarkT]] - | list[BenchmarkerProgress[BenchmarkT]] - ), - enabled: bool = True, - ): - """ - Initialize progress group with handler instances. - - :param instances: Progress handler instances to coordinate. - :param enabled: Whether to enable the progress group. 
- """ - self.instances: list[BenchmarkerProgress[BenchmarkT]] = list(instances) - super().__init__(enabled=enabled) - - @property - def enabled(self) -> bool: - """Whether the progress group is currently enabled.""" - return self._enabled - - @enabled.setter - def enabled(self, value: bool): - """ - Set enabled state for group and all contained instances. - - :param value: New enabled state. - """ - self._enabled = value - for instance in self.instances: - instance.enabled = value - - async def on_initialize(self, profile: Profile): - """ - Initialize all progress handler instances. - - :param profile: Benchmark profile configuration. - """ - await asyncio.gather( - *[child.on_initialize(profile) for child in self.instances] - ) - - async def on_benchmark_start(self, strategy: SchedulingStrategy): - """ - Notify all handlers of benchmark strategy start. - - :param strategy: Scheduling strategy being executed. - """ - await asyncio.gather( - *[child.on_benchmark_start(strategy) for child in self.instances] - ) - - async def on_benchmark_update( - self, - aggregator_update: EstimatedBenchmarkState, - scheduler_state: SchedulerState, - ): - """ - Distribute benchmark updates to all handlers. - - :param aggregator_update: Current benchmark metrics and statistics. - :param scheduler_state: Current scheduler execution state. - """ - await asyncio.gather( - *[ - child.on_benchmark_update(aggregator_update, scheduler_state) - for child in self.instances - ] - ) - - async def on_benchmark_complete(self, benchmark: BenchmarkT): - """ - Notify all handlers of benchmark completion. - - :param benchmark: Completed benchmark results. - """ - await asyncio.gather( - *[child.on_benchmark_complete(benchmark) for child in self.instances] - ) - - async def on_finalize(self): - """Finalize all progress handler instances.""" - await asyncio.gather(*[child.on_finalize() for child in self.instances]) - - async def on_raw_update( - self, - profile: Profile, - aggregator_update: EstimatedBenchmarkState | None, - benchmark: BenchmarkT | None, - strategy: SchedulingStrategy, - scheduler_state: SchedulerState | None, - ): - """ - Distribute raw updates to all handlers. - - :param profile: Benchmark profile configuration. - :param aggregator_update: Current benchmark metrics and statistics. - :param benchmark: Completed benchmark if available. - :param strategy: Current scheduling strategy. - :param scheduler_state: Current scheduler execution state. - """ - await asyncio.gather( - *[ - child.on_raw_update( - profile, - aggregator_update, - benchmark, - strategy, - scheduler_state, - ) - for child in self.instances - ] - ) - class GenerativeConsoleBenchmarkerProgress( BenchmarkerProgress[GenerativeBenchmark], Live @@ -364,14 +116,14 @@ class GenerativeConsoleBenchmarkerProgress( bars in a structured console interface. """ - def __init__(self, enabled: bool = True, display_scheduler_stats: bool = False): + def __init__(self, display_scheduler_stats: bool = False): """ Initialize console progress display. :param enabled: Whether to enable progress tracking and display. :param display_scheduler_stats: Whether to display scheduler statistics. 
""" - BenchmarkerProgress.__init__(self, enabled=enabled) + BenchmarkerProgress.__init__(self) Live.__init__( self, refresh_per_second=4, diff --git a/src/guidellm/benchmark/schemas.py b/src/guidellm/benchmark/schemas.py index 41cb832f..62ae5b0e 100644 --- a/src/guidellm/benchmark/schemas.py +++ b/src/guidellm/benchmark/schemas.py @@ -157,6 +157,7 @@ def add_avg_rate_metric( if self.get(start_time_key) is None: if start_time is None: start_time = time.time() + self[start_time_key] = start_time else: self[start_time_key] = start_time or self[start_time_key] @@ -595,7 +596,6 @@ class GenerativeTextMetricsSummary(StandardBaseDict): tokens: GenerativeMetricsSummary = Field(description="") words: GenerativeMetricsSummary = Field(description="") characters: GenerativeMetricsSummary = Field(description="") - bytes: GenerativeMetricsSummary = Field(description="") @classmethod def compile( @@ -628,12 +628,6 @@ def compile( metrics.text_characters or 0 for metrics in output_metrics ], ), - bytes=GenerativeMetricsSummary.compile( - request_types=request_types, - request_times=request_times, - input_values=[metrics.text_bytes or 0 for metrics in input_metrics], - output_values=[metrics.text_bytes or 0 for metrics in output_metrics], - ), ) diff --git a/src/guidellm/benchmark/types.py b/src/guidellm/benchmark/types.py index 8fa2dbfb..983e3189 100644 --- a/src/guidellm/benchmark/types.py +++ b/src/guidellm/benchmark/types.py @@ -7,9 +7,8 @@ from typing_extensions import TypeAliasType from guidellm.benchmark.output import GenerativeBenchmarkerOutput -from guidellm.benchmark.progress import BenchmarkerProgress -__all__ = ["OutputFormatT", "ProcessorInputT", "ProgressInputT"] +__all__ = ["OutputFormatT", "ProcessorInputT"] OutputFormatT = TypeAliasType( @@ -21,7 +20,3 @@ ) ProcessorInputT = TypeAliasType("ProcessorInputT", str | Path | PreTrainedTokenizerBase) - -ProgressInputT = TypeAliasType( - "ProgressInputT", tuple[str, ...] 
| list[str] | list[BenchmarkerProgress] -) diff --git a/src/guidellm/data/loaders.py b/src/guidellm/data/loaders.py index 0d83d726..fcdea15d 100644 --- a/src/guidellm/data/loaders.py +++ b/src/guidellm/data/loaders.py @@ -12,6 +12,7 @@ from guidellm.data.deserializers import DatasetDeserializerFactory from guidellm.data.preprocessors import DataDependentPreprocessor, DatasetPreprocessor +from guidellm.logger import logger __all__ = ["DataLoader", "DatasetsIterator"] @@ -29,7 +30,7 @@ def __init__( if not data or not isinstance(data, list): raise ValueError(f"Data must be a non-empty list, got {data}.") - if data_args is None: + if not data_args: data_args = [{} for _ in data] if len(data) != len(data_args): @@ -61,15 +62,15 @@ def __init__( def __iter__(self): worker_info = torch.utils.data.get_worker_info() - modulus = worker_info.num_workers if worker_info is not None else 1 - index = worker_info.id if worker_info is not None else 0 + worker_modulus = worker_info.num_workers if worker_info is not None else 1 + worker_index = worker_info.id if worker_info is not None else 0 if self.precache is not None: for index, item in enumerate(self.precache): - if index == index % modulus: + if (index + worker_index) % worker_modulus == 0: yield item else: - yield from self.generator(modulus=modulus, offset=index) + yield from self.generator(modulus=worker_modulus, offset=worker_index) def generator( self, @@ -83,19 +84,25 @@ def generator( dataset_iters = [iter(dataset) for dataset in self.datasets] while max_items is None or gen_count < max_items: - row = {"items": [next(dataset_iter) for dataset_iter in dataset_iters]} - gen_count += 1 - - if ( - modulus is not None - and offset is not None - and (gen_count % modulus) != offset - ): - continue - - for preprocessor in self.preprocessors: - row = preprocessor(row) - yield row + try: + row = { + "items": [next(dataset_iter) for dataset_iter in dataset_iters] + } + gen_count += 1 + + if ( + modulus is not None + and offset is not None + and (gen_count % modulus) != offset + ): + continue + + for preprocessor in self.preprocessors: + row = preprocessor(row) + yield row + except Exception as err: + logger.error(f"Skipping data row due to error: {err}") + gen_count -= 1 if max_items is not None and gen_count < max_items: raise ValueError( diff --git a/src/guidellm/data/preprocessors/formatters.py b/src/guidellm/data/preprocessors/formatters.py index 76b0083b..ce0e46fc 100644 --- a/src/guidellm/data/preprocessors/formatters.py +++ b/src/guidellm/data/preprocessors/formatters.py @@ -7,12 +7,7 @@ PreprocessorRegistry, ) from guidellm.data.schemas import GenerativeDatasetColumnType -from guidellm.data.utils import ( - encode_audio_as_dict, - encode_audio_as_file, - encode_image, - encode_video, -) +from guidellm.data.utils import encode_audio, encode_image, encode_video, text_stats from guidellm.schemas import GenerationRequest, GenerationRequestArguments, UsageMetrics __all__ = [ @@ -45,30 +40,29 @@ def __init__( def __call__( self, columns: dict[GenerativeDatasetColumnType, list[Any]] ) -> GenerationRequest: - body: dict[str, Any] = {} - arguments: GenerationRequestArguments = GenerationRequestArguments(body=body) + arguments: GenerationRequestArguments = GenerationRequestArguments(body={}) input_metrics = UsageMetrics() output_metrics = UsageMetrics() # Add model if self.model is not None: - body["model"] = self.model + arguments.body["model"] = self.model # Configure streaming if self.stream: arguments.stream = True - body["stream"] = True + 
arguments.body["stream"] = True # Handle output tokens if output_tokens := sum( count for count in columns.get("output_tokens_count_column", []) if count ): output_metrics.text_tokens = output_tokens - body["max_tokens"] = output_tokens - body["stop"] = None - body["ignore_eos"] = True + arguments.body["max_tokens"] = output_tokens + arguments.body["stop"] = None + arguments.body["ignore_eos"] = True elif self.max_tokens is not None: - body["max_tokens"] = self.max_tokens + arguments.body["max_tokens"] = self.max_tokens # Handle prompt tokens if prompt_tokens := sum( @@ -84,7 +78,10 @@ def __call__( prefix = "".join(pre for pre in columns.get("prefix_column", []) if pre) text = "".join(txt for txt in columns.get("text_column", []) if txt) if prefix or text: - body["prompt"] = prefix + text + arguments.body["prompt"] = prefix + text + stats = text_stats(arguments.body["prompt"]) + input_metrics.text_characters = stats.get("num_chars") + input_metrics.text_words = stats.get("num_words") return GenerationRequest( request_type="text_completions", @@ -126,26 +123,27 @@ def __init__( def __call__( self, columns: dict[GenerativeDatasetColumnType, list[Any]] ) -> GenerationRequest: - body: dict[str, Any] = {} - arguments = GenerationRequestArguments(body=body) + arguments = GenerationRequestArguments(body={}) input_metrics = UsageMetrics() output_metrics = UsageMetrics() # Add model if self.model is not None: - body["model"] = self.model + arguments.body["model"] = self.model # Configure streaming if self.stream: arguments.stream = True - body.update({"stream": True, "stream_options": {"include_usage": True}}) + arguments.body.update( + {"stream": True, "stream_options": {"include_usage": True}} + ) # Handle output tokens if output_tokens := sum( count for count in columns.get("output_tokens_count_column", []) if count ): output_metrics.text_tokens = output_tokens - body.update( + arguments.body.update( { "max_completion_tokens": output_tokens, "stop": None, @@ -153,7 +151,7 @@ def __call__( } ) elif self.max_completion_tokens is not None: - body["max_completion_tokens"] = self.max_completion_tokens + arguments.body["max_completion_tokens"] = self.max_completion_tokens # Handle prompt tokens if prompt_tokens := sum( @@ -166,63 +164,120 @@ def __call__( arguments.model_combine(self.extras) # Build messages - body["messages"] = ( - [ - {"role": "system", "content": prefix} - for prefix in columns.get("prefix_column", []) - if prefix - ] - + [ + arguments.body["messages"] = [] + + for prefix in columns.get("prefix_column", []): + if not prefix: + continue + + stats = text_stats(prefix) + if (num_chars := stats.get("num_chars")) is not None: + input_metrics.text_characters = ( + input_metrics.text_characters or 0 + ) + num_chars + if (num_words := stats.get("num_words")) is not None: + input_metrics.text_words = (input_metrics.text_words or 0) + num_words + + arguments.body["messages"].append({"role": "system", "content": prefix}) + + for text in columns.get("text_column", []): + if not text: + continue + + stats = text_stats(text) + if (num_chars := stats.get("num_chars")) is not None: + input_metrics.text_characters = ( + input_metrics.text_characters or 0 + ) + num_chars + if (num_words := stats.get("num_words")) is not None: + input_metrics.text_words = (input_metrics.text_words or 0) + num_words + + arguments.body["messages"].append( {"role": "user", "content": [{"type": "text", "text": text}]} - for text in columns.get("text_column", []) - if text - ] - + [ + ) + + for image in 
columns.get("image_column", []): + if not image: + continue + + image_dict = encode_image(image, **self.encode_image_kwargs) + if (image_pixels := image_dict.get("image_pixels")) is not None: + input_metrics.image_pixels = ( + input_metrics.image_pixels or 0 + ) + image_pixels + if (image_bytes := image_dict.get("image_bytes")) is not None: + input_metrics.image_bytes = ( + input_metrics.image_bytes or 0 + ) + image_bytes + + arguments.body["messages"].append( { "role": "user", "content": [ - { - "type": "image_url", - "image_url": encode_image( - image, **self.encode_image_kwargs - ), - } + {"type": "image_url", "image_url": image_dict.get("image")} ], } - for image in columns.get("image_column", []) - if image - ] - + [ + ) + + for video in columns.get("video_column", []): + if not video: + continue + + video_dict = encode_video(video, **self.encode_video_kwargs) + if (video_frames := video_dict.get("video_frames")) is not None: + input_metrics.video_frames = ( + input_metrics.video_frames or 0 + ) + video_frames + if (video_seconds := video_dict.get("video_seconds")) is not None: + input_metrics.video_seconds = ( + input_metrics.video_seconds or 0.0 + ) + video_seconds + if (video_bytes := video_dict.get("video_bytes")) is not None: + input_metrics.video_bytes = ( + input_metrics.video_bytes or 0 + ) + video_bytes + + arguments.body["messages"].append( { "role": "user", "content": [ - { - "type": "video_url", - "video_url": encode_video( - video, **self.encode_video_kwargs - ), - } + {"type": "video_url", "video_url": video_dict.get("video")} ], } - for video in columns.get("video_column", []) - if video - ] - + [ + ) + + for audio in columns.get("audio_column", []): + if not audio: + continue + + audio_dict = encode_audio(audio, b64encode=True, **self.encode_audio_kwargs) + if (audio_samples := audio_dict.get("audio_samples")) is not None: + input_metrics.audio_samples = ( + input_metrics.audio_samples or 0 + ) + audio_samples + if (audio_seconds := audio_dict.get("audio_seconds")) is not None: + input_metrics.audio_seconds = ( + input_metrics.audio_seconds or 0.0 + ) + audio_seconds + if (audio_bytes := audio_dict.get("audio_bytes")) is not None: + input_metrics.audio_bytes = ( + input_metrics.audio_bytes or 0 + ) + audio_bytes + + arguments.body["messages"].append( { "role": "user", "content": [ { "type": "input_audio", - "input_audio": encode_audio_as_dict( - audio, **self.encode_audio_kwargs - ), + "input_audio": { + "data": audio_dict.get("audio"), + "format": audio_dict.get("format"), + }, } ], } - for audio in columns.get("audio_column", []) - if audio - ] - ) + ) return GenerationRequest( request_type="chat_completions", @@ -253,19 +308,18 @@ def __init__( def __call__( # noqa: C901 self, columns: dict[GenerativeDatasetColumnType, list[Any]] ) -> GenerationRequest: - body: dict[str, Any] = {} - arguments = GenerationRequestArguments(body=body, files={}) + arguments = GenerationRequestArguments(body={}, files={}) input_metrics = UsageMetrics() output_metrics = UsageMetrics() # Add model if self.model is not None: - body["model"] = self.model + arguments.body["model"] = self.model # Configure streaming if self.stream: arguments.stream = True - body.update({"stream": True, "stream_options": {"include_usage": True}}) + arguments.body["stream"] = True # Handle output tokens if output_tokens := sum( @@ -284,19 +338,36 @@ def __call__( # noqa: C901 arguments.model_combine(self.extras) # Build audio input - if audio := [aud for aud in columns.get("audio_column", []) if aud]: - 
file_name, content, mime_type = encode_audio_as_file( - audio[0], **self.encode_audio_kwargs + audio_columns = columns.get("audio_column", []) + if len(audio_columns) != 1: + raise ValueError( + f"GenerativeAudioTranscriptionRequestFormatter expects exactly " + f"one audio column, but got {len(audio_columns)}." + ) + + audio_dict = encode_audio( + audio_columns[0], b64encode=False, **self.encode_audio_kwargs + ) + input_metrics.audio_samples = audio_dict.get("audio_samples") + input_metrics.audio_seconds = audio_dict.get("audio_seconds") + input_metrics.audio_bytes = audio_dict.get("audio_bytes") + + arguments.files = { + "file": ( + audio_dict.get("file_name", "audio_input"), + audio_dict.get("audio"), + audio_dict.get("mimetype"), ) - arguments.files = {"file": (file_name, content, mime_type)} - else: - raise ValueError("No audio column found for audio transcription request.") + } # Build prompt prefix = "".join(pre for pre in columns.get("prefix_column", []) if pre) text = "".join(txt for txt in columns.get("text_column", []) if txt) if prefix or text: - body["prompt"] = prefix + text + arguments.body["prompt"] = prefix + text + stats = text_stats(arguments.body["prompt"]) + input_metrics.text_characters = stats.get("num_chars") + input_metrics.text_words = stats.get("num_words") return GenerationRequest( request_type="audio_transcriptions", diff --git a/src/guidellm/data/utils/__init__.py b/src/guidellm/data/utils/__init__.py index aac657f8..cd257898 100644 --- a/src/guidellm/data/utils/__init__.py +++ b/src/guidellm/data/utils/__init__.py @@ -1,34 +1,22 @@ from .dataset import DEFAULT_SPLITS, resolve_dataset_split from .functions import ( - download_audio, - download_image, - download_video, encode_audio, - encode_audio_as_dict, - encode_audio_as_file, encode_image, - encode_image_base64, encode_video, - encode_video_base64, get_file_format, is_url, resize_image, + text_stats, ) __all__ = [ "DEFAULT_SPLITS", - "download_audio", - "download_image", - "download_video", "encode_audio", - "encode_audio_as_dict", - "encode_audio_as_file", "encode_image", - "encode_image_base64", "encode_video", - "encode_video_base64", "get_file_format", "is_url", "resize_image", "resolve_dataset_split", + "text_stats", ] diff --git a/src/guidellm/data/utils/functions.py b/src/guidellm/data/utils/functions.py index 413b5a92..e11c5cb8 100644 --- a/src/guidellm/data/utils/functions.py +++ b/src/guidellm/data/utils/functions.py @@ -5,7 +5,6 @@ from pathlib import Path from typing import Any, Literal -import datasets import httpx import librosa import numpy as np @@ -15,19 +14,13 @@ from torch import Tensor __all__ = [ - "download_audio", - "download_image", - "download_video", "encode_audio", - "encode_audio_as_dict", - "encode_audio_as_file", "encode_image", - "encode_image_base64", "encode_video", - "encode_video_base64", "get_file_format", "is_url", "resize_image", + "text_stats", ] @@ -35,13 +28,30 @@ def is_url(text: Any) -> bool: return isinstance(text, str) and text.startswith(("http://", "https://")) +def text_stats( + text: str, +) -> dict[Literal["type", "text", "num_chars", "num_words"], str | int]: + """Compute basic text statistics.""" + num_chars = len(text) + num_words = len(text.split()) + + return { + "type": "text", + "text": text, + "num_chars": num_chars, + "num_words": num_words, + } + + def encode_image( - image: bytes | str | Path | np.ndarray | PILImage.Image | datasets.Image, + image: bytes | str | Path | np.ndarray | PILImage.Image, + width: int | None = None, + height: int | None = 
None, max_size: int | None = None, max_width: int | None = None, max_height: int | None = None, - encode_type: Literal["base64", "url"] | None = None, -) -> str: + encode_type: Literal["base64", "url"] | None = "base64", +) -> dict[Literal["type", "image", "image_pixels", "image_bytes"], str | int | None]: """ Input image types: - bytes: raw image bytes, decoded with Pillow @@ -64,71 +74,67 @@ def encode_image( - image url - "data:image/{type};base64, {data}" string """ - url = is_url(image) + if isinstance(image, str) and is_url(image): + if encode_type == "base64": + response = httpx.get(image) + response.raise_for_status() + return encode_image( + image=response.content, + max_size=max_size, + max_width=max_width, + max_height=max_height, + encode_type="base64", + ) - if ( - url - and (encode_type is None or encode_type == "url") - and (max_size is not None or max_width is not None or max_height is not None) - ): - raise ValueError("Cannot resize image when encode_type is 'url'") - elif url and (encode_type is None or encode_type == "url"): - return image - elif url and encode_type == "base64": - raise ValueError(f"Cannot convert non-url image to URL {image}") - - return encode_image_base64( - image=image, - max_size=max_size, - max_width=max_width, - max_height=max_height, - ) + if any([width, height, max_size, max_width, max_height]): + raise ValueError(f"Cannot resize image {image} when encode_type is 'url'") + return { + "type": "image_url", + "image": image, + "image_pixels": None, + "image_bytes": None, + } -def encode_image_base64( - image: bytes | str | Path | np.ndarray | PILImage.Image, - width: int | None = None, - height: int | None = None, - max_width: int | None = None, - max_height: int | None = None, - max_size: int | None = None, -) -> str: - if ( - isinstance(image, str) - and image.startswith("data:image/") - and ";base64," in image - ): - return image - - if is_url(image): - image = download_image(image) + decoded_image: PILImage.Image if isinstance(image, bytes): - image = PILImage.open(io.BytesIO(image)) - elif isinstance(image, (str, Path)): - image = PILImage.open(image) + decoded_image = PILImage.open(io.BytesIO(image)) + elif isinstance(image, str) and image.startswith("data:image/"): + _, encoded = image.split(",", 1) + image_data = base64.b64decode(encoded) + decoded_image = PILImage.open(io.BytesIO(image_data)) + elif isinstance(image, str | Path): + decoded_image = PILImage.open(image) elif isinstance(image, np.ndarray): - image = PILImage.fromarray(image) - elif not isinstance(image, PILImage.Image): - raise ValueError(f"Unsupported image type: {type(image)}") + decoded_image = PILImage.fromarray(image) + elif isinstance(image, PILImage.Image): + decoded_image = image + else: + raise ValueError(f"Unsupported image type: {type(image)} for {image}") - image = resize_image( - image, + output_image = resize_image( + decoded_image, width=width, height=height, max_width=max_width, max_height=max_height, max_size=max_size, ) - if image.mode != "RGB": - image = image.convert("RGB") + if output_image.mode != "RGB": + output_image = output_image.convert("RGB") buffer = io.BytesIO() - image.save(buffer, format="JPEG") + output_image.save(buffer, format="JPEG") image_bytes = buffer.getvalue() image_base64 = base64.b64encode(image_bytes).decode("utf-8") - return f"data:image/jpeg;base64,{image_base64}" + return { + "type": "image_base64", + "image": f"data:image/jpeg;base64,{image_base64}", + "image_pixels": output_image.width * output_image.height, + "image_bytes": 
len(image_bytes), + } def resize_image( @@ -176,16 +182,13 @@ def resize_image( return image -def download_image(url: str) -> bytes: - response = httpx.get(url) - response.raise_for_status() - return response.content - - def encode_video( video: bytes | str | Path, - encode_type: Literal["base64", "url"] | None = None, -) -> str: + encode_type: Literal["base64", "url"] | None = "base64", +) -> dict[ + Literal["type", "video", "video_frames", "video_seconds", "video_bytes"], + str | int | float | None, +]: """ Input video types: - bytes: raw video bytes @@ -202,97 +205,55 @@ def encode_video( - video url - "data:video/{type};base64, {data}" string """ - if ( - isinstance(video, str) - and is_url(video) - and (encode_type is None or encode_type == "url") - ): - return video - elif isinstance(video, str) and is_url(video) and encode_type == "base64": - raise ValueError(f"Cannot encode URL video {video}") - - return encode_video_base64(video=video) - - -def encode_video_base64(video: bytes | str | Path) -> str: - if ( - isinstance(video, str) - and video.startswith("data:video/") - and ";base64," in video - ): - return video - - video_format = "unknown" - if isinstance(video, str) and is_url(video): - video, video_format = download_video(video) - - if isinstance(video, (str, Path)): + if encode_type == "base64": + response = httpx.get(video) + response.raise_for_status() + return encode_video(video=response.content, encode_type="base64") + + return { + "type": "video_url", + "video": video, + "video_frames": None, + "video_seconds": None, + "video_bytes": None, + } + + if isinstance(video, str) and video.startswith("data:video/"): + data_str = video.split(",", 1)[1] + + return { + "type": "video_base64", + "video": video, + "video_frames": None, + "video_seconds": None, + "video_bytes": len(data_str) * 3 // 4, # base64 to bytes + } + + if isinstance(video, str | Path): path = Path(video) - video = path.read_bytes() + video_bytes = path.read_bytes() video_format = get_file_format(path) - elif not isinstance(video, bytes): - raise ValueError(f"Unsupported video type: {type(video)}") + elif isinstance(video, bytes): + video_bytes = video + video_format = "unknown" + else: + raise ValueError(f"Unsupported video type: {type(video)} for {video}") video_base64 = base64.b64encode(video).decode("utf-8") - return f"data:video/{video_format};base64,{video_base64}" - - -def download_video(url: str) -> tuple[bytes, str]: - response = httpx.get(url) - response.raise_for_status() - return response.content, get_file_format(url) - - -def encode_audio_as_dict( - audio: Any, - sample_rate: int = 16000, - encode_sample_rate: int = 16000, - max_duration: float | None = None, - mono: bool = True, - audio_format: str = "mp3", - bitrate: str = "64k", -) -> dict[Literal["data", "format"], Any]: - content, _, file_format = encode_audio( - audio=audio, - sample_rate=sample_rate, - encode_sample_rate=encode_sample_rate, - max_duration=max_duration, - mono=mono, - audio_format=audio_format, - bitrate=bitrate, - ) return { - "data": base64.b64encode(content).decode("utf-8"), - "format": file_format, + "type": "video_base64", + "video": f"data:video/{video_format};base64,{video_base64}", + "video_frames": None, + "video_seconds": None, + "video_bytes": len(video_bytes), } -def encode_audio_as_file( - audio: Any, - sample_rate: int = 16000, - encode_sample_rate: int = 16000, - max_duration: float | None = None, - mono: bool = True, - audio_format: str = "mp3", - bitrate: str = "64k", -) -> tuple[str, bytes, str]: - content, 
file_name, file_format = encode_audio( - audio=audio, - sample_rate=sample_rate, - encode_sample_rate=encode_sample_rate, - max_duration=max_duration, - mono=mono, - audio_format=audio_format, - bitrate=bitrate, - ) - - return file_name, content, f"audio/{file_format}" - - -def encode_audio( # noqa: PLR0912, PLR0911, C901 +def encode_audio( audio: Any, + b64encode: bool, sample_rate: int = 16000, file_name: str = "audio.wav", encode_sample_rate: int = 16000, @@ -300,38 +261,18 @@ def encode_audio( # noqa: PLR0912, PLR0911, C901 mono: bool = True, audio_format: str = "mp3", bitrate: str = "64k", -) -> tuple[bytes, str, str]: - audio_buffer: io.BytesIO = io.BytesIO() - - if hasattr(audio, "get_samples_played_in_range"): - # HF datasets Audio object - audio_samples = audio.get_samples_played_in_range( - start_seconds=0.0, - stop_seconds=None - if max_duration is None - else min(max_duration, audio.metadata.duration_seconds_from_header), - ) - return encode_audio( - audio=audio_samples.data.numpy(), - sample_rate=audio_samples.sample_rate, - encode_sample_rate=encode_sample_rate, - max_duration=max_duration, - mono=mono, - audio_format=audio_format, - bitrate=bitrate, - ) - - if isinstance(audio, Tensor): - return encode_audio( - audio=audio.numpy(), - sample_rate=sample_rate, - encode_sample_rate=encode_sample_rate, - max_duration=max_duration, - mono=mono, - audio_format=audio_format, - bitrate=bitrate, - ) - +) -> dict[ + Literal[ + "type", + "audio", + "format", + "mimetype", + "audio_samples", + "audio_seconds", + "audio_bytes", + ], + str | int | float | None, +]: if isinstance(audio, dict): sample_rate = audio.get("sample_rate", audio.get("sampling_rate", sample_rate)) if "data" not in audio and "url" not in audio: @@ -348,70 +289,65 @@ def encode_audio( # noqa: PLR0912, PLR0911, C901 bitrate=bitrate, ) - if isinstance(audio, str) and is_url(audio): - audio_bytes, file_name, _ = download_audio(audio) - return encode_audio( - audio=audio_bytes, - sample_rate=sample_rate, - encode_sample_rate=encode_sample_rate, - max_duration=max_duration, - mono=mono, - audio_format=audio_format, - bitrate=bitrate, - ) - - if isinstance(audio, (str, Path)): - if not Path(audio).exists(): - raise ValueError(f"Audio file does not exist: {audio}") - file_name = get_file_name(audio) - data, sample_rate = soundfile.read(str(audio), dtype="float32") + audio_numpy: np.ndarray - return encode_audio( - audio=data, - sample_rate=sample_rate, - encode_sample_rate=encode_sample_rate, - max_duration=max_duration, - mono=mono, - audio_format=audio_format, - bitrate=bitrate, + if hasattr(audio, "get_samples_played_in_range"): + # HF datasets Audio object + audio_samples = audio.get_samples_played_in_range( + start_seconds=0.0, + stop_seconds=( + None + if max_duration is None + else min(max_duration, audio.metadata.duration_seconds_from_header) + ), ) - - if isinstance(audio, bytes): - data, sample_rate = soundfile.read(io.BytesIO(audio), dtype="float32") - - return encode_audio( - audio=data, - sample_rate=sample_rate, - encode_sample_rate=encode_sample_rate, - max_duration=max_duration, - mono=mono, - audio_format=audio_format, - bitrate=bitrate, + audio_numpy = np.array(audio_samples.data) + elif isinstance(audio, Tensor): + audio_numpy = audio.numpy() + elif isinstance(audio, str | Path): + if is_url(audio): + response = httpx.get(audio) + response.raise_for_status() + audio_stream = response.content + file_name = get_file_name(audio) + else: + if not Path(audio).exists(): + raise ValueError(f"Audio file does not 
exist: {audio}") + file_name = get_file_name(audio) + audio_stream = Path(audio).read_bytes() + + audio_numpy, sample_rate = soundfile.read( + io.BytesIO(audio_stream), dtype="float32" ) - - if not isinstance(audio, np.ndarray): + elif isinstance(audio, bytes): + audio_numpy, sample_rate = soundfile.read(io.BytesIO(audio), dtype="float32") + elif isinstance(audio, np.ndarray): + audio_numpy = audio + else: raise ValueError(f"Unsupported audio type: {type(audio)}") if sample_rate != encode_sample_rate: - audio = librosa.resample( - audio.astype(np.float32), orig_sr=sample_rate, target_sr=encode_sample_rate + audio_numpy = librosa.resample( + audio_numpy.astype(np.float32), + orig_sr=sample_rate, + target_sr=encode_sample_rate, ) sample_rate = encode_sample_rate - audio = librosa.to_mono(audio) + audio_numpy = librosa.to_mono(audio_numpy) if ( max_duration is not None and max_duration > 0 - and (max_samples := int(max_duration * sample_rate)) < len(audio) + and (max_samples := int(max_duration * sample_rate)) < len(audio_numpy) ): - audio = audio[:max_samples] + audio_numpy = audio_numpy[max_samples:] audio_buffer = io.BytesIO() if audio_format.lower() == "mp3": wav = io.BytesIO() - soundfile.write(wav, audio, sample_rate, format="WAV", subtype="PCM_16") + soundfile.write(wav, audio_numpy, sample_rate, format="WAV", subtype="PCM_16") wav.seek(0) sound = AudioSegment.from_wav(wav) @@ -420,15 +356,22 @@ def encode_audio( # noqa: PLR0912, PLR0911, C901 soundfile.write(audio_buffer, audio, sample_rate, format=audio_format.upper()) audio_buffer.seek(0) - return audio_buffer.read(), file_name, audio_format.lower() - + decoded_audio = audio_buffer.read() -def download_audio(url: str) -> tuple[bytes, str, str]: - response = httpx.get(url) - response.raise_for_status() - content = response.content - - return content, get_file_name(url), get_file_format(url) + return { + "type": "audio_base64" if b64encode else "audio_file", + "audio": ( + base64.b64encode(decoded_audio).decode("utf-8") + if b64encode + else decoded_audio + ), + "file_name": file_name, + "format": audio_format, + "mimetype": f"audio/{audio_format}", + "audio_samples": len(audio_numpy), + "audio_seconds": len(audio_numpy) / sample_rate, + "audio_bytes": len(decoded_audio), + } def get_file_name(path: Path | str) -> str: diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index 41c41f21..21394668 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -22,6 +22,7 @@ from multiprocessing.synchronize import Barrier, Event from typing import Generic, NamedTuple +from guidellm.logger import logger from guidellm.scheduler.constraints import Constraint, RequestsExhaustedConstraint from guidellm.scheduler.schemas import ( BackendInterface, @@ -349,6 +350,7 @@ async def request_updates( """ while True: if self.error_event.is_set(): # type: ignore[union-attr] + logger.error("Error event set in WorkerProcessGroup") raise RuntimeError( "error_event is set in WorkerProcessGroup, " "indicating an error occurred in one of the worker processes." 
@@ -507,40 +509,47 @@ def _iter() -> Iterator[RequestT | MultiTurnRequestT[RequestT]]: while True: yield from cycle_requests - count = 0 - - for request in _iter(): - count += 1 - - if hasattr(request, "request_id"): - request_id = request.request_id - elif hasattr(request, "id"): - request_id = request.id - else: - request_id = str(uuid.uuid4()) - request_info: RequestInfo = RequestInfo( - request_id=request_id, - status="queued", - scheduler_process_id=0, - scheduler_start_time=self.start_time, + try: + count = 0 + request_iter = _iter() + for request in request_iter: + count += 1 + + if hasattr(request, "request_id"): + request_id = request.request_id + elif hasattr(request, "id"): + request_id = request.id + else: + request_id = str(uuid.uuid4()) + request_info: RequestInfo = RequestInfo( + request_id=request_id, + status="queued", + scheduler_process_id=0, + scheduler_start_time=self.start_time, + ) + state_update = self._locked_update(request_info) + request_info.timings.queued = time.time() + + yield (request, request_info) + + if state_update.stop_queueing: + self.stop_send_requests_event.set() + return + + # Reached the end, inject a RequestsExhaustedConstraint to record + self._locked_update( + info=None, + requests_exhausted={ + "requests_exhausted": RequestsExhaustedConstraint( + num_requests=count + ) + }, ) - state_update = self._locked_update(request_info) - request_info.timings.queued = time.time() - - yield (request, request_info) - - if state_update.stop_queueing: - self.stop_send_requests_event.set() - return - - # Reached the end, inject a RequestsExhaustedConstraint to record - self._locked_update( - info=None, - requests_exhausted={ - "requests_exhausted": RequestsExhaustedConstraint(num_requests=count) - }, - ) - self.stop_send_requests_event.set() + self.stop_send_requests_event.set() + except Exception as err: + logger.error(f"Error generating requests: {err}") + self.error_event.set() + raise err def received_callback( self, @@ -565,31 +574,40 @@ def received_callback( :param update: Tuple containing response, request, and request info :return: Updated tuple with injected scheduler state """ - response, request, request_info = update - state_update = self._locked_update(info=request_info) + try: + response, request, request_info = update + state_update = self._locked_update(info=request_info) - # Check if we need to tell workers to stop pulling new requests - # based on no more requests sent and all requests removed from queue - if ( - state_update.state.queued_requests == 0 - and self.stop_send_requests_event.is_set() - and not self.requests_generated_event.is_set() - ): - self.requests_generated_event.set() + # Check if we need to tell workers to stop pulling new requests + # based on no more requests sent and all requests removed from queue + if ( + state_update.state.queued_requests == 0 + and self.stop_send_requests_event.is_set() + and not self.requests_generated_event.is_set() + ): + self.requests_generated_event.set() - # Check if we need to tell workers to stop processing requests (constraints) - if state_update.stop_processing and not self.constraint_reached_event.is_set(): - self.constraint_reached_event.set() + # Check if we need to tell workers to stop processing requests (constraints) + if ( + state_update.stop_processing + and not self.constraint_reached_event.is_set() + ): + self.constraint_reached_event.set() - # Check if all requests have been processed and can shutdown - if ( - state_update.state.processed_requests == 
state_update.state.created_requests - and self.stop_send_requests_event.is_set() - and self.requests_generated_event.is_set() - and self.constraint_reached_event.is_set() - and not self.shutdown_event.is_set() - ): - self.shutdown_event.set() + # Check if all requests have been processed and can shutdown + if ( + state_update.state.processed_requests + == state_update.state.created_requests + and self.stop_send_requests_event.is_set() + and self.requests_generated_event.is_set() + and self.constraint_reached_event.is_set() + and not self.shutdown_event.is_set() + ): + self.shutdown_event.set() + except Exception as err: + logger.error(f"Error processing received update: {err}") + self.error_event.set() + raise err return ( response, diff --git a/src/guidellm/schemas/request.py b/src/guidellm/schemas/request.py index 3538ce0a..9e9189fc 100644 --- a/src/guidellm/schemas/request.py +++ b/src/guidellm/schemas/request.py @@ -114,9 +114,6 @@ class UsageMetrics(StandardBaseDict): text_characters: int | None = Field( default=None, description="Number of text characters processed/generated." ) - text_bytes: int | None = Field( - default=None, description="Number of text bytes processed/generated." - ) # Vision image stats image_tokens: int | None = Field( From 6adf7931864677213491d7a523913f962f6927d9 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Tue, 14 Oct 2025 15:21:18 -0400 Subject: [PATCH 84/90] Update src/guidellm/backends/openai.py Co-authored-by: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> Signed-off-by: Mark Kurtz --- src/guidellm/backends/openai.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/guidellm/backends/openai.py b/src/guidellm/backends/openai.py index 9fac16b5..1e74fc6e 100644 --- a/src/guidellm/backends/openai.py +++ b/src/guidellm/backends/openai.py @@ -228,7 +228,7 @@ async def resolve( raise RuntimeError("Backend not started up for process.") if history is not None: - raise NotImplementedError("Multi-turn requests not yet supported") + raise NotImplementedError("Multi-turn requests not yet supported") response_handler = self._resolve_response_handler( request_type=request.request_type From 687b52fd2ff96ee643220bbb4cc9aea12edd5646 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Tue, 14 Oct 2025 15:35:29 -0400 Subject: [PATCH 85/90] Updates from review for multi modal data --- src/guidellm/backends/response_handlers.py | 67 +++++++++++----------- src/guidellm/benchmark/entrypoints.py | 2 +- 2 files changed, 34 insertions(+), 35 deletions(-) diff --git a/src/guidellm/backends/response_handlers.py b/src/guidellm/backends/response_handlers.py index 44c949e6..b7bd06ad 100644 --- a/src/guidellm/backends/response_handlers.py +++ b/src/guidellm/backends/response_handlers.py @@ -1,11 +1,10 @@ """ Response handlers for processing API responses from different generation backends. -This module provides a pluggable system for handling responses from various language -model backends, supporting both streaming and non-streaming responses. Each handler -implements the GenerationResponseHandler protocol to parse API responses, extract -usage metrics, and convert them into standardized GenerationResponse objects for the -benchmark system. +Provides a pluggable system for handling responses from language model backends, +supporting both streaming and non-streaming responses. Each handler implements the +GenerationResponseHandler protocol to parse API responses, extract usage metrics, +and convert them into standardized GenerationResponse objects. 
""" from __future__ import annotations @@ -26,11 +25,11 @@ class GenerationResponseHandler(Protocol): """ - Protocol defining the interface for handling generation API responses. + Protocol for handling generation API responses. - Response handlers implement this protocol to process both streaming and - non-streaming responses from different backend APIs, converting them into - standardized GenerationResponse objects with consistent metrics extraction. + Defines the interface for processing both streaming and non-streaming responses + from backend APIs, converting them into standardized GenerationResponse objects + with consistent metrics extraction. """ def compile_non_streaming( @@ -39,7 +38,7 @@ def compile_non_streaming( """ Process a complete non-streaming API response. - :param request: The original generation request + :param request: Original generation request :param response: Raw API response data from the backend :return: Standardized GenerationResponse with extracted metrics """ @@ -58,7 +57,7 @@ def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: """ Compile accumulated streaming data into a final response. - :param request: The original generation request + :param request: Original generation request :return: Standardized GenerationResponse with extracted metrics """ ... @@ -68,9 +67,9 @@ class GenerationResponseHandlerFactory(RegistryMixin[type[GenerationResponseHand """ Factory for registering and creating response handlers by backend type. - Provides a registry-based system for associating handler classes with specific - backend API types, enabling automatic selection of the appropriate handler - for processing responses from different generation services. + Registry-based system for associating handler classes with specific backend API + types, enabling automatic selection of the appropriate handler for processing + responses from different generation services. """ @@ -79,9 +78,9 @@ class TextCompletionsResponseHandler(GenerationResponseHandler): """ Response handler for OpenAI-style text completion endpoints. - Processes responses from text completion APIs that return generated text - in the 'choices' array with 'text' fields. Handles both streaming and - non-streaming responses, extracting usage metrics for input and output tokens. + Processes responses from text completion APIs that return generated text in the + 'choices' array with 'text' fields. Handles both streaming and non-streaming + responses, extracting usage metrics for input and output tokens. Example: :: @@ -105,7 +104,7 @@ def compile_non_streaming( """ Process a complete text completion response. - :param request: The original generation request + :param request: Original generation request :param response: Complete API response containing choices and usage data :return: Standardized GenerationResponse with extracted text and metrics """ @@ -151,7 +150,7 @@ def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: """ Compile accumulated streaming text chunks into a final response. - :param request: The original generation request + :param request: Original generation request :return: Standardized GenerationResponse with concatenated text and metrics """ input_metrics, output_metrics = self.extract_metrics(self.streaming_usage) @@ -171,7 +170,7 @@ def extract_line_data(self, line: str) -> dict[str, Any] | None: Extract JSON data from a streaming response line. 
:param line: Raw line from the streaming response - :return: Parsed JSON data as a dictionary, or None if line is invalid + :return: Parsed JSON data as dictionary, or None if line indicates completion """ if line == "data: [DONE]": return None @@ -190,7 +189,7 @@ def extract_choices_and_usage( Extract choices and usage data from the API response. :param response: Complete API response containing choices and usage data - :return: Tuple of (choices list, usage dictionary) + :return: Tuple of choices list and usage dictionary """ return response.get("choices", []), response.get("usage", {}) @@ -201,7 +200,7 @@ def extract_metrics( Extract input and output usage metrics from API response usage data. :param usage: Usage data dictionary from API response - :return: Tuple of (input_metrics, output_metrics) as UsageMetrics objects + :return: Tuple of input_metrics and output_metrics as UsageMetrics objects """ if not usage: return UsageMetrics(), UsageMetrics() @@ -236,9 +235,9 @@ class ChatCompletionsResponseHandler(TextCompletionsResponseHandler): """ Response handler for OpenAI-style chat completion endpoints. - Extends TextCompletionsResponseHandler to handle chat completion responses - where generated text is nested within message objects in the choices array. - Processes both streaming and non-streaming chat completion responses. + Extends TextCompletionsResponseHandler to handle chat completion responses where + generated text is nested within message objects in the choices array. Processes + both streaming and non-streaming chat completion responses. """ def compile_non_streaming( @@ -247,10 +246,10 @@ def compile_non_streaming( """ Process a complete chat completion response. - Extracts content from the message object within choices, handling the - nested structure specific to chat completion endpoints. + Extracts content from the message object within choices, handling the nested + structure specific to chat completion endpoints. - :param request: The original generation request + :param request: Original generation request :param response: Complete API response containing choices and usage data :return: Standardized GenerationResponse with extracted content and metrics """ @@ -271,8 +270,8 @@ def add_streaming_line(self, line: str) -> int | None: """ Process a single line from a chat completion streaming response. - Handles the chat completion specific delta structure where content - is nested within delta objects in the streaming response chunks. + Handles the chat completion specific delta structure where content is nested + within delta objects in the streaming response chunks. :param line: Raw SSE line from the streaming response :return: 1 if content was extracted, 0 if line ignored, None if done @@ -296,7 +295,7 @@ def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: """ Compile accumulated streaming chat completion content into a final response. - :param request: The original generation request + :param request: Original generation request :return: Standardized GenerationResponse with concatenated content and metrics """ input_metrics, output_metrics = self.extract_metrics(self.streaming_usage) @@ -349,7 +348,7 @@ def compile_non_streaming( Extracts transcribed or translated text and audio-specific usage metrics including processing duration and token counts for audio content. 
- :param request: The original generation request + :param request: Original generation request :param response: Complete API response containing text and usage data :return: Standardized GenerationResponse with extracted text and metrics """ @@ -412,7 +411,7 @@ def compile_streaming(self, request: GenerationRequest) -> GenerationResponse: """ Compile accumulated streaming audio text into a final response. - :param request: The original generation request + :param request: Original generation request :return: Standardized GenerationResponse with concatenated text and metrics """ input_metrics, output_metrics = self.extract_metrics(self.streaming_usage) @@ -437,7 +436,7 @@ def extract_metrics( in addition to standard text token counts. :param usage: Usage data dictionary from audio API response - :return: Tuple of (input_metrics, output_metrics) as UsageMetrics objects + :return: Tuple of input_metrics and output_metrics as UsageMetrics objects """ if not usage: return UsageMetrics(), UsageMetrics() diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 18768216..61dfa680 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -412,7 +412,7 @@ async def reimport_benchmarks_report( ) -> tuple[GenerativeBenchmarksReport, dict[str, Any]]: """ The command-line entry point for re-importing and displaying an - existing benchmarks report. Can also specify + existing benchmarks report. Can also specify an output format. Assumes the file provided exists. """ console = Console() From b162fb35f2f167841a1c2f220066e8b3a52b8dac Mon Sep 17 00:00:00 2001 From: Jared O'Connell <46976761+jaredoconnell@users.noreply.github.com> Date: Tue, 14 Oct 2025 15:44:39 -0400 Subject: [PATCH 86/90] Revert "Features/add tooltip to line chart" (#409) Reverts vllm-project/guidellm#392 That PR was rebased without using the new data types in the refactor branch. Due to it breaking things, it makes most sense to revert it and later submit a new one for the feature. 
--- src/guidellm/presentation/data_models.py | 24 ++-------------- .../MetricLine/MetricLine.component.tsx | 28 ++----------------- .../MetricsSummary.component.tsx | 2 +- src/ui/lib/store/benchmarksWindowData.ts | 10 ------- .../benchmarks/benchmarks.interfaces.ts | 1 - .../slices/benchmarks/benchmarks.selectors.ts | 25 +++-------------- tests/unit/presentation/test_data_models.py | 10 +------ 7 files changed, 11 insertions(+), 89 deletions(-) diff --git a/src/guidellm/presentation/data_models.py b/src/guidellm/presentation/data_models.py index 2401b3ef..ff2863b4 100644 --- a/src/guidellm/presentation/data_models.py +++ b/src/guidellm/presentation/data_models.py @@ -5,8 +5,6 @@ from pydantic import BaseModel, computed_field -from guidellm.scheduler.strategy import SchedulingStrategy - if TYPE_CHECKING: from guidellm.benchmark import GenerativeBenchmark @@ -214,30 +212,12 @@ class BenchmarkDatum(BaseModel): ttft: TabularDistributionSummary throughput: TabularDistributionSummary time_per_request: TabularDistributionSummary - strategy_display_str: str - - @classmethod - def get_strategy_display_str(cls, strategy: SchedulingStrategy): - strategy_type = strategy if isinstance(strategy, str) else strategy.type_ - strategy_instance = ( - strategy if isinstance(strategy, SchedulingStrategy) else None - ) - - if strategy_type == "concurrent": - rate = f"@{strategy.streams}" if strategy_instance else "@##" # type: ignore[attr-defined] - elif strategy_type in ("constant", "poisson"): - rate = f"@{strategy.rate:.2f}" if strategy_instance else "@#.##" # type: ignore[attr-defined] - else: - rate = "" - return f"{strategy_type}{rate}" @classmethod def from_benchmark(cls, bm: "GenerativeBenchmark"): - rps = bm.metrics.requests_per_second.successful.mean return cls( - strategy_display_str=cls.get_strategy_display_str(bm.args.strategy), - requests_per_second=rps, - itl=TabularDistributionSummary.from_distribution_summary( + requests_per_second=bm.metrics.requests_per_second.successful.mean, + tpot=TabularDistributionSummary.from_distribution_summary( bm.metrics.inter_token_latency_ms.successful ), ttft=TabularDistributionSummary.from_distribution_summary( diff --git a/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx b/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx index eb123593..8b1b4df2 100644 --- a/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx +++ b/src/ui/lib/components/Charts/MetricLine/MetricLine.component.tsx @@ -1,6 +1,5 @@ -import { Typography, useTheme } from '@mui/material'; -import { PointTooltipProps, ResponsiveLine } from '@nivo/line'; -import { BasicTooltip } from '@nivo/tooltip'; +import { useTheme } from '@mui/material'; +import { ResponsiveLine } from '@nivo/line'; import React, { FC } from 'react'; import { useColor } from '@/lib/hooks/useColor'; @@ -50,30 +49,11 @@ export const Component: FC = ({ reverse: false, }; } - type PointTooltipPropsWithLabel = PointTooltipProps & { - point: { - data: { - label: string; - }; - }; - }; return ( ( - - {(point as PointTooltipPropsWithLabel).point.data.label} - - } - color={point.point.color} - enableChip={true} - /> - )} - pointSize={10} colors={[selectedColor]} margin={{ top: 20, right: 10, bottom: 20, left: 35.5 }} xScale={{ type: 'linear', min: minX }} @@ -112,6 +92,7 @@ export const Component: FC = ({ }} enableGridX={false} enableGridY={false} + pointSize={0} useMesh={true} layers={[ CustomAxes, @@ -134,9 +115,6 @@ export const Component: FC = ({ ), 'axes', 'lines', - 'points', - 'markers', - 
'mesh', ]} theme={lineTheme} /> diff --git a/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx b/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx index 9530d9e7..0d804f5c 100644 --- a/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx +++ b/src/ui/lib/components/MetricsSummary/MetricsSummary.component.tsx @@ -102,7 +102,7 @@ export const Component = () => { return ( <> - + diff --git a/src/ui/lib/store/benchmarksWindowData.ts b/src/ui/lib/store/benchmarksWindowData.ts index b4af5063..a589e8ed 100644 --- a/src/ui/lib/store/benchmarksWindowData.ts +++ b/src/ui/lib/store/benchmarksWindowData.ts @@ -1,6 +1,5 @@ export const benchmarksScript = `window.benchmarks = [ { - strategyDisplayStr: "synchronous", requestsPerSecond: 11.411616848282272, tpot: { mean: 8.758024845683707, @@ -172,7 +171,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@36.28", requestsPerSecond: 36.289181300710815, tpot: { mean: 588.0161376137819, @@ -344,7 +342,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@20.75", requestsPerSecond: 20.752070927855794, tpot: { mean: 116.28360712595156, @@ -516,7 +513,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@26.81", requestsPerSecond: 26.81917480361788, tpot: { mean: 299.7306064613554, @@ -688,7 +684,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@26.82", requestsPerSecond: 26.823988819498975, tpot: { mean: 683.8011571339198, @@ -860,7 +855,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@24.50", requestsPerSecond: 24.50047903792646, tpot: { mean: 742.9258901891964, @@ -1032,7 +1026,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@25.61", requestsPerSecond: 25.617829792196602, tpot: { mean: 663.3098317044122, @@ -1204,7 +1197,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@37.02", requestsPerSecond: 37.02892550982192, tpot: { mean: 606.4144710877113, @@ -1376,7 +1368,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "constant@37.29", requestsPerSecond: 37.29183354201869, tpot: { mean: 603.3237551205925, @@ -1548,7 +1539,6 @@ export const benchmarksScript = `window.benchmarks = [ }, }, { - strategyDisplayStr: "throughput", requestsPerSecond: 37.45318312972309, tpot: { mean: 600.7204526769262, diff --git a/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts b/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts index 6c01d5e2..602ae17e 100644 --- a/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts +++ b/src/ui/lib/store/slices/benchmarks/benchmarks.interfaces.ts @@ -27,7 +27,6 @@ export interface BenchmarkMetrics { export interface Benchmark extends BenchmarkMetrics { requestsPerSecond: number; - strategyDisplayStr: string; } export type Benchmarks = Benchmark[]; diff --git a/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts b/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts index d3da9bf9..53d54f40 100644 --- a/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts +++ b/src/ui/lib/store/slices/benchmarks/benchmarks.selectors.ts @@ -11,18 +11,6 @@ import { selectSloState } from '../slo/slo.selectors'; export const selectBenchmarks = (state: RootState) => state.benchmarks.data; -const 
getUnitsByMetric = (metric: string) => { - switch (metric) { - case 'ttft': - case 'tpot': - return 'ms'; - case 'timePerRequest': - return 'sec'; - case 'throughput': - return 'tok/s'; - } -}; - export const selectMetricsSummaryLineData = createSelector( [selectBenchmarks, selectSloState], (benchmarks, sloState) => { @@ -30,10 +18,8 @@ export const selectMetricsSummaryLineData = createSelector( ?.slice() ?.sort((bm1, bm2) => (bm1.requestsPerSecond > bm2.requestsPerSecond ? 1 : -1)); const selectedPercentile = sloState.enforcedPercentile; - interface PointWithLabel extends Point { - label: string; - } - const lineData: { [K in keyof BenchmarkMetrics]: PointWithLabel[] } = { + + const lineData: { [K in keyof BenchmarkMetrics]: Point[] } = { ttft: [], tpot: [], timePerRequest: [], @@ -46,17 +32,14 @@ export const selectMetricsSummaryLineData = createSelector( 'throughput', ]; metrics.forEach((metric) => { - const data: PointWithLabel[] = []; + const data: Point[] = []; sortedByRPS?.forEach((benchmark) => { const percentile = benchmark[metric].percentileRows.find( (p) => p.percentile === selectedPercentile ); - const yValue = percentile?.value ?? 0; - const units = getUnitsByMetric(metric); data.push({ x: benchmark.requestsPerSecond, - y: yValue, - label: `${benchmark.strategyDisplayStr} ${formatNumber(yValue)} ${units}`, + y: percentile?.value ?? 0, }); }); diff --git a/tests/unit/presentation/test_data_models.py b/tests/unit/presentation/test_data_models.py index e879406d..c1663c43 100644 --- a/tests/unit/presentation/test_data_models.py +++ b/tests/unit/presentation/test_data_models.py @@ -1,7 +1,6 @@ import pytest -from guidellm.presentation.data_models import BenchmarkDatum, Bucket -from tests.unit.mock_benchmark import mock_generative_benchmark +from guidellm.presentation.data_models import Bucket @pytest.mark.smoke @@ -19,10 +18,3 @@ def test_bucket_from_data(): assert buckets[1].value == 8.0 assert buckets[1].count == 5 assert bucket_width == 1 - - -@pytest.mark.smoke -def test_from_benchmark_includes_strategy_display_str(): - mock_bm = mock_generative_benchmark() - bm = BenchmarkDatum.from_benchmark(mock_bm) - assert bm.strategy_display_str == "synchronous" From e95007d7f3bc11619c4d7e70dacc2f7434fd93dd Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Wed, 15 Oct 2025 12:51:12 -0400 Subject: [PATCH 87/90] Fixes for constant rate benchmarking race condition, simplfications, and minor bug fixes for stats accumulation and data loading --- src/guidellm/benchmark/benchmarker.py | 4 +- src/guidellm/benchmark/schemas.py | 236 +++++--- src/guidellm/data/loaders.py | 2 +- src/guidellm/scheduler/__init__.py | 17 +- src/guidellm/scheduler/environments.py | 72 +-- src/guidellm/scheduler/scheduler.py | 49 +- src/guidellm/scheduler/schemas.py | 106 ++-- src/guidellm/scheduler/strategies.py | 717 +++++++++---------------- src/guidellm/scheduler/worker.py | 147 +++-- src/guidellm/scheduler/worker_group.py | 81 ++- 10 files changed, 672 insertions(+), 759 deletions(-) diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index ed9d789b..6a5a5627 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -109,16 +109,18 @@ async def run( ) estimated_state = EstimatedBenchmarkState() scheduler_state = None + scheduler: Scheduler[RequestT, ResponseT] = Scheduler() async for ( response, request, request_info, scheduler_state, - ) in Scheduler[RequestT, ResponseT]().run( + ) in scheduler.run( requests=requests, backend=backend, 
strategy=strategy, + startup_duration=warmup if warmup and warmup >= 1 else 0.0, env=environment, **constraints or {}, ): diff --git a/src/guidellm/benchmark/schemas.py b/src/guidellm/benchmark/schemas.py index 62ae5b0e..2f2d8f98 100644 --- a/src/guidellm/benchmark/schemas.py +++ b/src/guidellm/benchmark/schemas.py @@ -56,7 +56,6 @@ StatusBreakdown, StatusDistributionSummary, ) -from guidellm.utils.pydantic_utils import StandardBaseDict __all__ = [ "Benchmark", @@ -127,7 +126,7 @@ def add_avg_metric( self[total_key] = self.get(total_key, 0) + value self[count_key] = self.get(count_key, 0) + count - average = self[total_key] / self[count_key] + average = self[total_key] / self[count_key] if self[count_key] > 0 else 0.0 self.set_metric( group=group, key=key, @@ -193,16 +192,19 @@ def add_time_averaged_metric( time_avg_numerator_key = f"{group}_{key}_time_avg_numerator" time_avg_denominator_key = f"{group}_{key}_time_avg_denominator" last_recorded_time_key = f"{group}_{key}_last_recorded_time" + last_recorded_value_key = f"{group}_{key}_last_recorded_value" if last_recorded_time_key not in self: self[last_recorded_time_key] = recorded_time + self[last_recorded_value_key] = value self[time_avg_numerator_key] = value self[time_avg_denominator_key] = 0.0 else: time_delta = recorded_time - self[last_recorded_time_key] - self[time_avg_numerator_key] += value * time_delta + self[time_avg_numerator_key] += self[last_recorded_value_key] * time_delta self[time_avg_denominator_key] += time_delta self[last_recorded_time_key] = recorded_time + self[last_recorded_value_key] = value if self[time_avg_denominator_key] > 0: average = self[time_avg_numerator_key] / self[time_avg_denominator_key] @@ -776,6 +778,9 @@ class GenerativeMetrics(StandardBaseDict): request_latency: StatusDistributionSummary = Field( description="Distribution of request latencies for completed requests" ) + request_streaming_iterations_count: StatusDistributionSummary = Field( + description="Distribution of stream iterations for completed requests" + ) # General token stats prompt_token_count: StatusDistributionSummary = Field( @@ -796,9 +801,15 @@ class GenerativeMetrics(StandardBaseDict): inter_token_latency_ms: StatusDistributionSummary = Field( description="Distribution of inter-token latencies in milliseconds" ) + output_tokens_wo_first_per_iteration: StatusDistributionSummary = Field( + description="Distribution of output tokens (without first) generated per streaming iteration" + ) output_tokens_per_second: StatusDistributionSummary = Field( description="Distribution of output token generation rates" ) + output_tokens_per_iteration: StatusDistributionSummary = Field( + description="Distribution of output tokens generated per streaming iteration" + ) tokens_per_second: StatusDistributionSummary = Field( description="Distribution of total token throughput including prompt and output" ) @@ -818,13 +829,42 @@ def update_estimate( request_info: RequestInfo, scheduler_state: SchedulerState, ): - # Always track concurrency - state.add_time_averaged_metric( - group=EstimatedBenchmarkState.benchmark_metrics_group, - key="concurrency_requests", - value=scheduler_state.processing_requests, + benchmark_start_time = scheduler_state.start_time + request_start_time = ( + request_info.timings.request_start or request_info.timings.resolve_start + ) + request_end_time = ( + request_info.timings.request_end or request_info.timings.resolve_end + ) + event_occurence_time = ( + request_info.timings.queued + if request_info.status == "queued" + else ( 
+ request_info.timings.dequeued + if request_info.status == "pending" + else request_start_time + if request_info.status == "in_progress" + else request_end_time + ) + ) + benchmark_duration = ( + event_occurence_time - benchmark_start_time + if event_occurence_time + else None + ) + request_duration = ( + request_end_time - request_start_time if request_end_time else None ) + # Always track concurrency + if event_occurence_time is not None: + state.add_time_averaged_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="concurrency_requests", + value=scheduler_state.processing_requests, + recorded_time=event_occurence_time, + ) + if request_info.status not in {"completed", "errored", "cancelled"}: return @@ -833,9 +873,6 @@ def update_estimate( key="updated", value=True, ) - start_time = scheduler_state.start_time - end_time = request_info.timings.request_end or request_info.timings.resolve_end - duration = end_time - start_time if end_time else None for prefix in (request_info.status, "total"): requests_count = ( @@ -847,8 +884,18 @@ def update_estimate( if prefix == "cancelled" else scheduler_state.processed_requests ) + input_tokens = ( + (response.input_metrics.total_tokens if response else None) + or request.input_metrics.total_tokens + or 0 + ) + output_tokens = ( + (response.output_metrics.total_tokens if response else None) + or request.output_metrics.total_tokens + or 0 + ) - # Request stats + # Request distribution stats state.set_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, key=f"{prefix}_requests", @@ -857,96 +904,120 @@ def update_estimate( state.set_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, key=f"{prefix}_requests_per_second", - value=requests_count / duration if duration else None, - ) - state.add_avg_metric( - group=EstimatedBenchmarkState.benchmark_metrics_group, - key=f"{prefix}_request_latency", value=( - request_info.timings.request_end or request_info.timings.resolve_end + requests_count / benchmark_duration if benchmark_duration else None ), - start_val=( - request_info.timings.request_start - or request_info.timings.resolve_start - ), - ) - - # Input/output token stats - state.add_avg_rate_metric( - group=EstimatedBenchmarkState.benchmark_metrics_group, - key="input_tokens", - value=(response.input_metrics.total_tokens if response else None) - or request.input_metrics.total_tokens, - ) - state.add_avg_rate_metric( - group=EstimatedBenchmarkState.benchmark_metrics_group, - key="input_text_tokens", - value=(response.input_metrics.text_tokens if response else None) - or request.input_metrics.text_tokens, ) - state.add_avg_rate_metric( + state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, - key="input_images", - value=(response.input_metrics.image_count if response else None) - or request.input_metrics.image_count, + key=f"{prefix}_request_latency", + value=request_duration, ) - state.add_avg_rate_metric( + state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, - key="input_video_frames", - value=(response.input_metrics.video_frames if response else None) - or request.input_metrics.video_frames, + key=f"{prefix}_request_streaming_iterations", + value=request_info.timings.iterations or 0, ) - state.add_avg_rate_metric( + + # Token iteration stats + state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, - key="input_audio_seconds", - value=request.input_metrics.audio_seconds if request else None, + key="output_tokens_iterations", + 
value=output_tokens, + count=request_info.timings.iterations or 1, ) - state.add_avg_rate_metric( + state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, - key="output_tokens", - value=(response.output_metrics.total_tokens if response else None) - or request.output_metrics.total_tokens, - ) - output_tokens = ( - response.output_metrics.total_tokens if response else None - ) or request.output_metrics.total_tokens - state.add_avg_rate_metric( - group=EstimatedBenchmarkState.benchmark_metrics_group, - key="total_tokens", - value=output_tokens, + key="output_tokens_wo_first_iterations", + value=output_tokens - 1 if output_tokens > 1 else 0, + count=request_info.timings.iterations or 1, ) - # General stats + # Token metrics stats state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, key=f"{prefix}_time_to_first_token", value=request_info.timings.first_iteration, - start_val=request_info.timings.request_start - or request_info.timings.resolve_start, + start_val=request_start_time, ) state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, key=f"{prefix}_inter_token_latency", value=request_info.timings.last_iteration, start_val=request_info.timings.first_iteration, - count=output_tokens - 1 - if output_tokens and output_tokens > 1 - else None, + count=(output_tokens or 1) - 1, ) state.add_avg_metric( group=EstimatedBenchmarkState.benchmark_metrics_group, key=f"{prefix}_time_per_output_token", - value=( - request_info.timings.request_end or request_info.timings.resolve_end - ), - start_val=( - request_info.timings.first_iteration - or request_info.timings.request_start - or request_info.timings.resolve_start - ), - count=output_tokens, + value=request_duration, + count=output_tokens or 0, ) + # Input/output throughput stats + if event_occurence_time is not None: + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_tokens", + value=input_tokens, + start_time=benchmark_start_time, + end_time=event_occurence_time, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="output_tokens", + value=output_tokens, + start_time=benchmark_start_time, + end_time=event_occurence_time, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="total_tokens", + value=input_tokens + output_tokens, + start_time=benchmark_start_time, + end_time=event_occurence_time, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_text_tokens", + value=( + (response.input_metrics.text_tokens if response else None) + or request.input_metrics.text_tokens + or 0 + ), + start_time=benchmark_start_time, + end_time=event_occurence_time, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_images", + value=( + (response.input_metrics.image_count if response else None) + or request.input_metrics.image_count + or 0 + ), + start_time=benchmark_start_time, + end_time=event_occurence_time, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_video_frames", + value=( + (response.input_metrics.video_frames if response else None) + or request.input_metrics.video_frames + or 0 + ), + start_time=benchmark_start_time, + end_time=event_occurence_time, + ) + state.add_avg_rate_metric( + group=EstimatedBenchmarkState.benchmark_metrics_group, + key="input_audio_seconds", + value=request.input_metrics.audio_seconds 
or 0, + start_time=benchmark_start_time, + end_time=event_occurence_time, + ) + @classmethod def compile( cls, @@ -987,6 +1058,10 @@ def compile( value_types=request_types, values=[req.request_latency or 0.0 for req in requests], ), + request_streaming_iterations_count=StatusDistributionSummary.from_values( + value_types=request_types, + values=[float(req.info.timings.iterations or 0) for req in requests], + ), # General token stats prompt_token_count=StatusDistributionSummary.from_values( value_types=request_types, @@ -1012,10 +1087,23 @@ def compile( value_types=request_types, values=[req.inter_token_latency_ms or 0.0 for req in requests], ), + output_tokens_wo_first_per_iteration=StatusDistributionSummary.from_values( + value_types=request_types, + values=[ + max(0.0, (req.output_metrics.total_tokens or 1.0) - 1.0) + for req in requests + ], + weights=[req.info.timings.iterations or 1 for req in requests], + ), output_tokens_per_second=StatusDistributionSummary.from_values( value_types=request_types, values=[req.output_tokens_per_second or 0.0 for req in requests], ), + output_tokens_per_iteration=StatusDistributionSummary.from_values( + value_types=request_types, + values=[req.output_tokens_per_iteration or 0.0 for req in requests], + weights=[req.info.timings.iterations or 1 for req in requests], + ), tokens_per_second=StatusDistributionSummary.from_values( value_types=request_types, values=[req.tokens_per_second or 0.0 for req in requests], diff --git a/src/guidellm/data/loaders.py b/src/guidellm/data/loaders.py index fcdea15d..fd46334d 100644 --- a/src/guidellm/data/loaders.py +++ b/src/guidellm/data/loaders.py @@ -65,7 +65,7 @@ def __iter__(self): worker_modulus = worker_info.num_workers if worker_info is not None else 1 worker_index = worker_info.id if worker_info is not None else 0 - if self.precache is not None: + if self.precache: for index, item in enumerate(self.precache): if (index + worker_index) % worker_modulus == 0: yield item diff --git a/src/guidellm/scheduler/__init__.py b/src/guidellm/scheduler/__init__.py index 2f5eb53f..9b74c44a 100644 --- a/src/guidellm/scheduler/__init__.py +++ b/src/guidellm/scheduler/__init__.py @@ -1,3 +1,15 @@ +""" +Scheduler subsystem for orchestrating benchmark workloads and managing worker processes. + +This module provides the core scheduling infrastructure for guidellm, including +strategies for controlling request timing patterns (synchronous, asynchronous, +constant rate, Poisson), constraints for limiting benchmark execution (duration, +error rates, request counts), and distributed execution through worker processes. +The scheduler coordinates between backend interfaces, manages benchmark state +transitions, and handles multi-turn request sequences with customizable timing +strategies and resource constraints. +""" + from .constraints import ( Constraint, ConstraintInitializer, @@ -28,11 +40,6 @@ AsyncConstantStrategy, AsyncPoissonStrategy, ConcurrentStrategy, - ConstantRateRequestTimings, - LastCompletionRequestTimings, - NoDelayRequestTimings, - PoissonRateRequestTimings, - ScheduledRequestTimings, SchedulingStrategy, StrategyT, StrategyType, diff --git a/src/guidellm/scheduler/environments.py b/src/guidellm/scheduler/environments.py index 69997e57..4f02d772 100644 --- a/src/guidellm/scheduler/environments.py +++ b/src/guidellm/scheduler/environments.py @@ -1,18 +1,19 @@ """ Environment abstractions for coordinating scheduler execution across distributed nodes. 
-Provides environment abstractions that handle synchronization, timing coordination, -error propagation, and lifecycle management for scheduler execution across single -or multiple nodes. The Environment protocol defines the interface for distributed +Provides abstractions that handle synchronization, timing coordination, error +propagation, and lifecycle management for scheduler execution across single or +multiple nodes. The Environment protocol defines the interface for distributed coordination while NonDistributedEnvironment provides a minimal implementation -for single-node execution. +for single-node execution. Environments manage the complete execution lifecycle +from parameter distribution through result aggregation. -Environment Execution Flow: -1. sync_run_params() - Distribute workload and synchronize parameters across nodes -2. sync_run_start() - Coordinate synchronized start time for all nodes -3. update_run_iteration() - Update state after each request (called per iteration) +Execution Flow: +1. sync_run_params() - Distribute workload and synchronize parameters +2. sync_run_start() - Coordinate synchronized start time +3. update_run_iteration() - Update state after each request iteration 4. sync_run_error() - Handle and propagate errors across nodes -5. sync_run_end() - Aggregate results and cleanup at completion +5. sync_run_end() - Aggregate results and finalize execution """ from __future__ import annotations @@ -39,12 +40,12 @@ class Environment(ABC, Generic[RequestT, ResponseT], InfoMixin): """ - Abstract base for coordinating scheduler execution across distributed nodes. + Abstract interface for coordinating scheduler execution across distributed nodes. - Defines the interface for managing distributed scheduler execution including + Defines the protocol for managing distributed scheduler execution including parameter synchronization, timing coordination, state updates, error propagation, - and result aggregation. Implementations handle the complexity of distributed - coordination while providing a unified interface for scheduler orchestration. + and result aggregation. Implementations handle distributed coordination complexity + while providing a unified interface for scheduler orchestration. """ @abstractmethod @@ -61,10 +62,6 @@ async def sync_run_params( """ Synchronize execution parameters across nodes and resolve local scope. - Coordinates parameter distribution and validation across active nodes. - In distributed environments, handles node assignment and workload partitioning. - In non-distributed environments, typically returns parameters unchanged. - :param requests: Complete set of requests to process across all nodes :param strategy: Scheduling strategy to apply during execution :param constraints: Runtime constraints to enforce during execution @@ -78,9 +75,6 @@ async def sync_run_start(self) -> float: """ Coordinate synchronized start time across all nodes. - Ensures all nodes begin processing simultaneously for accurate benchmarking - and consistent timing measurements across distributed execution. - :return: Unix timestamp when all nodes should begin processing :raises Exception: If startup synchronization fails across nodes """ @@ -97,11 +91,6 @@ async def update_run_iteration( """ Update environment state with completed request iteration results. - Called after each request processing to update execution progress and - synchronize any required state across nodes in distributed environments. 
- Generally, distributed is expected to store the iteration updates until - all nodes have processed and sync_run_end is called to retrieve them. - :param response: Response generated for the request, if successful :param request: The processed request :param request_info: Metadata about request processing including timings @@ -115,9 +104,6 @@ async def sync_run_error(self, err: list[Exception] | Exception): """ Handle and propagate errors across all active nodes. - Coordinates error handling when failures occur, ensuring all nodes are - notified for appropriate cleanup or shutdown procedures. - :param err: The exception(s) that occurred during execution """ ... @@ -136,10 +122,6 @@ async def sync_run_end( """ Finalize execution and aggregate results from all nodes. - Handles cleanup, result synchronization, and error propagation at execution - completion. Collects and yields results from worker nodes in distributed - environments. - :return: Iterator of (response, request, request_info, state) tuples from remote nodes in distributed environments, empty for non-distributed :raises Exception: Any errors that occurred during execution @@ -151,9 +133,9 @@ class NonDistributedEnvironment(Environment[RequestT, ResponseT]): """ Single-node scheduler execution environment with minimal coordination overhead. - Simplified environment for running schedulers on a single node without distributed - coordination requirements. Implements the Environment interface with no-op - synchronization for local testing, development, and single-machine benchmarking. + Implements the Environment interface with no-op synchronization for local testing, + development, and single-machine benchmarking. All synchronization methods return + immediately without distributed coordination logic. Example: :: @@ -165,29 +147,27 @@ class NonDistributedEnvironment(Environment[RequestT, ResponseT]): SynchronousStrategy, ) - - # Definitions + env = NonDistributedEnvironment() requests = [f"req_{ind}" for ind in range(5)] strategy = SynchronousStrategy() constraints = {"max_num": MaxNumberConstraint(max_num=5)} state = SchedulerState() - # Run environment local_req, local_strat, local_const = await env.sync_run_params( requests, strategy, constraints ) start_time = await env.sync_run_start() for req in local_req: state.processed_requests += 1 - await env.update_run_iteration( - f"resp_{req}", req, RequestInfo(), state - ) + await env.update_run_iteration(f"resp_{req}", req, RequestInfo(), state) async for nonlocal_req in env.sync_run_end(): state.processed_requests += 1 """ def __init__(self): - """Initialize with empty error storage for single-node execution.""" + """ + Initialize single-node environment with empty error storage. + """ self.run_errors: list[Exception] = [] async def sync_run_params( @@ -206,7 +186,7 @@ async def sync_run_params( :param requests: Requests to process locally :param strategy: Scheduling strategy to apply during execution :param constraints: Runtime constraints to enforce during execution - :return: Tuple containing the original (requests, strategy, constraints) + :return: Original (requests, strategy, constraints) tuple unchanged """ return requests, strategy, constraints @@ -214,7 +194,7 @@ async def sync_run_start(self) -> float: """ Return current time plus configured delay for single-node startup. 
- :return: Unix timestamp for when the run should start + :return: Unix timestamp when execution should begin """ return time.time() + settings.scheduler_start_delay_non_distributed @@ -229,7 +209,7 @@ async def update_run_iteration( No-op for single-node execution with no distributed state synchronization. :param response: Response generated for the request, if successful - :param request: The request that was processed + :param request: The processed request :param request_info: Metadata about request processing including timings :param state: Current scheduler state with metrics and progress """ @@ -256,7 +236,7 @@ async def sync_run_end( """ Finalize single-node execution and propagate any stored errors. - :return: Empty iterator since there are no remote nodes + :return: Empty iterator as there are no remote nodes :raises Exception: Any error stored during execution via sync_run_error """ if self.run_errors: diff --git a/src/guidellm/scheduler/scheduler.py b/src/guidellm/scheduler/scheduler.py index 0e19350b..ca5935fa 100644 --- a/src/guidellm/scheduler/scheduler.py +++ b/src/guidellm/scheduler/scheduler.py @@ -1,11 +1,10 @@ """ -Thread-safe singleton scheduler for distributed load generation workload coordination. +Thread-safe singleton scheduler for distributed benchmarking workload coordination. -Provides the core orchestration engine that coordinates request processing across -worker processes and distributed environments. Manages timing synchronization, -resource allocation, constraint enforcement, and result aggregation for -load generation operations. Integrates with backends, environments, and strategies -to enable scalable load testing across various scenarios including LLM inference. +Orchestrates request processing across worker processes with distributed timing +coordination, constraint enforcement, and result aggregation. Integrates with +backends, environments, and strategies to enable scalable load testing across +various scenarios including LLM inference benchmarking. """ from __future__ import annotations @@ -38,16 +37,14 @@ class Scheduler( Thread-safe singleton scheduler for distributed benchmarking workload coordination. Orchestrates request processing across worker processes with distributed timing - coordination, constraint enforcement, and result aggregation. Provides a unified - interface for executing benchmarking operations while abstracting the complexity - of multi-process coordination, environment synchronization, and resource management. - Implements singleton pattern to ensure consistent execution state across concurrent - benchmark operations. + coordination, constraint enforcement, and result aggregation. Abstracts the + complexity of multi-process coordination, environment synchronization, and + resource management while providing a unified interface for executing benchmarking + operations. Implements singleton pattern to ensure consistent execution state. 
Example: :: from guidellm.scheduler import Scheduler - from guidellm.backends import OpenAIBackend from guidellm.scheduler import NonDistributedEnvironment, SynchronousStrategy scheduler = Scheduler() @@ -58,7 +55,7 @@ class Scheduler( env=NonDistributedEnvironment(), max_requests=1000 ): - print(f"Processed: {request} with info: {info} and response: {response}") + print(f"Processed: {request}") """ async def run( @@ -66,6 +63,7 @@ async def run( requests: Iterable[RequestT | MultiTurnRequestT[RequestT]], backend: BackendInterface[RequestT, ResponseT], strategy: SchedulingStrategy, + startup_duration: float, env: Environment[RequestT, ResponseT] | None, **constraints: Any | dict[str, Any] | Constraint, ) -> AsyncIterator[ @@ -80,22 +78,23 @@ async def run( Execute distributed request processing with coordinated timing and constraints. Orchestrates the complete benchmarking workflow across worker processes with - environment synchronization, constraint enforcement, and error handling. - Manages resource lifecycle from initialization through cleanup while yielding - real-time processing updates for monitoring and aggregation. + environment synchronization, constraint enforcement, and error handling. Manages + resource lifecycle from initialization through cleanup while yielding real-time + processing updates for monitoring and aggregation. - :param requests: Request collection to process. Supports single requests or + :param requests: Request collection to process, supporting single requests or multi-turn sequences with optional inter-request delays :param backend: Backend interface for request processing and response generation :param strategy: Scheduling strategy controlling request timing and distribution + :param startup_duration: Duration in seconds for requests to ramp up :param env: Environment interface for distributed coordination and - synchronization + synchronization. Defaults to NonDistributedEnvironment if None :param constraints: Runtime constraints for execution control (max_requests, - max_duration, max_error_rate, etc.). Values can be primitives, dictionaries, - or constraint instances - :yields: Requests udpates as (response, request, request_info, scheduler_state) - tuples. Each request will generate three ordered updates: - queued, in_progress, completed | errored | cancelled. + max_duration, max_error_rate, etc.) as primitives, dictionaries, or + constraint instances + :yields: Request updates as (response, request, request_info, scheduler_state) + tuples. 
Each request generates three ordered updates: queued, in_progress, + completed | errored | cancelled :raises Exception: Worker process errors, environment synchronization failures, or constraint evaluation errors are propagated after cleanup """ @@ -122,10 +121,10 @@ async def run( # Setup the worker group, sync start with the environment worker_group = WorkerProcessGroup[RequestT, ResponseT]( requests=local_requests, - cycle_requests=local_requests, backend=backend, strategy=local_strategy, - constraints=local_constraints, + startup_duration=startup_duration, + **local_constraints, ) await worker_group.create_processes() local_start_time = await env.sync_run_start() diff --git a/src/guidellm/scheduler/schemas.py b/src/guidellm/scheduler/schemas.py index d53b55a1..21567c67 100644 --- a/src/guidellm/scheduler/schemas.py +++ b/src/guidellm/scheduler/schemas.py @@ -11,22 +11,13 @@ import time from collections.abc import AsyncIterator -from typing import ( - Any, - Generic, - Literal, - Protocol, - TypeVar, -) +from typing import Any, Generic, Literal, Protocol, TypeVar from pydantic import Field from typing_extensions import TypeAliasType, TypedDict from guidellm.schemas import RequestInfo -from guidellm.utils import ( - RegistryMixin, - StandardBaseModel, -) +from guidellm.utils import RegistryMixin, StandardBaseModel from guidellm.utils.registry import RegistryObjT __all__ = [ @@ -42,46 +33,45 @@ ] RequestT = TypeVar("RequestT") -"""Generic request object type for scheduler processing.""" +"Generic request object type for scheduler processing" ResponseT = TypeVar("ResponseT") -"""Generic response object type returned by backend processing.""" +"Generic response object type returned by backend processing" MultiTurnRequestT = TypeAliasType( "MultiTurnRequestT", list[RequestT | tuple[RequestT, float]] | tuple[RequestT | tuple[RequestT, float]], type_params=(RequestT,), ) -"""Multi-turn request structure supporting conversation history with optional delays.""" +"Multi-turn request structure supporting conversation history with optional delays" class SchedulerMessagingPydanticRegistry(RegistryMixin[RegistryObjT]): """ - Registry for enabling a generic interface to define the pydantic class types used - for inter-process messaging within the scheduler. + Registry for Pydantic types used in scheduler inter-process messaging. + + Enables generic interface for defining Pydantic class types used for + communication between distributed scheduler components and worker processes. """ class BackendInterface(Protocol, Generic[RequestT, ResponseT]): """ - Abstract interface for request processing backends. + Protocol defining the interface for request processing backends. - Defines the contract for backend implementations that process requests within - the scheduler system. Backends handle initialization, validation, processing, - and shutdown lifecycle management. Must ensure all properties are pickleable - before process_startup is invoked for multi-process environments. + Establishes the contract for backend implementations that process requests + within the scheduler system. Backends manage initialization, validation, + processing, and shutdown lifecycle. All properties must be pickleable before + process_startup is called for multi-process environments. 
Example: :: - from guidellm.scheduler.objects import BackendInterface - class CustomBackend(BackendInterface): @property def processes_limit(self) -> int: return 4 async def resolve(self, request, request_info, history=None): - # Process request and yield responses yield response, updated_request_info """ @@ -107,21 +97,21 @@ async def process_startup(self) -> None: """ Perform backend initialization and startup procedures. - :raises: Implementation-specific exceptions for startup failures. + :raises Exception: Implementation-specific exceptions for startup failures """ async def validate(self) -> None: """ Validate backend configuration and operational status. - :raises: Implementation-specific exceptions for validation failures. + :raises Exception: Implementation-specific exceptions for validation failures """ async def process_shutdown(self) -> None: """ Perform backend cleanup and shutdown procedures. - :raises: Implementation-specific exceptions for shutdown failures. + :raises Exception: Implementation-specific exceptions for shutdown failures """ async def resolve( @@ -135,23 +125,23 @@ async def resolve( :param request: The request object to process :param request_info: Scheduling metadata and timing information - :param history: Optional conversation history for multi-turn requests + :param history: Conversation history for multi-turn requests :yield: Tuples of (response, updated_request_info) for each response chunk - :raises: Implementation-specific exceptions for processing failures + :raises Exception: Implementation-specific exceptions for processing failures """ BackendT = TypeVar("BackendT", bound=BackendInterface) -"""Generic backend interface type for request processing.""" +"Generic backend interface type for request processing" class SchedulerUpdateActionProgress(TypedDict, total=False): """ - Progress information for a scheduler update action. + Progress tracking data for scheduler operations. - Optional progress tracking data that provides estimates for remaining work - in scheduler operations. Used by constraints and monitoring systems to - track execution progress and make termination decisions. + Provides estimates for remaining work in scheduler operations, including + fraction complete, request counts, and duration. Used by constraints and + monitoring systems to track execution progress and make termination decisions. """ remaining_fraction: float | None @@ -161,17 +151,14 @@ class SchedulerUpdateActionProgress(TypedDict, total=False): class SchedulerUpdateAction(StandardBaseModel): """ - Scheduler behavior control directives and actions. + Control directives for scheduler behavior and operations. Encapsulates control signals for scheduler operations including request queuing and processing directives. Used by constraints to communicate - termination conditions and progress information to scheduler components. + termination conditions and progress to scheduler components. Example: :: - from guidellm.scheduler.objects import SchedulerUpdateAction - - # Signal to stop queuing but continue processing action = SchedulerUpdateAction( request_queuing="stop", request_processing="continue", @@ -198,25 +185,18 @@ class SchedulerUpdateAction(StandardBaseModel): class SchedulerState(StandardBaseModel): """ - Scheduler operation state tracking and statistics. + Comprehensive state tracking for scheduler execution. - Comprehensive state container for tracking scheduler execution progress, - request counts, timing information, and constraint enforcement. 
Central - to scheduler coordination and provides real-time metrics for monitoring - and decision-making across distributed worker processes. + Tracks scheduler execution progress, request counts, timing information, + and constraint enforcement. Central to scheduler coordination, providing + real-time metrics for monitoring and decision-making across distributed + worker processes. Example: :: - from guidellm.scheduler.objects import SchedulerState - - # Initialize scheduler state state = SchedulerState(node_id=0, num_processes=4) - - # Track request processing state.created_requests += 1 state.queued_requests += 1 - - # Monitor completion progress completion_rate = state.processed_requests / state.created_requests """ @@ -234,41 +214,35 @@ class SchedulerState(StandardBaseModel): default=None, description="Unix timestamp when the scheduler stopped" ) end_queuing_time: float | None = Field( - default=None, description="When request queuing stopped, if applicable" + default=None, description="Unix timestamp when request queuing stopped" ) end_queuing_constraints: dict[str, SchedulerUpdateAction] = Field( default_factory=dict, description="Constraints that triggered queuing termination", ) end_processing_time: float | None = Field( - default=None, description="When request processing stopped, if applicable" + default=None, description="Unix timestamp when request processing stopped" ) end_processing_constraints: dict[str, SchedulerUpdateAction] = Field( default_factory=dict, - description="Constraints that triggered process ing termination", + description="Constraints that triggered processing termination", ) scheduler_constraints: dict[str, SchedulerUpdateAction] = Field( default_factory=dict, - description=( - "The latest state from all constraints applied during the scheduler run" - ), + description="Latest state from all constraints applied during scheduler run", ) remaining_fraction: float | None = Field( default=None, - description=( - "Estimated fraction for the remaining progress of the run, if known" - ), + description="Estimated fraction of remaining progress, if known", ) remaining_requests: float | None = Field( default=None, - description="Estimated number of requests remaining to be processed, if known", + description="Estimated number of remaining requests to process, if known", ) remaining_duration: float | None = Field( default=None, - description=( - "Estimated time remaining in seconds for the scheduler run, if known" - ), + description="Estimated remaining time in seconds for scheduler run, if known", ) created_requests: int = Field( @@ -279,13 +253,13 @@ class SchedulerState(StandardBaseModel): ) pending_requests: int = Field( default=0, - description="Total number of requests pending processing within a worker", + description="Number of requests pending processing within a worker", ) processing_requests: int = Field( default=0, description="Number of requests currently being processed" ) processed_requests: int = Field( - default=0, description="Total number of requests that completed processing" + default=0, description="Number of requests that completed processing" ) successful_requests: int = Field( default=0, description="Number of requests that completed successfully" diff --git a/src/guidellm/scheduler/strategies.py b/src/guidellm/scheduler/strategies.py index d3e31d43..5e13a26d 100644 --- a/src/guidellm/scheduler/strategies.py +++ b/src/guidellm/scheduler/strategies.py @@ -1,34 +1,32 @@ """ -Request scheduling strategies for controlling how benchmark requests are 
processed. +Request scheduling strategies for controlling benchmark request processing patterns. -This module provides timing implementations and concrete strategies that control request +Provides timing implementations and concrete strategies that control request concurrency, timing patterns, and throughput characteristics to simulate real-world -usage scenarios. The scheduling system separates timing logic from strategy constraints, -enabling flexible combination of timing behaviors with process and concurrency limits. +usage scenarios. Strategies define how requests are distributed across worker processes, +when they should be scheduled, and what constraints apply to concurrent processing. +The scheduling system separates timing logic from strategy constraints, enabling +flexible combination of timing behaviors with process and concurrency limits. """ from __future__ import annotations -import math +import asyncio import random import time -from abc import ABC, abstractmethod +from abc import abstractmethod +from multiprocessing import Lock, Value from typing import Annotated, ClassVar, Literal, TypeVar from pydantic import Field, PrivateAttr from guidellm.schemas import RequestInfo -from guidellm.utils import InfoMixin, PydanticClassRegistryMixin, StandardBaseModel +from guidellm.utils import InfoMixin, PydanticClassRegistryMixin __all__ = [ "AsyncConstantStrategy", "AsyncPoissonStrategy", "ConcurrentStrategy", - "ConstantRateRequestTimings", - "LastCompletionRequestTimings", - "NoDelayRequestTimings", - "PoissonRateRequestTimings", - "ScheduledRequestTimings", "SchedulingStrategy", "StrategyT", "StrategyType", @@ -43,308 +41,162 @@ ] -def _exponential_decay_tau(max_progress: float, convergence: float = 0.99) -> float: - """ - Calculate tau value for exponential decay to reach target progress level. - - :param max_progress: The max progress value to reach - :param convergence: The target convergence level for reaching max_progress - :return: The calculated tau value for the given max_progress and convergence - """ - return max_progress / (-math.log(1 - convergence)) - - -def _exponential_decay_fraction(progress: float, tau: float = 1.0) -> float: - """ - Calculate completion fraction based on exponential decay curve. - - :param progress: The current progress value (>=0) - :param tau: The scale factor for the exponential decay - :return: The fraction of completion based on exponential decay (0 -> 1) +class SchedulingStrategy(PydanticClassRegistryMixin["SchedulingStrategy"], InfoMixin): """ - return 1 - math.exp(-progress / tau) + Base class for scheduling strategies controlling request processing patterns. + Defines the interface for strategies that combine timing implementations with + process and concurrency constraints to enable various benchmark scenarios. + Strategies manage request timing, worker process coordination, and concurrency + limits across distributed execution environments. -class ScheduledRequestTimings(StandardBaseModel, ABC): - """ - Abstract base class for controlling when requests are scheduled. - - Defines the interface for timing implementations that determine request scheduling - behavior. Different implementations provide various patterns like synchronous, - constant-rate, or stochastic scheduling to simulate real-world scenarios. + :cvar schema_discriminator: Field name used for polymorphic deserialization """ - @abstractmethod - def next_offset(self) -> float: - """ - Calculate the time offset for the next request to be scheduled. 
- - :return: The offset in seconds from scheduler start time for next request - """ - - @abstractmethod - def request_completed(self, request_info: RequestInfo): - """ - Handle request completion and update internal timing state. - - :param request_info: Information about the completed request including - timing details and completion status - """ - + schema_discriminator: ClassVar[str] = "type_" -class LastCompletionRequestTimings(ScheduledRequestTimings): - """ - Timing implementation for synchronous and concurrent scheduling strategies. + @classmethod + def __pydantic_schema_base_type__(cls) -> type[SchedulingStrategy]: + if cls.__name__ == "SchedulingStrategy": + return cls - Schedules the next request immediately after the last request completes, enabling - sequential or limited concurrent processing with completion-based timing control. - """ + return SchedulingStrategy - offset: float = Field( - default=0.0, - description="Current time offset in seconds from scheduler start time", + type_: Literal["strategy"] = Field( + description="The type of scheduling strategy to schedule requests with", ) - startup_requests: int = Field( + worker_coount: int = Field( default=0, - description="Number of initial requests to schedule with equal spacing", - ge=0, - ) - startup_requests_delay: float = Field( - default=0.0, - description="Delay in seconds between startup requests", + description="Number of worker processes to use for this strategy", ge=0, ) - _requests_count: int = PrivateAttr(0) - - def next_offset(self) -> float: - """ - Get the current offset value and apply startup delay if applicable. - - :return: The current offset value in seconds from scheduler start time - """ - self._requests_count += 1 - - if self._requests_count <= self.startup_requests: - self.offset += self.startup_requests_delay - - return self.offset - - def request_completed(self, request_info: RequestInfo): - """ - Update timing state based on the completed request. - - :param request_info: Information about the completed request - """ - if ( - self._requests_count > self.startup_requests - and request_info.completed_at is not None - ): - # set the next sync offset to the time when the previous request completed - self.offset = request_info.completed_at - request_info.scheduler_start_time - - -class NoDelayRequestTimings(ScheduledRequestTimings): - """ - Timing implementation for throughput-maximizing scheduling strategies. - - Schedules requests with minimal delay to achieve maximum throughput, with optional - startup ramping to gradually increase request processing during initialization. - """ - - offset: float = Field( - default=0.0, - description="Base time offset in seconds from scheduler start time", + max_concurrency: int = Field( + default=0, + description="Maximum number of concurrent requests to allow", ge=0, ) startup_duration: float = Field( default=0.0, - description="Duration in seconds for gradual startup ramp", + description="Duration in seconds for startup request distribution", ge=0, ) - startup_target_requests: int = Field( - default=1, - description="Target number of requests to converge to during startup", - gt=0, - ) - startup_convergence: float = Field( - default=0.99, - description="Target convergence rate during startup phase", - ) - _start_time: float | None = PrivateAttr(None) - _requests_count: int = PrivateAttr(0) - - def next_offset(self) -> float: - """ - Calculate offset with optional startup adjustment. 
- - :return: Static offset plus any startup adjustment - """ - if self._start_time is None: - self._start_time = time.time() - - self._requests_count += 1 - elapsed = time.time() - self._start_time - if self.startup_duration > 0 and elapsed < self.startup_duration: - startup_percent = _exponential_decay_fraction( - self._requests_count, - _exponential_decay_tau( - self.startup_target_requests, self.startup_convergence - ), - ) - else: - startup_percent = 1.0 - - return self.offset + startup_percent * self.startup_duration + _processes_lock = PrivateAttr(None) + _processes_request_index = PrivateAttr(None) + _processes_start_time = PrivateAttr(None) + _cached_processes_start_time: float | None = PrivateAttr(None) - def request_completed(self, request_info: RequestInfo): + @property + def processes_limit(self) -> int | None: """ - Handle request completion (no action needed for throughput strategy). + Get the maximum number of worker processes supported by this strategy. - :param request_info: Information about the completed request (unused) + :return: Maximum number of worker processes, None if unlimited """ + return None - -class ConstantRateRequestTimings(ScheduledRequestTimings): - """ - Timing implementation for constant-rate scheduling strategies. - - Schedules requests at a fixed rate with evenly spaced intervals to provide - predictable timing behavior for steady-state load simulation. - """ - - rate: float = Field( - description="Target rate in requests per second", - gt=0, - ) - offset: float = Field( - default=0.0, - description="Base time offset in seconds from scheduler start time", - ge=0, - ) - _requests_count: int = PrivateAttr(0) - - def next_offset(self) -> float: + @property + def requests_limit(self) -> int | None: """ - Calculate the offset for the next request at a constant rate. + Get the maximum number of concurrent requests supported by this strategy. - :return: The offset in seconds for the next request + :return: Maximum number of concurrent requests, None if unlimited """ - num_requests = self._requests_count - self._requests_count += 1 - interval = 1.0 / self.rate - - return self.offset + interval * num_requests + return None - def request_completed(self, request_info: RequestInfo): + def init_processes_timings( + self, + worker_count: int, + max_concurrency: int, + startup_duration: float, + ): """ - Handle request completion (no action needed for constant rate strategy). + Initialize shared timing state for multi-process coordination. - :param request_info: Information about the completed request (unused) + :param worker_count: Number of worker processes to coordinate + :param max_concurrency: Maximum number of concurrent requests allowed + :param startup_duration: Duration in seconds for request startup ramping """ + self.worker_coount = worker_count + self.max_concurrency = max_concurrency + self.startup_duration = startup_duration + self._processes_request_index = Value("i", 0) + self._processes_lock = Lock() + self._processes_start_time = Value("d", -1.0) -class PoissonRateRequestTimings(ScheduledRequestTimings): - """ - Timing implementation for Poisson-distributed scheduling strategies. - - Schedules requests following a Poisson process with exponentially distributed - inter-arrival times to simulate realistic traffic patterns with random variance. 
- """ - - rate: float = Field( - description="Target average rate in requests per second", - gt=0, - ) - random_seed: int = Field( - default=42, - description="Seed for random number generator for reproducible behavior", - ) - offset: float = Field( - default=0.0, - description="Base time offset in seconds from scheduler start time", - ) - _requests_count: int = PrivateAttr(0) - _random: random.Random | None = PrivateAttr(None) - - def next_offset(self) -> float: + def init_processes_start(self, start_time: float): """ - Calculate the offset for the next request using Poisson distribution. + Set the synchronized start time for all worker processes. - :return: The cumulative offset in seconds for the next request + :param start_time: Unix timestamp when request processing should begin + :raises RuntimeError: If called before init_processes_timings """ - self._requests_count += 1 - - if self._random is None: - self._random = random.Random(self.random_seed) - else: - next_delay = self._random.expovariate(self.rate) - self.offset += next_delay + if self._processes_lock is None: + raise RuntimeError( + "SchedulingStrategy init_processes_start called before " + "init_processes_timings" + ) - return self.offset + with self._processes_lock: + self._processes_start_time.value = start_time - def request_completed(self, request_info: RequestInfo): + async def get_processes_start_time(self) -> float: """ - Handle request completion (no action needed for Poisson rate strategy). + Get the synchronized start time, waiting if not yet set. - :param request_info: Information about the completed request (unused) + :return: Unix timestamp when request processing began + :raises RuntimeError: If called before init_processes_timings """ + if self._processes_lock is None: + raise RuntimeError( + "SchedulingStrategy get_processes_start_time called before " + "init_processes_timings" + ) + while self._cached_processes_start_time is None: + with self._processes_lock: + if self._processes_start_time.value != -1.0: + self._cached_processes_start_time = self._processes_start_time.value + else: + await asyncio.sleep(0.01) # wait for start time to be set by main -class SchedulingStrategy(PydanticClassRegistryMixin["SchedulingStrategy"], InfoMixin): - """ - Abstract base class for scheduling strategies controlling request processing. - - Defines the interface for strategies that combine timing implementations with - process and concurrency constraints to enable various benchmark scenarios. - """ - - schema_discriminator: ClassVar[str] = "type_" - - @classmethod - def __pydantic_schema_base_type__(cls) -> type[SchedulingStrategy]: - if cls.__name__ == "SchedulingStrategy": - return cls - - return SchedulingStrategy - - type_: Literal["strategy"] = Field( - description="The type of scheduling strategy to schedule requests with", - ) + return self._cached_processes_start_time - @property - def processes_limit(self) -> int | None: + def next_request_index(self) -> int: """ - Get the maximum number of worker processes supported by this strategy. + Get the next sequential request index across all worker processes. 
- :return: Maximum number of worker processes, None if unlimited + :return: Globally unique request index for timing calculations + :raises RuntimeError: If called before init_processes_timings """ - return None + if self._processes_lock is None: + raise RuntimeError( + "SchedulingStrategy next_request_index called before " + "init_processes_timings" + ) - @property - def requests_limit(self) -> int | None: + with self._processes_lock: + self._processes_request_index.value += 1 + return self._processes_request_index.value + + @abstractmethod + async def next_request_time(self, offset: int) -> float: """ - Get the maximum number of concurrent requests supported by this strategy. + Calculate the scheduled start time for the next request. - :return: Maximum number of concurrent requests, None if unlimited + :param offset: Worker process offset for distributing request timing + :return: Unix timestamp when the request should be processed """ - return None - def create_request_timings( - self, local_rank: int, local_world_size: int, local_max_concurrency: int | float - ) -> ScheduledRequestTimings: + @abstractmethod + def request_completed(self, request_info: RequestInfo): """ - Create a timing instance to define scheduling behavior for a worker process. + Handle request completion and update internal timing state. - :param local_rank: The rank of the worker process within local world size - :param local_world_size: Total number of worker processes in local world - :param local_max_concurrency: Maximum concurrent requests for the worker - :return: A ScheduledRequestTimings instance for the worker process - :raises NotImplementedError: Must be implemented by subclasses + :param request_info: Information about the completed request including + timing details and completion status """ - raise NotImplementedError( - "create_worker_timings method must be implemented by subclasses." - ) StrategyT = TypeVar("StrategyT", bound=SchedulingStrategy) @@ -353,19 +205,18 @@ def create_request_timings( @SchedulingStrategy.register("synchronous") class SynchronousStrategy(SchedulingStrategy): """ - Sequential request processing strategy with single-process constraint. + Sequential request processing with strict single-request-at-a-time execution. Processes requests one at a time in strict sequential order, providing predictable timing behavior ideal for measuring maximum sequential throughput and ensuring - request isolation. + complete request isolation. Each request completes before the next begins. """ type_: Literal["synchronous"] = "synchronous" # type: ignore[assignment] + _process_last_request_time: float | None = PrivateAttr(None) def __str__(self) -> str: """ - Return string representation of the strategy. - :return: String identifier for synchronous strategy """ return "synchronous" @@ -373,52 +224,49 @@ def __str__(self) -> str: @property def processes_limit(self) -> int | None: """ - Get maximum number of worker processes for synchronous scheduling. - - :return: Always returns 1 to enforce single-process constraint + :return: Always 1 to enforce single-process constraint """ return 1 @property def requests_limit(self) -> int | None: """ - Get maximum number of concurrent requests for synchronous scheduling. 
- - :return: Always returns 1 to enforce single-request constraint + :return: Always 1 to enforce single-request constraint """ return 1 - def create_request_timings( - self, - local_rank: int, - local_world_size: int, - local_max_concurrency: int, # noqa: ARG002 - ) -> ScheduledRequestTimings: - """ - Create timing implementation for synchronous request scheduling. - - :param local_rank: The rank of the worker process (must be 0) - :param local_world_size: Total number of worker processes (must be 1) - :param local_max_concurrency: Maximum concurrent requests (unused) - :return: LastCompletionRequestTimings instance for sequential processing - :raises ValueError: If multiple workers or non-zero rank specified - """ - if local_world_size > 1 or local_rank != 0: - raise ValueError( - "SynchronousStrategy can only be used with a single worker process." - ) + async def next_request_time(self, offset: int) -> float: + """ + Calculate next request time based on previous completion. + + :param offset: Unused for synchronous strategy + :return: Time of last completion or start time if first request + """ + _ = offset # offset unused for synchronous strategy + + if self._process_last_request_time is not None: + return self._process_last_request_time + + return await self.get_processes_start_time() + + def request_completed(self, request_info: RequestInfo): + """ + Update timing state with completed request information. - return LastCompletionRequestTimings() + :param request_info: Completed request metadata including timing + """ + if request_info.completed_at is not None: + self._process_last_request_time = request_info.completed_at @SchedulingStrategy.register("concurrent") class ConcurrentStrategy(SchedulingStrategy): """ - Parallel request processing strategy with controlled concurrency limits. + Parallel request processing with fixed concurrency limits. Enables concurrent request processing up to a specified number of streams, - providing balanced throughput while maintaining predictable resource usage - and completion-based timing coordination. + providing balanced throughput while maintaining predictable resource usage. + Requests are distributed across streams with completion-based timing coordination. """ type_: Literal["concurrent"] = "concurrent" # type: ignore[assignment] @@ -426,16 +274,11 @@ class ConcurrentStrategy(SchedulingStrategy): description="Number of concurrent streams for scheduling requests", gt=0, ) - startup_duration: float = Field( - default=0.0, - description="Duration in seconds for distributing startup requests", - ge=0, - ) + + _process_last_request_time: float | None = PrivateAttr(None) def __str__(self) -> str: """ - Return string representation of the strategy. - :return: String identifier with stream count """ return f"concurrent@{self.streams}" @@ -443,8 +286,6 @@ def __str__(self) -> str: @property def processes_limit(self) -> int: """ - Get maximum number of worker processes for concurrent scheduling. - :return: Number of streams as maximum worker processes """ return self.streams @@ -452,72 +293,42 @@ def processes_limit(self) -> int: @property def requests_limit(self) -> int: """ - Get maximum number of concurrent requests for concurrent scheduling. 
- :return: Number of streams as maximum concurrent requests """ return self.streams - def create_request_timings( - self, - local_rank: int, - local_world_size: int, - local_max_concurrency: int, # noqa: ARG002 - ) -> LastCompletionRequestTimings: - """ - Create timing implementation for concurrent request scheduling. - - :param local_rank: The rank of the worker process (must be < streams) - :param local_world_size: Total worker processes (must not exceed streams) - :param local_max_concurrency: Maximum concurrent requests (unused) - :return: LastCompletionRequestTimings instance for stream-based processing - :raises ValueError: If worker configuration exceeds stream limits - """ - if local_world_size > self.streams: - raise ValueError( - "ConcurrentStrategy can only be used with up to " - f"{self.streams} worker processes." - ) + async def next_request_time(self, offset: int) -> float: + """ + Calculate next request time with stream-based distribution. - if local_rank >= self.streams: - raise ValueError( - f"Local rank {local_rank} exceeds the number of streams {self.streams}." - ) + :param offset: Worker process offset for distributing initial requests + :return: Time of last completion or staggered start time if first request + """ + if self._process_last_request_time is not None: + return self._process_last_request_time - if self.startup_duration > 0: - # Ensure equal global distribution of the start up for concurrent streams - # Ex: for 10 streams, 2 workers, and 8 seconds start up duration, - # the first worker should start at 0.0, 1.6, 3.2, 4.8, 6.4 - # and the second worker should start at 0.8, 2.4, 4.0, 5.6, 7.2 - delay_per_stream = self.startup_duration / self.streams - streams_per_worker = self.streams // local_world_size - - offset = local_rank * streams_per_worker * delay_per_stream - startup_requests = streams_per_worker + ( - 1 - if local_world_size > 1 and local_rank < self.streams % local_world_size - else 0 - ) - startup_requests_delay = delay_per_stream * local_world_size - else: - offset = 0.0 - startup_requests = 0 - startup_requests_delay = 0.0 + start_time = await self.get_processes_start_time() + + return start_time + (offset / self.worker_coount) + + def request_completed(self, request_info: RequestInfo): + """ + Update timing state with completed request information. - return LastCompletionRequestTimings( - offset=offset, - startup_requests=startup_requests, - startup_requests_delay=startup_requests_delay, - ) + :param request_info: Completed request metadata including timing + """ + if request_info.completed_at is not None: + self._process_last_request_time = request_info.completed_at @SchedulingStrategy.register("throughput") class ThroughputStrategy(SchedulingStrategy): """ - Maximum throughput strategy with optional concurrency limits. + Maximum throughput scheduling with optional concurrency limits. Schedules requests to maximize system throughput by allowing unlimited concurrent - processing with optional constraints and startup ramping for controlled ramp-up. + processing with optional constraints. Supports startup ramping to gradually + distribute initial requests for controlled system ramp-up. 
""" type_: Literal["throughput"] = "throughput" # type: ignore[assignment] @@ -526,16 +337,9 @@ class ThroughputStrategy(SchedulingStrategy): description="Maximum number of concurrent requests to schedule", gt=0, ) - startup_duration: float = Field( - default=0.0, - description="Duration in seconds for startup request distribution", - ge=0, - ) def __str__(self) -> str: """ - Return string representation of the strategy. - :return: String identifier for throughput strategy """ return "throughput" @@ -543,56 +347,57 @@ def __str__(self) -> str: @property def processes_limit(self) -> int | None: """ - Get maximum number of worker processes for throughput scheduling. - - :return: The max_concurrency value if set, otherwise None for unlimited + :return: Max concurrency if set, otherwise None for unlimited """ return self.max_concurrency @property def requests_limit(self) -> int | None: """ - Get maximum number of concurrent requests for throughput scheduling. - - :return: The max_concurrency value if set, otherwise None for unlimited + :return: Max concurrency if set, otherwise None for unlimited """ return self.max_concurrency - def create_request_timings( - self, local_rank: int, local_world_size: int, local_max_concurrency: int - ) -> ScheduledRequestTimings: + async def next_request_time(self, offset: int) -> float: """ - Create timing implementation for throughput request scheduling. + Calculate next request time with optional startup ramping. - :param local_rank: The rank of the worker process - :param local_world_size: Total number of worker processes - :param local_max_concurrency: Maximum concurrent requests for the worker - :return: NoDelayRequestTimings instance for immediate request scheduling + :param offset: Unused for throughput strategy + :return: Immediate start or ramped start time during startup period """ - if self.startup_duration > 0: - # Vary offset by up to 5% of the startup duration for a bit of variance - offset = 0.05 * self.startup_duration * (local_rank / local_world_size) - # Use local_max_concurrency as the target requests for startup convergence - startup_target_requests = local_max_concurrency - else: - offset = 0.0 - startup_target_requests = 1 + _ = offset # offset unused for throughput strategy + start_time = await self.get_processes_start_time() - return NoDelayRequestTimings( - startup_duration=self.startup_duration, - startup_target_requests=startup_target_requests, - offset=offset, - ) + if ( + self.startup_duration > 0 + and (time.time() - start_time) < self.startup_duration + and (current_index := self.next_request_index()) <= self.max_concurrency + ): + # linearly ramp start times to spread max_concurrency requests evenly + # over startup_duration + return start_time + self.startup_duration * ( + current_index / self.max_concurrency + ) + + return start_time + self.startup_duration + + def request_completed(self, request_info: RequestInfo): + """ + Handle request completion (no-op for throughput strategy). + + :param request_info: Completed request metadata (unused) + """ + _ = request_info # request_info unused for throughput strategy @SchedulingStrategy.register("constant") class AsyncConstantStrategy(ThroughputStrategy): """ - Asynchronous constant-rate scheduling strategy for predictable load patterns. + Constant-rate scheduling for predictable load patterns. 
Schedules requests at a fixed rate distributed evenly across worker processes, providing predictable timing behavior for steady-state load simulation and - consistent system performance measurement. + consistent system performance measurement. Requests arrive at uniform intervals. """ type_: Literal["constant"] = "constant" # type: ignore[assignment] @@ -600,53 +405,43 @@ class AsyncConstantStrategy(ThroughputStrategy): description="Rate for scheduling requests asynchronously in requests/second", gt=0, ) - startup_duration: float = Field( - default=0.0, - description="Duration in seconds for startup request distribution", - ge=0, - ) def __str__(self) -> str: """ - Return string representation of the strategy. - :return: String identifier with rate value """ return f"constant@{self.rate:.2f}" - def create_request_timings( - self, - local_rank: int, - local_world_size: int, - local_max_concurrency: int, # noqa: ARG002 - ) -> ScheduledRequestTimings: + async def next_request_time(self, offset: int) -> float: """ - Create timing implementation for constant-rate request scheduling. + Calculate next request time at fixed intervals. - :param local_rank: The rank of the worker process - :param local_world_size: Total number of worker processes for rate division - :param local_max_concurrency: Maximum concurrent requests for the worker - :return: ConstantRateRequestTimings instance with per-worker rate + :param offset: Unused for constant strategy + :return: Start time plus constant interval based on request index + """ + _ = offset # offset unused for throughput strategy + current_index = self.next_request_index() + start_time = await self.get_processes_start_time() + + return start_time + current_index / self.rate + + def request_completed(self, request_info: RequestInfo): """ - # Divide the rate evenly across all worker processes - worker_rate = self.rate / local_world_size - # Start each worker with an offset to interleave rates - worker_offset = (1 / self.rate) * local_rank + Handle request completion (no-op for constant strategy). - return ConstantRateRequestTimings( - rate=worker_rate, - offset=worker_offset, - ) + :param request_info: Completed request metadata (unused) + """ + _ = request_info # request_info unused for async constant strategy @SchedulingStrategy.register("poisson") class AsyncPoissonStrategy(ThroughputStrategy): """ - Asynchronous Poisson-distributed scheduling strategy for realistic load simulation. + Poisson-distributed scheduling for realistic load simulation. Schedules requests following a Poisson process with exponentially distributed inter-arrival times, providing realistic simulation of user behavior and network - traffic patterns with random variance around the target rate. + traffic patterns. Request arrivals have random variance around the target rate. """ type_: Literal["poisson"] = "poisson" # type: ignore[assignment] @@ -654,47 +449,71 @@ class AsyncPoissonStrategy(ThroughputStrategy): description="Rate for scheduling requests asynchronously in requests/second", gt=0, ) - startup_duration: float = Field( - default=0.0, - description="Duration in seconds for startup request distribution", - ge=0, - ) random_seed: int = Field( default=42, description="Random seed to use for Poisson distribution", ) + _random: random.Random | None = PrivateAttr(None) + _offset = PrivateAttr(None) + def __str__(self) -> str: """ - Return string representation of the strategy. 
- :return: String identifier with rate value """ return f"poisson@{self.rate:.2f}" - def create_request_timings( + def init_processes_timings( self, - local_rank: int, - local_world_size: int, - local_max_concurrency: int, # noqa: ARG002 - ) -> ScheduledRequestTimings: - """ - Create timing implementation for Poisson-distributed request scheduling. - - :param local_rank: The rank of the worker process for seed generation - :param local_world_size: Total number of worker processes for rate division - :param local_max_concurrency: Maximum concurrent requests for the worker - :return: PoissonRateRequestTimings instance with per-worker rate and unique seed - """ - # Divide the rate evenly across all worker processes - worker_rate = self.rate / local_world_size - # Use a different seed for each worker to ensure different sequences - worker_seed = self.random_seed + local_rank - # Start each worker with an offset to interleave rates - worker_offset = (1 / self.rate) * local_rank - - return PoissonRateRequestTimings( - rate=worker_rate, - random_seed=worker_seed, - offset=worker_offset, - ) + worker_count: int, + max_concurrency: int, + startup_duration: float, + ): + """ + Initialize Poisson-specific timing state. + + :param worker_count: Number of worker processes to coordinate + :param max_concurrency: Maximum number of concurrent requests allowed + :param startup_duration: Duration in seconds for request startup ramping + """ + super().init_processes_timings(worker_count, max_concurrency, startup_duration) + with self._processes_lock: + self._offset = Value("d", -1.0) + + def init_processes_start(self, start_time: float): + """ + Initialize the offset time for Poisson timing calculations. + + :param start_time: Unix timestamp when request processing should begin + """ + ThroughputStrategy.init_processes_start(self, start_time) + with self._processes_lock: + self._offset.value = start_time + + async def next_request_time(self, offset: int) -> float: + """ + Calculate next request time using exponential distribution. + + :param offset: Unused for Poisson strategy + :return: Next arrival time based on Poisson process + """ + _ = offset # offset unused for throughput strategy + _ = await self.get_processes_start_time() # ensure offset is initialized + + if self._random is None: + self._random = random.Random(self.random_seed) + + next_delay = self._random.expovariate(self.rate) + + with self._processes_lock: + self._offset.value += next_delay + + return self._offset.value + + def request_completed(self, request_info: RequestInfo): + """ + Handle request completion (no-op for Poisson strategy). + + :param request_info: Completed request metadata (unused) + """ + _ = request_info # request_info unused for async poisson strategy diff --git a/src/guidellm/scheduler/worker.py b/src/guidellm/scheduler/worker.py index 45716b78..a46455f9 100644 --- a/src/guidellm/scheduler/worker.py +++ b/src/guidellm/scheduler/worker.py @@ -1,10 +1,11 @@ """ -Individual worker process management for multi-process request execution. +Worker process implementation for distributed request execution and coordination. -Manages worker processes that handle request scheduling, backend processing, and -coordination in distributed benchmark environments. Workers consume requests from -queues, apply timing strategies, process requests through backends, and publish -status updates while maintaining synchronization across the process group. 
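# A minimal, hypothetical sketch of how the reworked SchedulingStrategy timing API
# above is driven: shared timing state is initialized once (init_processes_timings /
# init_processes_start), then each worker awaits next_request_time() per request.
# The rate, worker_count, max_concurrency, and startup_duration values are
# illustrative only; the real call sites are in worker_group.py and worker.py below.
import asyncio
import time

from guidellm.scheduler.strategies import AsyncConstantStrategy


async def preview_constant_schedule() -> None:
    strategy = AsyncConstantStrategy(rate=2.0)  # assumes other fields keep defaults
    # Creates the shared multiprocessing Lock/Value objects used across workers
    strategy.init_processes_timings(
        worker_count=1, max_concurrency=4, startup_duration=0.0
    )
    # Publishes the synchronized start time that get_processes_start_time() waits on
    strategy.init_processes_start(time.time())
    for _ in range(3):
        # Each call bumps the shared request index, so targets land at
        # start_time + index / rate (0.5 s apart at rate=2.0)
        target = await strategy.next_request_time(offset=0)
        print(f"target start: {target:.3f}")


asyncio.run(preview_constant_schedule())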
+Manages individual worker processes within the scheduler system, handling request +lifecycle from queue consumption through backend processing and status publication. +Workers coordinate with other processes through barriers and events, apply timing +strategies for request scheduling, maintain concurrency limits, and publish real-time +status updates throughout request processing. """ from __future__ import annotations @@ -19,13 +20,13 @@ import uvloop HAS_UVLOOP: Annotated[ - bool, "Flag indicating if uvloop is available for event loop optimization" + bool, "Flag indicating uvloop availability for event loop optimization" ] = True except ImportError: uvloop = None HAS_UVLOOP: Annotated[ - bool, "Flag indicating if uvloop is available for event loop optimization" + bool, "Flag indicating uvloop availability for event loop optimization" ] = False @@ -35,7 +36,7 @@ RequestT, ResponseT, ) -from guidellm.scheduler.strategies import ScheduledRequestTimings +from guidellm.scheduler.strategies import SchedulingStrategy from guidellm.schemas import RequestInfo from guidellm.utils import ( InterProcessMessaging, @@ -49,29 +50,34 @@ class WorkerProcess(Generic[RequestT, ResponseT]): """ - Individual worker process for distributed request execution and coordination. + Worker process for distributed request execution in the scheduler system. - Manages the complete request lifecycle from queue consumption through backend - processing and status publication. Coordinates with other workers through - barriers and events while maintaining configurable concurrency limits and - timing strategies for request scheduling. + Manages complete request lifecycle including queue consumption, backend processing, + timing strategy application, and status publication. Coordinates with other workers + through synchronization primitives while maintaining concurrency limits and handling + graceful shutdown scenarios including errors and cancellations. Example: :: worker = WorkerProcess( + worker_index=0, messaging=messaging_interface, + backend=backend_instance, + strategy=timing_strategy, async_limit=10, + fut_scheduling_time_limit=5.0, startup_barrier=barrier, + requests_generated_event=generated_event, + constraint_reached_event=constraint_event, shutdown_event=shutdown, error_event=error, - backend=backend_instance, - request_timings=timing_strategy ) worker.run() """ def __init__( self, + worker_index: int, messaging: InterProcessMessaging[ tuple[ ResponseT | None, @@ -80,8 +86,9 @@ def __init__( ], ], backend: BackendInterface[RequestT, ResponseT], - request_timings: ScheduledRequestTimings, + strategy: SchedulingStrategy, async_limit: int, + fut_scheduling_time_limit: float, startup_barrier: ProcessingBarrier, requests_generated_event: ProcessingEvent, constraint_reached_event: ProcessingEvent, @@ -91,22 +98,25 @@ def __init__( """ Initialize worker process instance. 
- :param messaging: Inter-process communication interface for request coordination - :param backend: Backend instance for processing requests - :param request_timings: Timing strategy for request scheduling - :param async_limit: Maximum concurrent requests this worker can handle - :param startup_barrier: Multiprocessing barrier for coordinated startup - :param requests_generated_event: Event signaling when request generation is - complete - :param constraint_reached_event: Event signaling when processing constraints - are met - :param shutdown_event: Event for signaling graceful shutdown - :param error_event: Event for signaling error conditions across processes + :param worker_index: Unique identifier for this worker within the process group + :param messaging: Inter-process messaging interface for request coordination + :param backend: Backend interface for processing requests + :param strategy: Scheduling strategy for determining request timing + :param async_limit: Maximum concurrent requests this worker can process + :param fut_scheduling_time_limit: Maximum time in seconds to schedule requests + into the future + :param startup_barrier: Synchronization barrier for coordinated startup + :param requests_generated_event: Event signaling request generation completion + :param constraint_reached_event: Event signaling processing constraint reached + :param shutdown_event: Event signaling graceful shutdown request + :param error_event: Event signaling error conditions across processes """ + self.worker_index = worker_index self.messaging = messaging self.backend = backend - self.request_timings = request_timings + self.strategy = strategy self.async_limit = async_limit + self.fut_scheduling_time_limit = fut_scheduling_time_limit self.startup_barrier = startup_barrier self.requests_generated_event = requests_generated_event self.constraint_reached_event = constraint_reached_event @@ -122,8 +132,8 @@ def run(self): """ Main entry point for worker process execution. - Initializes asyncio event loop with optional uvloop optimization and starts - worker async operations. Handles event loop cleanup for forked processes. + Initializes asyncio event loop with optional uvloop optimization and executes + worker async operations. Handles event loop cleanup and error propagation. :raises RuntimeError: If worker encounters unrecoverable error during execution """ @@ -142,9 +152,9 @@ async def run_async(self): """ Execute main asynchronous worker process logic. - Orchestrates concurrent execution of request processing and shutdown monitoring - tasks. Handles task cleanup, error propagation, and cancellation coordination - when any task completes or fails. + Orchestrates concurrent execution of request processing and shutdown monitoring. + Handles task cleanup, error propagation, and cancellation coordination when any + task completes or encounters an error. :raises RuntimeError: If worker tasks encounter unrecoverable errors :raises asyncio.CancelledError: If worker process was cancelled @@ -192,6 +202,7 @@ async def run_async(self): async def _stop_monitor( self, ) -> Literal["error_event", "shutdown_event"]: + """Monitor shutdown and error events for worker termination.""" exit_key = await wait_for_sync_objects( { "error_event": self.error_event, @@ -206,6 +217,12 @@ async def _stop_monitor( ) async def _process_requests(self): + """ + Manage request processing lifecycle from startup to shutdown. 
+ + Coordinates startup synchronization, processes requests until constraints are + reached, then cancels pending requests until shutdown or error occurs. + """ try: # 1. Start up synchronization (backend, messaging, and other processes) # 2. Messaging startup, receive requests until requests_generated event @@ -227,6 +244,7 @@ async def _process_requests(self): await self._processing_shutdown() async def _processing_startup(self): + """Initialize backend, messaging, and synchronize with other workers.""" # Get backend ready await self.backend.process_startup() self.backend_started = True @@ -258,6 +276,12 @@ async def _processing_shutdown(self): self.startup_completed = False async def _process_requests_loop(self): + """ + Process requests continuously until cancelled with concurrency limits. + + Schedules and processes requests according to the timing strategy while + maintaining the configured concurrency limit through semaphore coordination. + """ try: # Run request processing async_semaphore = asyncio.Semaphore(self.async_limit) @@ -273,7 +297,18 @@ def _task_done(task): # Main loop; loop until canceled while True: await async_semaphore.acquire() - request_task = asyncio.create_task(self._process_next_request()) + request_time = await self.strategy.next_request_time( + offset=self.worker_index + ) + + if ( + time_until := request_time - time.time() + ) >= self.fut_scheduling_time_limit: + await asyncio.sleep(time_until - self.fut_scheduling_time_limit) + + request_task = asyncio.create_task( + self._process_next_request(target_start=request_time) + ) pending_tasks.add(request_task) request_task.add_done_callback(_task_done) except asyncio.CancelledError as err: @@ -284,6 +319,7 @@ def _task_done(task): raise err async def _cancel_requests_loop(self): + """Cancel all remaining queued requests until worker process terminates.""" while True: try: request: RequestT @@ -299,30 +335,32 @@ async def _cancel_requests_loop(self): request_info.timings.resolve_end = time.time() self._send_update("cancelled", None, request, request_info) - async def _process_next_request(self): + async def _process_next_request(self, target_start: float): + """ + Process a single request from queue to completion. + + Retrieves request from messaging queue, applies timing strategy, processes + through backend, and publishes status updates throughout the lifecycle. 
+ + :param target_start: Unix timestamp when request should begin processing + """ request: RequestT | MultiTurnRequestT[RequestT] | None = None request_info: RequestInfo | None = None response: ResponseT | None = None try: - # Pull request from the queue + # Pull request from the queue, update state, and send "pending" update request, request_info = await self.messaging.get() + request_info.timings.dequeued = time.time() + request_info.scheduler_node_id = self.messaging.worker_index or -1 + request_info.timings.targeted_start = target_start + self._send_update("pending", response, request, request_info) if request is None or request_info is None: raise RuntimeError("Received invalid request or request info") - - if isinstance(request, (list, tuple)): + if isinstance(request, list | tuple): raise NotImplementedError("Multi-turn requests are not yet supported") - # Calculate targeted start and set pending state for request - request_info.scheduler_node_id = self.messaging.worker_index or -1 - request_info.timings.dequeued = time.time() - target_start = ( - request_info.scheduler_start_time + self.request_timings.next_offset() - ) - request_info.timings.targeted_start = target_start - self._send_update("pending", response, request, request_info) - # Schedule the request current_time = time.time() request_info.timings.scheduled_at = current_time @@ -355,6 +393,9 @@ async def _process_next_request(self): request_info.error = str(exc) request_info.timings.resolve_end = time.time() self._send_update("errored", response, request, request_info) + finally: + if request_info is not None: + self.strategy.request_completed(request_info) def _send_update( self, @@ -365,6 +406,18 @@ def _send_update( request: RequestT | MultiTurnRequestT[RequestT], request_info: RequestInfo, ): + """ + Publish request status update through messaging system. + + Updates request status and publishes to messaging queue for coordinator + consumption. Prevents duplicate status updates for the same state. 
+ + :param new_status: New status for the request + :param response: Response object if available, None otherwise + :param request: Request object being processed + :param request_info: Request metadata and timing information + :raises Exception: If messaging system fails to publish the update + """ prev_status = request_info.status if new_status == prev_status: diff --git a/src/guidellm/scheduler/worker_group.py b/src/guidellm/scheduler/worker_group.py index 21394668..c6027989 100644 --- a/src/guidellm/scheduler/worker_group.py +++ b/src/guidellm/scheduler/worker_group.py @@ -14,7 +14,7 @@ import threading import time import uuid -from collections.abc import AsyncIterator, Generator, Iterable, Iterator +from collections.abc import AsyncIterator, Generator, Iterable from multiprocessing import get_context from multiprocessing.context import BaseContext from multiprocessing.managers import BaseManager @@ -62,7 +62,6 @@ class WorkerProcessGroup(Generic[RequestT, ResponseT]): group = WorkerProcessGroup( requests=request_iterable, - cycle_requests=None, backend=backend_instance, strategy=scheduling_strategy, constraints={"max_time": time_constraint} @@ -81,38 +80,25 @@ class WorkerProcessGroup(Generic[RequestT, ResponseT]): def __init__( self, - requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, - cycle_requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, + requests: Iterable[RequestT | MultiTurnRequestT[RequestT]], backend: BackendInterface[RequestT, ResponseT], strategy: SchedulingStrategy, - constraints: dict[str, Constraint], + startup_duration: float, + **constraints: dict[str, Constraint], ): """ Initialize a worker process group for distributed request processing. :param requests: Finite iterable of requests to process sequentially - :param cycle_requests: Iterable of requests to cycle through indefinitely :param backend: Backend interface for processing requests :param strategy: Scheduling strategy for request timing and distribution + :param startup_duration: Duration in seconds for request startup ramping :param constraints: Named constraints for controlling execution behavior - :raises ValueError: If neither requests nor cycle_requests are provided, - or if cycle_requests is an Iterator rather than Iterable """ - if requests is None and cycle_requests is None: - raise ValueError( - "At least one of 'requests' or 'cycle_requests' must be provided. " - ) - - if isinstance(cycle_requests, Iterator): - raise ValueError( - f"cycle_requests must be an Iterable or None, not an Iterator. 
" - f"Got {type(cycle_requests)}" - ) - self.requests = requests - self.cycle_requests = cycle_requests self.backend = backend self.strategy = strategy + self.startup_duration = startup_duration self.constraints = constraints # Multiprocessing contexts and primitives, created in create_processes @@ -186,9 +172,7 @@ async def create_processes(self): max_pending_size = max( 1, math.floor(max_conc * settings.mp_max_pending_buffer_percent) ) - per_proc_max_buffer_size = max( - 1, math.floor(per_proc_max_conc * settings.mp_max_worker_buffer_percent) - ) + per_proc_max_buffer_size = 1 # Initialize multiprocessing components self.mp_context = get_context(settings.mp_context_type) @@ -231,6 +215,11 @@ async def create_processes(self): # Initialize worker processes self.processes = [] + self.strategy.init_processes_timings( + worker_count=num_processes, + max_concurrency=max_conc, + startup_duration=self.startup_duration, + ) for rank in range(num_processes): # Distribute any remainder across the first N ranks async_limit = per_proc_max_conc + ( @@ -238,18 +227,16 @@ async def create_processes(self): ) worker = WorkerProcess[RequestT, ResponseT]( + worker_index=rank, messaging=self.messaging.create_worker_copy( worker_index=rank, max_buffer_send_size=None, max_buffer_receive_size=per_proc_max_buffer_size, ), backend=self.backend, - request_timings=self.strategy.create_request_timings( - local_rank=rank, - local_world_size=num_processes, - local_max_concurrency=async_limit, - ), + strategy=self.strategy, async_limit=async_limit, + fut_scheduling_time_limit=0.0, startup_barrier=self.startup_barrier, requests_generated_event=self.requests_generated_event, constraint_reached_event=self.constraint_reached_event, @@ -296,6 +283,7 @@ async def start(self, start_time: float): ): raise RuntimeError("create_processes() must be called before start()") + self.strategy.init_processes_start(start_time=start_time) stop_send_requests_event = threading.Event() send_requests_stopped_event = threading.Event() self.state = WorkerGroupState[RequestT, ResponseT]( @@ -308,11 +296,10 @@ async def start(self, start_time: float): constraint_reached_event=self.constraint_reached_event, shutdown_event=self.shutdown_event, error_event=self.error_event, + messaging=self.messaging, ) await self.messaging.start( - send_items=self.state.requests_generator( - self.requests, self.cycle_requests - ), + send_items=self.state.requests_generator(self.requests), receive_callback=self.state.received_callback, send_stopped_event=send_requests_stopped_event, send_stop_criteria=[stop_send_requests_event], @@ -424,6 +411,8 @@ async def shutdown(self) -> list[Exception]: # noqa: C901 class _StateUpdate(NamedTuple): + """Internal state update result with control flags.""" + state: SchedulerState stop_queueing: bool stop_processing: bool @@ -449,6 +438,15 @@ def __init__( constraint_reached_event: Event, shutdown_event: Event, error_event: Event, + messaging: InterProcessMessaging[ + tuple[RequestT | MultiTurnRequestT[RequestT], RequestInfo], + tuple[ + ResponseT | None, + RequestT | MultiTurnRequestT[RequestT], + RequestInfo, + SchedulerState, + ], + ], ): """ Initialize worker group state management. 
@@ -456,6 +454,7 @@ def __init__( :param start_time: Unix timestamp when processing should begin :param processes: List of worker process instances :param constraints: Named constraints for controlling execution behavior + :param stop_send_requests_event: Threading event for stopping request generation :param send_requests_stopped_event: Threading event for request coordination :param requests_generated_event: Multiprocessing event for generation completion :param constraint_reached_event: Multiprocessing event for constraint stopping @@ -471,6 +470,7 @@ def __init__( self.constraint_reached_event = constraint_reached_event self.shutdown_event = shutdown_event self.error_event = error_event + self.messaging = messaging self._update_lock: threading.Lock = threading.Lock() self._state: SchedulerState = SchedulerState( @@ -483,9 +483,7 @@ def __init__( self._processing_requests: set[RequestT | MultiTurnRequestT[RequestT]] = set() def requests_generator( - self, - requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, - cycle_requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] | None, + self, requests: Iterable[RequestT | MultiTurnRequestT[RequestT]] ) -> Generator[ tuple[RequestT | MultiTurnRequestT[RequestT], RequestInfo], None, None ]: @@ -497,22 +495,12 @@ def requests_generator( constraints to determine when to stop request generation. :param requests: Finite iterable of requests to process sequentially - :param cycle_requests: Iterable of requests to cycle through indefinitely :return: Generator yielding (request, request_info) tuples """ - def _iter() -> Iterator[RequestT | MultiTurnRequestT[RequestT]]: - if requests is not None: - yield from requests - - if cycle_requests is not None: - while True: - yield from cycle_requests - try: count = 0 - request_iter = _iter() - for request in request_iter: + for request in iter(requests): count += 1 if hasattr(request, "request_id"): @@ -529,6 +517,9 @@ def _iter() -> Iterator[RequestT | MultiTurnRequestT[RequestT]]: ) state_update = self._locked_update(request_info) request_info.timings.queued = time.time() + self.messaging.buffer_receive_queue.sync_put( + (None, request, request_info, state_update.state) + ) yield (request, request_info) From 91f79b778020acc12163c5ae6231a99eea3cc3bd Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Wed, 15 Oct 2025 13:15:56 -0400 Subject: [PATCH 88/90] Propagate valid failures from HuggingFace datasets loading (ones that are not related to not found errors) for better messaging to the user --- .../data/deserializers/deserializer.py | 2 +- .../data/deserializers/huggingface.py | 42 ++++++++++++------- 2 files changed, 28 insertions(+), 16 deletions(-) diff --git a/src/guidellm/data/deserializers/deserializer.py b/src/guidellm/data/deserializers/deserializer.py index cb362710..d50e4a9c 100644 --- a/src/guidellm/data/deserializers/deserializer.py +++ b/src/guidellm/data/deserializers/deserializer.py @@ -77,7 +77,7 @@ def deserialize( if dataset is None: raise DataNotSupportedError( f"No suitable deserializer found for data {data} " - f"with kwargs {data_kwargs} and type_ {type_}." + f"with kwargs {data_kwargs} and deserializer type {type_}." 
) if resolve_split: diff --git a/src/guidellm/data/deserializers/huggingface.py b/src/guidellm/data/deserializers/huggingface.py index e356043a..80e0ed8c 100644 --- a/src/guidellm/data/deserializers/huggingface.py +++ b/src/guidellm/data/deserializers/huggingface.py @@ -12,6 +12,11 @@ load_dataset, load_from_disk, ) +from datasets.exceptions import ( + DataFilesNotFoundError, + DatasetNotFoundError, + FileNotFoundDatasetsError, +) from transformers import PreTrainedTokenizerBase from guidellm.data.deserializers.deserializer import ( @@ -35,38 +40,45 @@ def __call__( _ = (processor_factory, random_seed) if isinstance( - data, (Dataset, IterableDataset, DatasetDict, IterableDatasetDict) + data, Dataset | IterableDataset | DatasetDict | IterableDatasetDict ): return data load_error = None if ( - isinstance(data, (str, Path)) + isinstance(data, str | Path) and (path := Path(data)).exists() and ((path.is_file() and path.suffix == ".py") or path.is_dir()) ): # Handle python script or nested python script in a directory try: return load_dataset(str(data), **data_kwargs) - except Exception as err: # noqa: BLE001 - load_error = err - - if ( - isinstance(data, (str, Path)) - and (path := Path(data)).exists() - and path.is_dir() - ): - # Handle local dataset directory - try: - return load_from_disk(str(data), **data_kwargs) - except Exception as err: # noqa: BLE001 + except ( + FileNotFoundDatasetsError, + DatasetNotFoundError, + DataFilesNotFoundError, + ) as err: load_error = err + except Exception: # noqa: BLE001 + # Try loading as a local dataset directory next + try: + return load_from_disk(str(data), **data_kwargs) + except ( + FileNotFoundDatasetsError, + DatasetNotFoundError, + DataFilesNotFoundError, + ) as err2: + load_error = err2 try: # Handle dataset identifier from the Hugging Face Hub return load_dataset(str(data), **data_kwargs) - except Exception as err: # noqa: BLE001 + except ( + FileNotFoundDatasetsError, + DatasetNotFoundError, + DataFilesNotFoundError, + ) as err: load_error = err not_supported = DataNotSupportedError( From 5f4a731cd795fba049ff655565981d8823c49cc5 Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Wed, 15 Oct 2025 15:51:13 -0400 Subject: [PATCH 89/90] Fixes from review --- src/guidellm/scheduler/strategies.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/guidellm/scheduler/strategies.py b/src/guidellm/scheduler/strategies.py index 5e13a26d..0cd3bc63 100644 --- a/src/guidellm/scheduler/strategies.py +++ b/src/guidellm/scheduler/strategies.py @@ -65,7 +65,7 @@ def __pydantic_schema_base_type__(cls) -> type[SchedulingStrategy]: type_: Literal["strategy"] = Field( description="The type of scheduling strategy to schedule requests with", ) - worker_coount: int = Field( + worker_count: int = Field( default=0, description="Number of worker processes to use for this strategy", ge=0, @@ -117,7 +117,7 @@ def init_processes_timings( :param max_concurrency: Maximum number of concurrent requests allowed :param startup_duration: Duration in seconds for request startup ramping """ - self.worker_coount = worker_count + self.worker_count = worker_count self.max_concurrency = max_concurrency self.startup_duration = startup_duration @@ -309,7 +309,7 @@ async def next_request_time(self, offset: int) -> float: start_time = await self.get_processes_start_time() - return start_time + (offset / self.worker_coount) + return start_time + (offset / self.worker_count) def request_completed(self, request_info: RequestInfo): """ From 
57683a2a681ca7f4898045e2dfd48d9058fdfe7c Mon Sep 17 00:00:00 2001 From: Mark Kurtz Date: Thu, 16 Oct 2025 15:42:47 -0400 Subject: [PATCH 90/90] [GuideLLM Refactor] Reenablement of scenarios and fixes for benchmark package and CLI pathways (#414) ## Summary Changed the benchmarking entrypoint to take in an Args object which is now used to load scenarios. It enables a single source of truth in addition to being able to save the exact configurations in the report output. ## Details - [ ] ## Test Plan - ## Related Issues - Resolves # --- - [ ] "I certify that all code in this PR is my own, except as noted below." ## Use of AI - [ ] Includes AI-assisted code completion - [ ] Includes code generated by an AI application - [ ] Includes AI-generated tests (NOTE: AI written tests should have a docstring that includes `## WRITTEN BY AI ##`) --- pyproject.toml | 1 + src/guidellm/__main__.py | 568 +++++--------- src/guidellm/benchmark/__init__.py | 20 +- src/guidellm/benchmark/benchmarker.py | 57 +- src/guidellm/benchmark/entrypoints.py | 267 ++++--- src/guidellm/benchmark/profile.py | 233 +++--- src/guidellm/benchmark/scenario.py | 169 ---- src/guidellm/benchmark/scenarios/__init__.py | 40 + src/guidellm/benchmark/scenarios/chat.json | 6 +- src/guidellm/benchmark/scenarios/rag.json | 6 +- src/guidellm/benchmark/schemas.py | 740 ++++++++++++++++-- src/guidellm/benchmark/types.py | 22 - .../data/deserializers/deserializer.py | 15 +- src/guidellm/presentation/data_models.py | 12 +- src/guidellm/utils/cli.py | 44 +- 15 files changed, 1317 insertions(+), 883 deletions(-) delete mode 100644 src/guidellm/benchmark/scenario.py delete mode 100644 src/guidellm/benchmark/types.py diff --git a/pyproject.toml b/pyproject.toml index 8fe6d950..5135edad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,7 @@ include = ["*"] [tool.setuptools.package-data] "guidellm.data" = ["*.gz"] +"guidellm.benchmark.scenarios" = ["*.json", "**/*.json"] [tool.pdm] distribution = true diff --git a/src/guidellm/__main__.py b/src/guidellm/__main__.py index 680ac852..1e9ba96f 100644 --- a/src/guidellm/__main__.py +++ b/src/guidellm/__main__.py @@ -1,14 +1,12 @@ """ -GuideLLM command-line interface providing benchmarking, dataset preprocessing, and -mock server functionality. +GuideLLM command-line interface entry point. -This module serves as the primary entry point for the GuideLLM CLI application, -offering a comprehensive suite of tools for language model evaluation and testing. -It provides three main command groups: benchmark operations for performance testing -against generative models, dataset preprocessing utilities for data preparation and -transformation, and a mock server for testing and development scenarios. The CLI -supports various backends, output formats, and configuration options to accommodate -different benchmarking needs and deployment environments. +Primary CLI application providing benchmark execution, dataset preprocessing, and +mock server functionality for language model evaluation. Organizes commands into +three main groups: benchmark operations for performance testing, preprocessing +utilities for data transformation, and mock server capabilities for development +and testing. Supports multiple backends, output formats, and flexible configuration +through CLI options and environment variables. 
Example: :: @@ -30,6 +28,7 @@ from pathlib import Path import click +from pydantic import ValidationError try: import uvloop @@ -38,12 +37,13 @@ from guidellm.backends import BackendType from guidellm.benchmark import ( + BenchmarkGenerativeTextArgs, GenerativeConsoleBenchmarkerProgress, ProfileType, benchmark_generative_text, + get_builtin_scenarios, reimport_benchmarks_report, ) -from guidellm.benchmark.scenario import GenerativeTextScenario from guidellm.mock_server import MockServer, MockServerConfig from guidellm.preprocess.dataset import ShortPromptStrategy, process_dataset from guidellm.scheduler import StrategyType @@ -65,22 +65,21 @@ "run", ] -# Available strategy and profile choices for benchmark execution types STRATEGY_PROFILE_CHOICES: list[str] = list(get_literal_vals(ProfileType | StrategyType)) +"""Available strategy and profile type choices for benchmark execution.""" def decode_escaped_str(_ctx, _param, value): """ Decode escape sequences in Click option values. - Click automatically escapes characters in option values, converting sequences - like "\\n" to "\\\\n". This function properly decodes these escape sequences - to their intended characters for use in CLI options. + Click automatically escapes characters converting sequences like "\\n" to + "\\\\n". This function decodes these sequences to their intended characters. :param _ctx: Click context (unused) :param _param: Click parameter (unused) - :param value: String value to decode escape sequences from - :return: Decoded string with proper escape sequences + :param value: String value to decode + :return: Decoded string with proper escape sequences, or None if input is None :raises click.BadParameter: When escape sequence decoding fails """ if value is None: @@ -94,89 +93,76 @@ def decode_escaped_str(_ctx, _param, value): @click.group() @click.version_option(package_name="guidellm", message="guidellm version: %(version)s") def cli(): - """ - Main entry point for the GuideLLM command-line interface. - - This is the root command group that organizes all GuideLLM CLI functionality - into logical subgroups for benchmarking, preprocessing, configuration, and - mock server operations. - """ + """GuideLLM CLI for benchmarking, preprocessing, and testing language models.""" @cli.group( - help="Commands to run a new benchmark or load a prior one.", + help="Run a benchmark or load a previously saved benchmark report.", cls=DefaultGroupHandler, default="run", ) def benchmark(): - """ - Benchmark command group for running and managing performance tests. - - This command group provides functionality to execute new benchmarks against - generative models and load previously saved benchmark reports for analysis. - Supports various benchmarking strategies, output formats, and backend types. - """ + """Benchmark commands for performance testing generative models.""" @benchmark.command( "run", - help="Run a benchmark against a generative model using the specified arguments.", + help=( + "Run a benchmark against a generative model. " + "Supports multiple backends, data sources, strategies, and output formats. " + "Configuration can be loaded from a scenario file or specified via options." 
+ ), context_settings={"auto_envvar_prefix": "GUIDELLM"}, ) -# @click.option( -# "--scenario", -# type=cli_tools.Union( -# click.Path( -# exists=True, -# readable=True, -# file_okay=True, -# dir_okay=False, -# path_type=Path, -# ), -# click.Choice(get_builtin_scenarios()), -# ), -# default=None, -# help=( -# "The name of a builtin scenario or path to a config file. " -# "Missing values from the config will use defaults. " -# "Options specified on the commandline will override the scenario." -# ), -# ) +@click.option( + "--scenario", + type=cli_tools.Union( + click.Path( + exists=True, + readable=True, + file_okay=True, + dir_okay=False, + path_type=Path, + ), + click.Choice(get_builtin_scenarios().keys()), + ), + default=None, + help=( + "Builtin scenario name or path to config file. " + "CLI options override scenario settings." + ), +) @click.option( "--target", type=str, - help="The target path for the backend to run benchmarks against. For example, http://localhost:8000", + help="Target backend URL (e.g., http://localhost:8000).", ) @click.option( "--data", type=str, multiple=True, help=( - "The HuggingFace dataset ID, a path to a HuggingFace dataset, " - "a path to a data file csv, json, jsonl, or txt, " - "or a synthetic data config as a json or key=value string." + "HuggingFace dataset ID, path to dataset, path to data file " + "(csv/json/jsonl/txt), or synthetic data config (json/key=value)." ), ) @click.option( "--profile", "--rate-type", # legacy alias "profile", + default=BenchmarkGenerativeTextArgs.get_default("profile"), type=click.Choice(STRATEGY_PROFILE_CHOICES), - help=( - "The type of benchmark to run. " - f"Supported types {', '.join(STRATEGY_PROFILE_CHOICES)}. " - ), + help=f"Benchmark profile type. Options: {', '.join(STRATEGY_PROFILE_CHOICES)}.", ) @click.option( "--rate", - default=GenerativeTextScenario.get_default("rate"), + type=float, + multiple=True, + default=BenchmarkGenerativeTextArgs.get_default("rate"), help=( - "The rates to run the benchmark at. " - "Can be a single number or a comma-separated list of numbers. " - "For rate-type=sweep, this is the number of benchmarks it runs in the sweep. " - "For rate-type=concurrent, this is the number of concurrent requests. " - "For rate-type=async,constant,poisson, this is the rate requests per second. " - "For rate-type=synchronous,throughput, this must not be set." + "Benchmark rate(s) to test. Meaning depends on profile: " + "sweep=number of benchmarks, concurrent=concurrent requests, " + "async/constant/poisson=requests per second." ), ) # Backend configuration @@ -185,166 +171,132 @@ def benchmark(): "--backend-type", # legacy alias "backend", type=click.Choice(list(get_literal_vals(BackendType))), - default=GenerativeTextScenario.get_default("backend"), - help=( - "The type of backend to use to run requests against. Defaults to 'openai_http'." - f" Supported types: {', '.join(get_literal_vals(BackendType))}" - ), + default=BenchmarkGenerativeTextArgs.get_default("backend"), + help=f"Backend type. Options: {', '.join(get_literal_vals(BackendType))}.", ) @click.option( "--backend-kwargs", "--backend-args", # legacy alias "backend_kwargs", callback=cli_tools.parse_json, - default=GenerativeTextScenario.get_default("backend_kwargs"), - help=( - "A JSON string containing any arguments to pass to the backend as a " - "dict with **kwargs." 
- ), + default=BenchmarkGenerativeTextArgs.get_default("backend_kwargs"), + help="JSON string of arguments to pass to the backend.", ) @click.option( "--model", - default=GenerativeTextScenario.get_default("model"), + default=BenchmarkGenerativeTextArgs.get_default("model"), type=str, - help=( - "The ID of the model to benchmark within the backend. " - "If None provided (default), then it will use the first model available." - ), + help="Model ID to benchmark. If not provided, uses first available model.", ) # Data configuration @click.option( "--request-type", - default="chat_completions", + default=BenchmarkGenerativeTextArgs.get_default("data_request_formatter"), type=click.Choice(list(get_literal_vals(GenerativeRequestType))), help=( - "The type of request to create for each data sample and send to the backend. " - f"Supported types: {list(get_literal_vals(GenerativeRequestType))}." + f"Request type to create for each data sample. " + f"Options: {', '.join(get_literal_vals(GenerativeRequestType))}." ), ) @click.option( "--request-formatter-kwargs", default=None, callback=cli_tools.parse_json, - help=( - "A JSON string containing any arguments to pass to the request formatter " - "as a dict with **kwargs." - ), + help="JSON string of arguments to pass to the request formatter.", ) @click.option( "--processor", - default=GenerativeTextScenario.get_default("processor"), + default=BenchmarkGenerativeTextArgs.get_default("processor"), type=str, help=( - "The processor or tokenizer to use to calculate token counts for statistics " - "and synthetic data generation. If None provided (default), will load " - "using the model arg, if needed." + "Processor or tokenizer for token count calculations. " + "If not provided, loads from model." ), ) @click.option( "--processor-args", - default=GenerativeTextScenario.get_default("processor_args"), + default=BenchmarkGenerativeTextArgs.get_default("processor_args"), callback=cli_tools.parse_json, - help=( - "A JSON string containing any arguments to pass to the processor constructor " - "as a dict with **kwargs." - ), + help="JSON string of arguments to pass to the processor constructor.", ) @click.option( "--data-args", multiple=True, - default=None, + default=BenchmarkGenerativeTextArgs.get_default("data_args"), callback=cli_tools.parse_json, - help=( - "A JSON string containing any arguments to pass to the dataset creation " - "as a dict with **kwargs." - ), + help="JSON string of arguments to pass to dataset creation.", ) @click.option( "--data-samples", - default=-1, + default=BenchmarkGenerativeTextArgs.get_default("data_samples"), type=int, help=( - "The number of samples to use from the dataset. If -1 (default), will use all " - "samples in the dataset and dynamically generate samples. " - "If >1, will precompile that number of items from the dataset configs." + "Number of samples from dataset. -1 (default) uses all samples " + "and dynamically generates more." ), ) @click.option( - "--data-column-mappings", - default=None, + "--data-column-mapper", + default=BenchmarkGenerativeTextArgs.get_default("data_column_mapper"), callback=cli_tools.parse_json, - help=( - "A JSON string of column mappings to apply to the dataset to map into request " - "column types." 
- ), + help="JSON string of column mappings to apply to the dataset.", ) @click.option( "--data-sampler", - default=None, + default=BenchmarkGenerativeTextArgs.get_default("data_sampler"), type=click.Choice(["shuffle"]), - help="The data sampler type to use.", + help="Data sampler type.", ) @click.option( "--data-num-workers", - default=None, + default=BenchmarkGenerativeTextArgs.get_default("data_num_workers"), type=int, - help="The number of worker processes to use for data loading.", + help="Number of worker processes for data loading.", ) @click.option( "--dataloader_kwargs", - default=None, + default=BenchmarkGenerativeTextArgs.get_default("dataloader_kwargs"), callback=cli_tools.parse_json, - help=( - "A JSON string containing any arguments to pass to the dataloader constructor " - "as a dict with **kwargs." - ), + help="JSON string of arguments to pass to the dataloader constructor.", ) @click.option( "--random-seed", - default=GenerativeTextScenario.get_default("random_seed"), + default=BenchmarkGenerativeTextArgs.get_default("random_seed"), type=int, - help="The random seed to use for benchmarking to ensure reproducibility.", + help="Random seed for reproducibility.", ) # Output configuration @click.option( "--output-path", type=click.Path(), - default=Path.cwd(), + default=BenchmarkGenerativeTextArgs.get_default("output_path"), help=( - "The path to save the output formats to, if the format is a file type. " - "If it is a directory, it will save all output formats selected under it. " - "If it is a file, it will save the corresponding output format to that file. " - "Any output formats that were given that do not match the file extension will " - "be saved in the parent directory of the file path. " - "Defaults to the current working directory. " + "Path to save output files. Can be a directory or file. " + "If a file, saves that format; mismatched formats save to parent directory." ), ) @click.option( "--output-formats", multiple=True, type=str, - default=("console", "json"), # ("console", "json", "html", "csv") - help=( - "The output formats to use for the benchmark results. " - "Defaults to console, json, html, and csv where the file formats " - "will be saved at the specified output path." - ), + default=BenchmarkGenerativeTextArgs.get_default("output_formats"), + help="Output formats for results (e.g., console, json, html, csv).", ) @click.option( "--disable-console-outputs", is_flag=True, - help="Set this flag to disable console output", + help="Disable console output.", ) # Updates configuration @click.option( "--disable-progress", is_flag=True, - help="Set this flag to disable progress updates to the console", + help="Disable progress updates to the console.", ) @click.option( "--display-scheduler-stats", is_flag=True, - help="Set this flag to display stats for the processes running the benchmarks", + help="Display scheduler process statistics.", ) # Aggregators configuration @click.option( @@ -352,13 +304,10 @@ def benchmark(): "--warmup-percent", # legacy alias "warmup", type=float, - default=GenerativeTextScenario.get_default("warmup"), + default=BenchmarkGenerativeTextArgs.get_default("warmup"), help=( - "The specification around the number of requests to run before benchmarking. " - "If within (0, 1), then the percent of requests/time to use for warmup. " - "If >=1, then the number of requests or seconds to use for warmup." - "Whether it's requests/time used is dependent on which constraint is active. " - "Default None for no warmup." 
+ "Warmup specification: if in (0,1) = percent, if >=1 = number of " + "requests/seconds (depends on active constraint)." ), ) @click.option( @@ -366,13 +315,10 @@ def benchmark(): "--cooldown-percent", # legacy alias "cooldown", type=float, - default=GenerativeTextScenario.get_default("cooldown"), + default=BenchmarkGenerativeTextArgs.get_default("cooldown"), help=( - "The specification around the number of requests to run after benchmarking. " - "If within (0, 1), then the percent of requests/time to use for cooldown. " - "If >=1, then the number of requests or seconds to use for cooldown." - "Whether it's requests/time used is dependent on which constraint is active. " - "Default None for no cooldown." + "Cooldown specification: if in (0,1) = percent, if >=1 = number of " + "requests/seconds (depends on active constraint)." ), ) @click.option( @@ -381,129 +327,86 @@ def benchmark(): "sample_requests", type=int, help=( - "The number of samples for each request status and each benchmark to save " - "in the output file. If None (default), will save all samples. " - "Defaults to 20." + "Number of sample requests per status to save. " + "None (default) saves all, recommended: 20." ), ) # Constraints configuration @click.option( "--max-seconds", type=float, - default=GenerativeTextScenario.get_default("max_seconds"), + default=BenchmarkGenerativeTextArgs.get_default("max_seconds"), help=( - "The maximum number of seconds each benchmark can run for. " - "If None, will run until max_requests or the data is exhausted." + "Maximum seconds per benchmark. " + "If None, runs until max_requests or data exhaustion." ), ) @click.option( "--max-requests", type=int, - default=GenerativeTextScenario.get_default("max_requests"), + default=BenchmarkGenerativeTextArgs.get_default("max_requests"), help=( - "The maximum number of requests each benchmark can run for. " - "If None, will run until max_seconds or the data is exhausted." + "Maximum requests per benchmark. " + "If None, runs until max_seconds or data exhaustion." 
), ) @click.option( "--max-errors", type=int, - default=GenerativeTextScenario.get_default("max_errors"), - help="Maximum number of errors allowed before stopping the benchmark", + default=BenchmarkGenerativeTextArgs.get_default("max_errors"), + help="Maximum errors before stopping the benchmark.", ) @click.option( "--max-error-rate", type=float, - default=GenerativeTextScenario.get_default("max_error_rate"), - help="Maximum error rate allowed before stopping the benchmark", + default=BenchmarkGenerativeTextArgs.get_default("max_error_rate"), + help="Maximum error rate before stopping the benchmark.", ) @click.option( "--max-global-error-rate", type=float, - default=GenerativeTextScenario.get_default("max_global_error_rate"), - help="Maximum global error rate allowed across all benchmarks", + default=BenchmarkGenerativeTextArgs.get_default("max_global_error_rate"), + help="Maximum global error rate across all benchmarks.", ) -def run( - target, - data, - profile, - rate, - # Backend Configuration - backend, - backend_kwargs, - model, - # Data configuration - request_type, - request_formatter_kwargs, - processor, - processor_args, - data_args, - data_samples, - data_column_mappings, - data_sampler, - data_num_workers, - dataloader_kwargs, - random_seed, - # Output configuration - output_path, - output_formats, - # Updates configuration - disable_console_outputs, - disable_progress, - display_scheduler_stats, - # Benchmarker configuration - sample_requests, - warmup, - cooldown, - # Constraints configuration - max_seconds, - max_requests, - max_errors, - max_error_rate, - max_global_error_rate, -): - """ - Execute a generative text benchmark against a target model backend. - - Runs comprehensive performance testing using various strategies and profiles, - collecting metrics on latency, throughput, error rates, and resource usage. - Supports multiple backends, data sources, output formats, and constraint types - for flexible benchmark configuration. 
- """ - data_request_formatter = ( +def run(**kwargs): + request_type = kwargs.pop("request_type", None) + request_formatter_kwargs = kwargs.pop("request_formatter_kwargs", None) + kwargs["data_request_formatter"] = ( request_type if not request_formatter_kwargs else {"request_type": request_type, **request_formatter_kwargs} ) + kwargs["data"] = cli_tools.format_list_arg( + kwargs.get("data"), default=[], simplify_single=False + ) + kwargs["data_args"] = cli_tools.format_list_arg( + kwargs.get("data_args"), default=[], simplify_single=False + ) + kwargs["rate"] = cli_tools.format_list_arg( + kwargs.get("rate"), default=None, simplify_single=True + ) + + disable_console_outputs = kwargs.pop("disable_console_outputs", False) + display_scheduler_stats = kwargs.pop("display_scheduler_stats", False) + disable_progress = kwargs.pop("disable_progress", False) + + try: + args = BenchmarkGenerativeTextArgs.create( + scenario=kwargs.pop("scenario", None), **kwargs + ) + except ValidationError as err: + # Translate pydantic valdation error to click argument error + errs = err.errors(include_url=False, include_context=True, include_input=True) + param_name = "--" + str(errs[0]["loc"][0]).replace("_", "-") + raise click.BadParameter( + errs[0]["msg"], ctx=click.get_current_context(), param_hint=param_name + ) from err if uvloop is not None: asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) asyncio.run( benchmark_generative_text( - target=target, - data=list(data), - # Benchmark configuration - profile=profile, - rate=rate, - # Backend configuration - backend=backend, - backend_kwargs=backend_kwargs, - model=model, - # Data configuration - processor=processor, - processor_args=processor_args, - data_args=data_args, - data_samples=data_samples, - data_column_mapper=data_column_mappings, - data_request_formatter=data_request_formatter, - data_sampler=data_sampler, - data_num_workers=data_num_workers, - dataloader_kwargs=dataloader_kwargs, - random_seed=random_seed, - # Output configuration - output_path=output_path, - output_formats=output_formats, - # Updates configuration + args=args, progress=( GenerativeConsoleBenchmarkerProgress( display_scheduler_stats=display_scheduler_stats @@ -511,22 +414,18 @@ def run( if not disable_progress else None ), - print_updates=not disable_console_outputs, - # Benchmarker configuration - sample_requests=sample_requests, - warmup=warmup, - cooldown=cooldown, - # Constraints configuration - max_seconds=max_seconds, - max_requests=max_requests, - max_errors=max_errors, - max_error_rate=max_error_rate, - max_global_error_rate=max_global_error_rate, + console=Console() if not disable_console_outputs else None, ) ) -@benchmark.command("from-file", help="Load a saved benchmark report.") +@benchmark.command( + "from-file", + help=( + "Load a saved benchmark report and optionally re-export to other formats. " + "PATH: Path to the saved benchmark report file (default: ./benchmarks.json)." + ), +) @click.argument( "path", type=click.Path(file_okay=True, dir_okay=False, exists=True), @@ -537,13 +436,9 @@ def run( type=click.Path(), default=Path.cwd(), help=( - "Allows re-exporting the benchmarks to other formats. " - "The path to save the output formats to, if the format is a file type. " - "If it is a directory, it will save all output formats selected under it. " - "If it is a file, it will save the corresponding output format to that file. 
" - "Any output formats that were given that do not match the file extension will " - "be saved in the parent directory of the file path. " - "Defaults to the current working directory. " + "Directory or file path to save re-exported benchmark results. " + "If a directory, all output formats will be saved there. " + "If a file, the matching format will be saved to that file." ), ) @click.option( @@ -551,57 +446,33 @@ def run( multiple=True, type=str, default=("console", "json"), # ("console", "json", "html", "csv") - help=( - "The output formats to use for the benchmark results. " - "Defaults to console, json, html, and csv where the file formats " - "will be saved at the specified output path." - ), + help="Output formats for benchmark results (e.g., console, json, html, csv).", ) def from_file(path, output_path, output_formats): - """ - Load and optionally re-export a previously saved benchmark report. - - Imports benchmark results from a saved file and provides optional conversion - to different output formats. Supports JSON, YAML, and CSV export formats - based on the output file extension. - """ asyncio.run(reimport_benchmarks_report(path, output_path, output_formats)) @cli.command( - short_help="Prints environment variable settings.", - help=( - "Print out the available configuration settings that can be set " - "through environment variables." - ), + short_help="Show configuration settings.", + help="Display environment variables for configuring GuideLLM behavior.", ) def config(): - """ - Display available GuideLLM configuration environment variables. - - Prints a comprehensive list of all environment variables that can be used - to configure GuideLLM behavior, including their current values, defaults, - and descriptions. - """ print_config() -@cli.group(help="General preprocessing tools and utilities.") +@cli.group(help="Tools for preprocessing datasets for use in benchmarks.") def preprocess(): - """ - Preprocessing command group for dataset preparation and transformation. - - This command group provides utilities for converting, processing, and - optimizing datasets for use in GuideLLM benchmarks. Includes functionality - for token count adjustments, format conversions, and data validation. - """ + """Dataset preprocessing utilities.""" @preprocess.command( + "dataset", help=( - "Convert a dataset to have specific prompt and output token sizes.\n" - "DATA: Path to the input dataset or dataset ID.\n" - "OUTPUT_PATH: Path to save the converted dataset, including file suffix." + "Process a dataset to have specific prompt and output token sizes. " + "Supports multiple strategies for handling prompts and optional " + "Hugging Face Hub upload.\n\n" + "DATA: Path to the input dataset or dataset ID.\n\n" + "OUTPUT_PATH: Path to save the processed dataset, including file suffix." ), context_settings={"auto_envvar_prefix": "GUIDELLM"}, ) @@ -619,81 +490,70 @@ def preprocess(): "--processor", type=str, required=True, - help=( - "The processor or tokenizer to use to calculate token counts for statistics " - "and synthetic data generation." - ), + help="Processor or tokenizer name for calculating token counts.", ) @click.option( "--processor-args", default=None, callback=cli_tools.parse_json, - help=( - "A JSON string containing any arguments to pass to the processor constructor " - "as a dict with **kwargs." 
- ), + help="JSON string of arguments to pass to the processor constructor.", ) @click.option( "--data-args", callback=cli_tools.parse_json, - help=( - "A JSON string containing any arguments to pass to the dataset creation " - "as a dict with **kwargs." - ), + help="JSON string of arguments to pass to dataset creation.", ) @click.option( "--short-prompt-strategy", type=click.Choice([s.value for s in ShortPromptStrategy]), default=ShortPromptStrategy.IGNORE.value, show_default=True, - help="Strategy to handle prompts shorter than the target length. ", + help="Strategy for handling prompts shorter than target length.", ) @click.option( "--pad-char", type=str, default="", callback=decode_escaped_str, - help="The token to pad short prompts with when using the 'pad' strategy.", + help="Character to pad short prompts with when using 'pad' strategy.", ) @click.option( "--concat-delimiter", type=str, default="", help=( - "The delimiter to use when concatenating prompts that are too short." - " Used when strategy is 'concatenate'." + "Delimiter for concatenating short prompts (used with 'concatenate' strategy)." ), ) @click.option( "--prompt-tokens", type=str, default=None, - help="Prompt tokens config (JSON, YAML file or key=value string)", + help="Prompt tokens configuration (JSON, YAML file, or key=value string).", ) @click.option( "--output-tokens", type=str, default=None, - help="Output tokens config (JSON, YAML file or key=value string)", + help="Output tokens configuration (JSON, YAML file, or key=value string).", ) @click.option( "--push-to-hub", is_flag=True, - help="Set this flag to push the converted dataset to the Hugging Face Hub.", + help="Push the processed dataset to Hugging Face Hub.", ) @click.option( "--hub-dataset-id", type=str, default=None, - help="The Hugging Face Hub dataset ID to push to. " - "Required if --push-to-hub is used.", + help=("Hugging Face Hub dataset ID for upload (required if --push-to-hub is set)."), ) @click.option( "--random-seed", type=int, default=42, show_default=True, - help="Random seed for prompt token sampling and output tokens sampling.", + help="Random seed for reproducible token sampling.", ) def dataset( data, @@ -710,13 +570,6 @@ def dataset( hub_dataset_id, random_seed, ): - """ - Convert and process datasets for specific prompt and output token requirements. - - Transforms datasets to meet target token length specifications using various - strategies for handling short prompts and output length adjustments. Supports - multiple input formats and can optionally push results to Hugging Face Hub. - """ process_dataset( data=data, output_path=output_path, @@ -734,71 +587,87 @@ def dataset( ) -@cli.command(help="Start the GuideLLM mock OpenAI/vLLM server for testing.") -@click.option("--host", default="127.0.0.1", help="Host to bind the server to") -@click.option("--port", default=8000, type=int, help="Port to bind the server to") -@click.option("--workers", default=1, type=int, help="Number of worker processes") +@cli.command( + "mock-server", + help=( + "Start a mock OpenAI/vLLM-compatible server for testing. " + "Simulates model inference with configurable latency and token generation." 
+ ), +) @click.option( - "--model", default="llama-3.1-8b-instruct", help="The name of the model to mock" + "--host", + default="127.0.0.1", + help="Host address to bind the server to.", +) +@click.option( + "--port", + default=8000, + type=int, + help="Port number to bind the server to.", +) +@click.option( + "--workers", + default=1, + type=int, + help="Number of worker processes.", +) +@click.option( + "--model", + default="llama-3.1-8b-instruct", + help="Name of the model to mock.", +) +@click.option( + "--processor", + default=None, + help="Processor or tokenizer to use for requests.", ) -@click.option("--processor", default=None, help="The processor to use for requests") @click.option( "--request-latency", default=3, type=float, - help="Request latency in seconds for non-streaming requests", + help="Request latency in seconds for non-streaming requests.", ) @click.option( "--request-latency-std", default=0, type=float, - help=( - "Request latency standard deviation (normal distribution) " - "in seconds for non-streaming requests" - ), + help="Request latency standard deviation in seconds (normal distribution).", ) @click.option( "--ttft-ms", default=150, type=float, - help="Time to first token in milliseconds for streaming requests", + help="Time to first token in milliseconds for streaming requests.", ) @click.option( "--ttft-ms-std", default=0, type=float, - help=( - "Time to first token standard deviation (normal distribution) in milliseconds" - ), + help="Time to first token standard deviation in milliseconds.", ) @click.option( "--itl-ms", default=10, type=float, - help="Inter token latency in milliseconds for streaming requests", + help="Inter-token latency in milliseconds for streaming requests.", ) @click.option( "--itl-ms-std", default=0, type=float, - help=( - "Inter token latency standard deviation (normal distribution) " - "in milliseconds for streaming requests" - ), + help="Inter-token latency standard deviation in milliseconds.", ) @click.option( "--output-tokens", default=128, type=int, - help="Output tokens for streaming requests", + help="Number of output tokens for streaming requests.", ) @click.option( "--output-tokens-std", default=0, type=float, - help=( - "Output tokens standard deviation (normal distribution) for streaming requests" - ), + help="Output tokens standard deviation (normal distribution).", ) def mock_server( host: str, @@ -815,15 +684,6 @@ def mock_server( output_tokens: int, output_tokens_std: float, ): - """ - Start a GuideLLM mock OpenAI/vLLM-compatible server for testing and development. - - Launches a mock server that simulates model inference with configurable latency - characteristics, token generation patterns, and response timing. Useful for - testing GuideLLM benchmarks without requiring actual model deployment or for - development scenarios requiring predictable server behavior. - """ - config = MockServerConfig( host=host, port=port, diff --git a/src/guidellm/benchmark/__init__.py b/src/guidellm/benchmark/__init__.py index 4c7cc4a5..ef7b2900 100644 --- a/src/guidellm/benchmark/__init__.py +++ b/src/guidellm/benchmark/__init__.py @@ -1,3 +1,15 @@ +""" +Benchmark execution and performance analysis framework. + +Provides comprehensive benchmarking capabilities for LLM inference workloads, +including profile-based execution strategies, metrics collection and aggregation, +progress tracking, and multi-format output generation. 
Supports synchronous, +asynchronous, concurrent, sweep, and throughput-based benchmarking profiles for +evaluating model performance under various load conditions. +""" + +from __future__ import annotations + from .benchmarker import Benchmarker from .entrypoints import benchmark_generative_text, reimport_benchmarks_report from .output import ( @@ -16,10 +28,12 @@ ThroughputProfile, ) from .progress import BenchmarkerProgress, GenerativeConsoleBenchmarkerProgress +from .scenarios import get_builtin_scenarios from .schemas import ( Benchmark, - BenchmarkArgs, + BenchmarkerArgs, BenchmarkerDict, + BenchmarkGenerativeTextArgs, BenchmarkSchedulerStats, EstimatedBenchmarkState, GenerativeAudioMetricsSummary, @@ -35,9 +49,10 @@ __all__ = [ "AsyncProfile", "Benchmark", - "BenchmarkArgs", + "BenchmarkGenerativeTextArgs", "BenchmarkSchedulerStats", "Benchmarker", + "BenchmarkerArgs", "BenchmarkerDict", "BenchmarkerProgress", "ConcurrentProfile", @@ -61,7 +76,6 @@ "SynchronousProfile", "ThroughputProfile", "benchmark_generative_text", - "enable_scenarios", "get_builtin_scenarios", "reimport_benchmarks_report", ] diff --git a/src/guidellm/benchmark/benchmarker.py b/src/guidellm/benchmark/benchmarker.py index 6a5a5627..35b9cbf1 100644 --- a/src/guidellm/benchmark/benchmarker.py +++ b/src/guidellm/benchmark/benchmarker.py @@ -3,16 +3,9 @@ Provides the core benchmarking engine that coordinates request scheduling, data aggregation, and result compilation across different execution strategies -and environments. - -Classes: - Benchmarker: Abstract benchmark orchestrator for request processing workflows. - -Type Variables: - BenchmarkT: Generic benchmark result type. - RequestT: Generic request object type. - RequestTimingsT: Generic request timing object type. - ResponseT: Generic response object type. +and environments. The Benchmarker acts as the primary workflow coordinator, +managing the complete benchmark lifecycle from request submission through +result compilation while supporting thread-safe singleton operations. """ from __future__ import annotations @@ -25,7 +18,7 @@ from guidellm.benchmark.profile import Profile from guidellm.benchmark.progress import BenchmarkerProgress from guidellm.benchmark.schemas import ( - BenchmarkArgs, + BenchmarkerArgs, BenchmarkT, EstimatedBenchmarkState, ) @@ -50,12 +43,11 @@ class Benchmarker( """ Abstract benchmark orchestrator for request processing workflows. - Coordinates the execution of benchmarking runs across different scheduling + Coordinates execution of benchmarking runs across different scheduling strategies, aggregating metrics and compiling results. Manages the complete - benchmark lifecycle from request submission through result compilation. - - Implements thread-safe singleton pattern to ensure consistent state across - concurrent benchmark operations. + benchmark lifecycle from request submission through result compilation while + implementing thread-safe singleton pattern to ensure consistent state across + concurrent operations. """ async def run( @@ -74,18 +66,23 @@ async def run( """ Execute benchmark runs across multiple scheduling strategies. - Orchestrates the complete benchmark workflow: iterates through scheduling - strategies from the profile, executes requests through the scheduler, - aggregates metrics, and compiles final benchmark results. - - :param requests: Request datasets for processing across strategies. - :param backend: Backend interface for request processing. 
- :param profile: Benchmark profile defining strategies and constraints. - :param environment: Execution environment for coordination. - :param benchmark_aggregators: Metric aggregation functions by name. - :param benchmark_class: Class for constructing final benchmark objects. - :yield: Tuples of (metrics_update, benchmark_result, strategy, state). - :raises Exception: If benchmark execution or compilation fails. + Orchestrates the complete benchmark workflow by iterating through scheduling + strategies from the profile, executing requests through the scheduler, + aggregating metrics, and compiling final benchmark results. + + :param benchmark_class: Class for constructing final benchmark objects + :param requests: Request datasets for processing across strategies + :param backend: Backend interface for request processing + :param profile: Benchmark profile defining strategies and constraints + :param environment: Execution environment for coordination + :param progress: Optional progress tracker for benchmark lifecycle events + :param sample_requests: Number of sample requests to use for estimation + :param warmup: Optional warmup duration in seconds before benchmarking + :param cooldown: Optional cooldown duration in seconds after benchmarking + :param prefer_response_metrics: Whether to prefer response-based metrics over + request-based metrics + :yield: Compiled benchmark results for each strategy execution + :raises Exception: If benchmark execution or compilation fails """ with self.thread_lock: if progress: @@ -99,7 +96,7 @@ async def run( if progress: await progress.on_benchmark_start(strategy) - args = BenchmarkArgs( + args = BenchmarkerArgs( run_id=run_id, run_index=len(profile.completed_strategies), sample_requests=sample_requests, @@ -137,7 +134,7 @@ async def run( await progress.on_benchmark_update( estimated_state, scheduler_state ) - except Exception as err: + except Exception as err: # noqa: BLE001 logger.error( f"Error updating benchmark estimate/progress: {err}" ) diff --git a/src/guidellm/benchmark/entrypoints.py b/src/guidellm/benchmark/entrypoints.py index 61dfa680..1962f552 100644 --- a/src/guidellm/benchmark/entrypoints.py +++ b/src/guidellm/benchmark/entrypoints.py @@ -1,3 +1,15 @@ +""" +High-level entry points for executing generative text benchmarks. + +This module provides the primary interface for running generative text benchmarks +through the `benchmark_generative_text` function and re-importing existing benchmark +reports via `reimport_benchmarks_report`. It orchestrates the initialization and +coordination of backends, data loaders, profiles, and output formats to execute +comprehensive benchmarking workflows. The module handles all resolution logic for +converting user-provided arguments into fully configured components ready for +benchmarking execution. 
+""" + from __future__ import annotations from collections.abc import Callable @@ -5,14 +17,19 @@ from typing import Any, Literal from torch.utils.data import Sampler +from transformers import PreTrainedTokenizerBase +from typing_extensions import TypeAliasType from guidellm.backends import Backend, BackendType from guidellm.benchmark.benchmarker import Benchmarker from guidellm.benchmark.output import GenerativeBenchmarkerOutput from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.progress import BenchmarkerProgress -from guidellm.benchmark.schemas import GenerativeBenchmark, GenerativeBenchmarksReport -from guidellm.benchmark.types import OutputFormatT, ProcessorInputT +from guidellm.benchmark.progress import GenerativeConsoleBenchmarkerProgress +from guidellm.benchmark.schemas import ( + BenchmarkGenerativeTextArgs, + GenerativeBenchmark, + GenerativeBenchmarksReport, +) from guidellm.data import ( DataLoader, DatasetPreprocessor, @@ -35,12 +52,17 @@ ] -# Helper Variables - -_CURRENT_WORKING_DIR = Path.cwd() +# Helper Functions +OutputFormatT = TypeAliasType( + "OutputFormatT", + tuple[str, ...] + | list[str] + | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] + | None, +) -# Helper Functions +ProcessorInputT = TypeAliasType("ProcessorInputT", str | Path | PreTrainedTokenizerBase) async def resolve_backend( @@ -50,6 +72,16 @@ async def resolve_backend( console: Console | None = None, **backend_kwargs: dict[str, Any], ) -> tuple[Backend, str | None]: + """ + Initialize and validate a backend instance for benchmarking. + + :param backend: Backend type identifier or pre-configured Backend instance + :param target: Target endpoint URL or connection string for the backend + :param model: Model identifier to use with the backend, or None to use default + :param console: Console instance for progress reporting, or None + :param backend_kwargs: Additional keyword arguments passed to backend initialization + :return: Tuple of initialized Backend instance and resolved model identifier + """ console_step = ( console.print_update_step(title=f"Initializing backend {backend}") if console @@ -94,6 +126,14 @@ async def resolve_processor( model: str | None, console: Console | None = None, ) -> ProcessorInputT | None: + """ + Resolve the processor for tokenization, defaulting to model if not provided. + + :param processor: Processor identifier, path, tokenizer instance, or None + :param model: Model identifier to use as fallback processor + :param console: Console instance for progress reporting, or None + :return: Resolved processor or None if neither processor nor model provided + """ console_step = ( console.print_update_step(title=f"Resolving processor {processor}") if console @@ -137,6 +177,25 @@ async def resolve_request_loader( console: Console | None = None, **dataloader_kwargs: dict[str, Any] | None, ) -> DataLoader[GenerationRequest]: + """ + Construct a DataLoader for GenerationRequest objects from raw data inputs. 
+ + :param data: List of data sources to load requests from + :param model: Model identifier for request formatting + :param data_args: Arguments for each data source in the data list + :param data_samples: Number of samples to draw from the dataset + :param processor: Processor for tokenization operations + :param processor_args: Arguments for processor initialization + :param data_column_mapper: Preprocessor or mapping for standardizing column names + :param data_request_formatter: Preprocessor or config for formatting requests + :param data_collator: Collation function or type for batching requests + :param data_sampler: Sampler instance or type for data sampling + :param data_num_workers: Number of worker processes for data loading + :param random_seed: Seed for reproducible random operations + :param console: Console instance for progress reporting, or None + :param dataloader_kwargs: Additional arguments passed to DataLoader initialization + :return: Configured DataLoader instance for GenerationRequest objects + """ console_step = ( console.print_update_step(title=f"Initializing request loader from {data}") if console @@ -210,6 +269,22 @@ async def resolve_profile( max_global_error_rate: float | None, console: Console | None = None, ) -> Profile: + """ + Resolve and configure a benchmark profile with rate and constraint settings. + + :param profile: Profile type identifier or pre-configured Profile instance + :param rate: Request rate(s) for the benchmark execution + :param random_seed: Seed for reproducible random operations + :param constraints: Dictionary of constraint initializers for benchmark limits + :param max_seconds: Maximum duration in seconds for the benchmark + :param max_requests: Maximum number of requests to process + :param max_errors: Maximum number of errors before stopping + :param max_error_rate: Maximum error rate threshold before stopping + :param max_global_error_rate: Maximum global error rate threshold before stopping + :param console: Console instance for progress reporting, or None + :return: Configured Profile instance ready for benchmarking + :raises ValueError: If constraints are provided with a pre-configured Profile + """ console_step = ( console.print_update_step(title=f"Resolving profile {profile}") if console @@ -253,6 +328,14 @@ async def resolve_output_formats( output_path: str | Path | None, console: Console | None = None, ) -> dict[str, GenerativeBenchmarkerOutput]: + """ + Resolve output format specifications into configured output handler instances. 
+ + :param output_formats: Specification of desired output formats + :param output_path: Base path for output file generation, or None for default + :param console: Console instance for progress reporting, or None + :return: Dictionary mapping format names to configured output handler instances + """ console_step = ( console.print_update_step(title="Resolving output formats") if console else None ) @@ -271,120 +354,93 @@ async def resolve_output_formats( return resolved -async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 - # Required - target: str, - data: list[Any], - # Benchmark configuration - profile: StrategyType | ProfileType | Profile = "sweep", - rate: float | list[float] | None = None, - # Backend configuration - backend: BackendType | Backend = "openai_http", - backend_kwargs: dict[str, Any] | None = None, - model: str | None = None, - # Data configuration - processor: ProcessorInputT | None = None, - processor_args: dict[str, Any] | None = None, - data_args: list[dict[str, Any]] | None = None, - data_samples: int = -1, - data_column_mapper: ( - DatasetPreprocessor | dict[str, str] | Literal["generative_column_mapper"] - ) = "generative_column_mapper", - data_request_formatter: ( - DatasetPreprocessor | dict[str, str] | str - ) = "chat_completions", - data_collator: Callable | Literal["generative"] | None = "generative", - data_sampler: Sampler[int] | Literal["shuffle"] | None = None, - data_num_workers: int | None = None, - dataloader_kwargs: dict[str, Any] | None = None, - random_seed: int = 42, - # Output configuration - output_path: str | Path | None = _CURRENT_WORKING_DIR, - output_formats: ( - tuple[str, ...] - | list[str] - | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput] - | None - ) = ("console", "json", "html", "csv"), - # Updates configuration - progress: BenchmarkerProgress | None = None, - print_updates: bool = False, - # Benchmarker configuration - benchmark_cls: type[GenerativeBenchmark] = GenerativeBenchmark, - sample_requests: int | None = 10, - warmup: float | None = None, - cooldown: float | None = None, - # Constraints configuration - max_seconds: int | float | None = None, - max_requests: int | None = None, - max_errors: int | None = None, - max_error_rate: float | None = None, - max_global_error_rate: float | None = None, +# Main Entrypoints Functions + + +async def benchmark_generative_text( + args: BenchmarkGenerativeTextArgs, + progress: GenerativeConsoleBenchmarkerProgress | None = None, + console: Console | None = None, **constraints: dict[str, ConstraintInitializer | Any], ) -> tuple[GenerativeBenchmarksReport, dict[str, Any]]: - console = Console(quiet=not print_updates) + """ + Execute a comprehensive generative text benchmarking workflow. + + Orchestrates the full benchmarking pipeline by resolving all components (backend, + data loader, profile, outputs) from provided arguments, executing the benchmark + runs, and finalizing results in the specified output formats. 
+ + :param args: Configuration arguments for the benchmark execution + :param progress: Progress tracker for benchmark execution, or None for no tracking + :param console: Console instance for status reporting, or None for silent operation + :param constraints: Additional constraint initializers for benchmark limits + :return: Tuple of GenerativeBenchmarksReport and dictionary of output format results + """ backend, model = await resolve_backend( - backend=backend, - target=target, - model=model, + backend=args.backend, + target=args.target, + model=args.model, console=console, - **(backend_kwargs or {}), + **(args.backend_kwargs or {}), ) processor = await resolve_processor( - processor=processor, model=model, console=console + processor=args.processor, model=model, console=console ) request_loader = await resolve_request_loader( - data=data, + data=args.data, model=model, - data_args=data_args, - data_samples=data_samples, + data_args=args.data_args, + data_samples=args.data_samples, processor=processor, - processor_args=processor_args, - data_column_mapper=data_column_mapper, - data_request_formatter=data_request_formatter, - data_collator=data_collator, - data_sampler=data_sampler, - data_num_workers=data_num_workers, - random_seed=random_seed, + processor_args=args.processor_args, + data_column_mapper=args.data_column_mapper, + data_request_formatter=args.data_request_formatter, + data_collator=args.data_collator, + data_sampler=args.data_sampler, + data_num_workers=args.data_num_workers, + random_seed=args.random_seed, console=console, - **(dataloader_kwargs or {}), + **(args.dataloader_kwargs or {}), ) profile = await resolve_profile( - profile=profile, - rate=rate, - random_seed=random_seed, + profile=args.profile, + rate=args.rate, + random_seed=args.random_seed, constraints=constraints, - max_seconds=max_seconds, - max_requests=max_requests, - max_errors=max_errors, - max_error_rate=max_error_rate, - max_global_error_rate=max_global_error_rate, + max_seconds=args.max_seconds, + max_requests=args.max_requests, + max_errors=args.max_errors, + max_error_rate=args.max_error_rate, + max_global_error_rate=args.max_global_error_rate, console=console, ) output_formats = await resolve_output_formats( - output_formats=output_formats, output_path=output_path, console=console + output_formats=args.output_formats, + output_path=args.output_path, + console=console, ) - report = GenerativeBenchmarksReport() - console.print_update( - title="Setup complete, starting benchmarks...", status="success" - ) - console.print("\n\n") + report = GenerativeBenchmarksReport(args=args) + if console: + console.print_update( + title="Setup complete, starting benchmarks...", status="success" + ) + console.print("\n\n") benchmarker: Benchmarker[ GenerativeBenchmark, GenerationRequest, GenerationResponse ] = Benchmarker() async for benchmark in benchmarker.run( - benchmark_class=benchmark_cls, + benchmark_class=args.benchmark_cls, requests=request_loader, backend=backend, profile=profile, environment=NonDistributedEnvironment(), progress=progress, - sample_requests=sample_requests, - warmup=warmup, - cooldown=cooldown, - prefer_response_metrics=True, + sample_requests=args.sample_requests, + warmup=args.warmup, + cooldown=args.cooldown, + prefer_response_metrics=args.prefer_response_metrics, ): if benchmark: report.benchmarks.append(benchmark) @@ -394,13 +450,17 @@ async def benchmark_generative_text( # noqa: C901, PLR0915, PLR0912 output_result = await output.finalize(report) output_format_results[key] = 
output_result - console.print("\n\n") - console.print_update( - title=f"Benchmarking complete, generated {len(report.benchmarks)} benchmark(s)", - status="success", - ) - for key, value in output_format_results.items(): - console.print_update(title=f" {key:<8}: {value}", status="debug") + if console: + console.print("\n\n") + console.print_update( + title=( + "Benchmarking complete, generated " + f"{len(report.benchmarks)} benchmark(s)" + ), + status="success", + ) + for key, value in output_format_results.items(): + console.print_update(title=f" {key:<8}: {value}", status="debug") return report, output_format_results @@ -411,9 +471,12 @@ async def reimport_benchmarks_report( output_formats: OutputFormatT = ("console", "json", "html", "csv"), ) -> tuple[GenerativeBenchmarksReport, dict[str, Any]]: """ - The command-line entry point for re-importing and displaying an - existing benchmarks report. Can also specify an output format. - Assumes the file provided exists. + Load and re-export an existing benchmarks report in specified formats. + + :param file: Path to the existing benchmark report file to load + :param output_path: Base path for output file generation, or None for default + :param output_formats: Specification of desired output formats for the report + :return: Tuple of loaded GenerativeBenchmarksReport and dictionary of output results """ console = Console() diff --git a/src/guidellm/benchmark/profile.py b/src/guidellm/benchmark/profile.py index 8564afde..4b3f36fd 100644 --- a/src/guidellm/benchmark/profile.py +++ b/src/guidellm/benchmark/profile.py @@ -1,32 +1,17 @@ """ -Benchmarking profile configurations for coordinating multi-strategy execution. - -Provides configurable profile abstractions for orchestrating sequential and -parallel execution of different scheduling strategies during benchmarking, -with automatic strategy generation and constraint management. - -Classes: - Profile: Abstract base for multi-strategy benchmarking profiles. - SynchronousProfile: Single synchronous strategy execution profile. - ConcurrentProfile: Fixed-concurrency strategy execution profile. - ThroughputProfile: Maximum throughput strategy execution profile. - AsyncProfile: Rate-based asynchronous strategy execution profile. - SweepProfile: Adaptive multi-strategy sweep execution profile. - -Type Aliases: - ProfileType: Literal type for supported profile configurations. +Profile configurations for orchestrating multi-strategy benchmark execution. + +Provides configurable abstractions for coordinating sequential execution of +scheduling strategies during benchmarking workflows. Profiles automatically +generate strategies based on configuration parameters, manage runtime +constraints, and track completion state across the execution sequence. """ from __future__ import annotations from abc import ABC, abstractmethod from collections.abc import Generator -from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - Literal, -) +from typing import TYPE_CHECKING, Any, ClassVar, Literal import numpy as np from pydantic import ( @@ -75,11 +60,14 @@ class Profile( ABC, ): """ - Abstract base for multi-strategy benchmarking execution profiles. + Abstract base for coordinating multi-strategy benchmark execution. - Coordinates sequential execution of scheduling strategies with automatic - strategy generation, constraint management, and completion tracking for - comprehensive benchmarking workflows. 
+ Manages sequential execution of scheduling strategies with automatic strategy + generation, constraint management, and completion tracking. Subclasses define + specific execution patterns like synchronous, concurrent, throughput-focused, + rate-based async, or adaptive sweep profiles. + + :cvar schema_discriminator: Field name used for polymorphic deserialization """ schema_discriminator: ClassVar[str] = "type_" @@ -100,14 +88,14 @@ def create( **kwargs: Any, ) -> Profile: """ - Create a profile instance based on the specified type. + Factory method to create a profile instance based on type. - :param rate_type: The type of profile to create. - :param rate: Rate parameter for profile configuration. - :param random_seed: Random seed for stochastic strategies. - :param kwargs: Additional arguments for profile configuration. - :return: Configured profile instance for the specified type. - :raises ValueError: If the profile type is not registered. + :param rate_type: Profile type identifier to instantiate + :param rate: Rate configuration for the profile strategy + :param random_seed: Seed for stochastic strategy reproducibility + :param kwargs: Additional profile-specific configuration parameters + :return: Configured profile instance for the specified type + :raises ValueError: If rate_type is not registered """ profile_class: type[Profile] = cls.get_registered_object(rate_type) resolved_kwargs = profile_class.resolve_args( @@ -128,33 +116,31 @@ def resolve_args( """ Resolve and validate arguments for profile construction. - :param rate_type: The type of the profile. - :param rate: Rate parameter for configuration. - :param random_seed: Random seed for stochastic strategies. - :param kwargs: Additional arguments to resolve. - :return: Dictionary of resolved arguments for profile construction. + :param rate_type: Profile type identifier + :param rate: Rate configuration parameter + :param random_seed: Seed for stochastic strategies + :param kwargs: Additional arguments to resolve and validate + :return: Resolved arguments dictionary for profile initialization """ ... type_: Literal["profile"] = Field( - description="The type of benchmarking profile to use", + description="Profile type discriminator for polymorphic serialization", ) completed_strategies: list[SchedulingStrategy] = Field( default_factory=list, - description="The strategies that have completed execution", + description="Strategies that have completed execution in this profile", ) constraints: dict[str, Any | dict[str, Any] | ConstraintInitializer] | None = Field( default=None, - description="Runtime constraints to apply during strategy execution", + description="Runtime constraints applied to strategy execution", ) @computed_field # type: ignore[misc] @property def strategy_types(self) -> list[StrategyType]: """ - :return: List of all strategy types expected to be executed or have been - executed in this profile. By default, this returns just the - completed strategies. + :return: Strategy types executed or expected to execute in this profile """ return [strat.type_ for strat in self.completed_strategies] @@ -169,10 +155,10 @@ def strategies_generator( None, ]: """ - Generate strategies and constraints for sequential profile execution. + Generate strategies and constraints for sequential execution. - :return: Generator yielding (strategy, constraints) tuples and - receiving benchmark results from each execution. 
+ :return: Generator yielding (strategy, constraints) tuples and receiving + benchmark results after each execution """ prev_strategy: SchedulingStrategy | None = None prev_benchmark: Benchmark | None = None @@ -197,11 +183,11 @@ def next_strategy( prev_benchmark: Benchmark | None, ) -> SchedulingStrategy | None: """ - Generate the next strategy to execute in the profile sequence. + Generate the next strategy in the profile execution sequence. - :param prev_strategy: The previously completed strategy. - :param prev_benchmark: Benchmark results from the previous strategy. - :return: Next strategy to execute, or None if profile is complete. + :param prev_strategy: Previously completed strategy instance + :param prev_benchmark: Benchmark results from previous strategy execution + :return: Next strategy to execute, or None if profile complete """ ... @@ -214,10 +200,10 @@ def next_strategy_constraints( """ Generate constraints for the next strategy execution. - :param next_strategy: The next strategy to be executed. - :param prev_strategy: The previously completed strategy. - :param prev_benchmark: Benchmark results from the previous strategy. - :return: Constraints dictionary for the next strategy, or None. + :param next_strategy: Strategy to be executed next + :param prev_strategy: Previously completed strategy instance + :param prev_benchmark: Benchmark results from previous strategy execution + :return: Constraints dictionary for next strategy, or None """ _ = (prev_strategy, prev_benchmark) # unused return ( @@ -281,12 +267,12 @@ def resolve_args( """ Resolve arguments for synchronous profile construction. - :param rate_type: The type/strategy of the profile (ignored). - :param rate: Rate parameter (must be None, will be stripped). - :param random_seed: Random seed (ignored and stripped). - :param kwargs: Additional arguments to pass through. - :return: Dictionary of resolved arguments. - :raises ValueError: If rate is not None. + :param rate_type: Profile type identifier (ignored) + :param rate: Rate parameter (must be None) + :param random_seed: Random seed (ignored) + :param kwargs: Additional arguments passed through unchanged + :return: Resolved arguments dictionary + :raises ValueError: If rate is not None """ _ = (rate_type, random_seed) # unused if rate is not None: @@ -297,7 +283,7 @@ def resolve_args( @property def strategy_types(self) -> list[StrategyType]: """ - :return: The single synchronous strategy type. + :return: Single synchronous strategy type """ return [self.type_] @@ -309,9 +295,9 @@ def next_strategy( """ Generate synchronous strategy or None if already completed. - :param prev_strategy: The previously completed strategy (unused). - :param prev_benchmark: Benchmark results from the previous strategy (unused). - :return: SynchronousStrategy for the first execution, None afterward. 
+ :param prev_strategy: Previously completed strategy (unused) + :param prev_benchmark: Benchmark results from previous execution (unused) + :return: SynchronousStrategy for first execution, None afterward """ _ = (prev_strategy, prev_benchmark) # unused if len(self.completed_strategies) >= 1: @@ -326,7 +312,7 @@ class ConcurrentProfile(Profile): type_: Literal["concurrent"] = "concurrent" # type: ignore[assignment] streams: list[PositiveInt] = Field( - description="Number of concurrent streams for request scheduling", + description="Concurrent stream counts for request scheduling", ) startup_duration: NonNegativeFloat = Field( default=0.0, @@ -347,20 +333,23 @@ def resolve_args( """ Resolve arguments for concurrent profile construction. - :param rate_type: The type/strategy of the profile (ignored). - :param rate: Rate parameter, remapped to streams. - :param random_seed: Random seed (ignored and stripped). - :param kwargs: Additional arguments to pass through. - :return: Dictionary of resolved arguments. - :raises ValueError: If rate is None. + :param rate_type: Profile type identifier (ignored) + :param rate: Rate parameter remapped to streams + :param random_seed: Random seed (ignored) + :param kwargs: Additional arguments passed through unchanged + :return: Resolved arguments dictionary + :raises ValueError: If rate is None """ _ = (rate_type, random_seed) # unused - kwargs["streams"] = [int(r) for r in rate] if rate else None + rate = rate if isinstance(rate, list) or rate is None else [rate] + kwargs["streams"] = [int(stream) for stream in rate] if rate else None return kwargs @property def strategy_types(self) -> list[StrategyType]: - """Get concurrent strategy types for each configured stream count.""" + """ + :return: Concurrent strategy types for each configured stream count + """ return [self.type_] * len(self.streams) def next_strategy( @@ -371,9 +360,9 @@ def next_strategy( """ Generate concurrent strategy for the next stream count. - :param prev_strategy: The previously completed strategy (unused). - :param prev_benchmark: Benchmark results from the previous strategy (unused). - :return: ConcurrentStrategy with next stream count, or None if complete. + :param prev_strategy: Previously completed strategy (unused) + :param prev_benchmark: Benchmark results from previous execution (unused) + :return: ConcurrentStrategy with next stream count, or None if complete """ _ = (prev_strategy, prev_benchmark) # unused @@ -395,7 +384,7 @@ class ThroughputProfile(Profile): type_: Literal["throughput"] = "throughput" # type: ignore[assignment] max_concurrency: PositiveInt | None = Field( default=None, - description="Maximum number of concurrent requests to schedule", + description="Maximum concurrent requests to schedule", ) startup_duration: NonNegativeFloat = Field( default=0.0, @@ -416,11 +405,11 @@ def resolve_args( """ Resolve arguments for throughput profile construction. - :param rate_type: The type/strategy of the profile (ignored). - :param rate: Rate parameter to remap to max_concurrency. - :param random_seed: Random seed (ignored and stripped). - :param kwargs: Additional arguments to pass through. - :return: Dictionary of resolved arguments. 
+ :param rate_type: Profile type identifier (ignored) + :param rate: Rate parameter remapped to max_concurrency + :param random_seed: Random seed (ignored) + :param kwargs: Additional arguments passed through unchanged + :return: Resolved arguments dictionary """ _ = (rate_type, random_seed) # unused # Remap rate to max_concurrency, strip out random_seed @@ -431,7 +420,9 @@ def resolve_args( @property def strategy_types(self) -> list[StrategyType]: - """Get the single throughput strategy type.""" + """ + :return: Single throughput strategy type + """ return [self.type_] def next_strategy( @@ -442,9 +433,9 @@ def next_strategy( """ Generate throughput strategy or None if already completed. - :param prev_strategy: The previously completed strategy (unused). - :param prev_benchmark: Benchmark results from the previous strategy (unused). - :return: ThroughputStrategy for the first execution, None afterward. + :param prev_strategy: Previously completed strategy (unused) + :param prev_benchmark: Benchmark results from previous execution (unused) + :return: ThroughputStrategy for first execution, None afterward """ _ = (prev_strategy, prev_benchmark) # unused if len(self.completed_strategies) >= 1: @@ -458,13 +449,11 @@ def next_strategy( @Profile.register(["async", "constant", "poisson"]) class AsyncProfile(Profile): - """ - Rate-based asynchronous strategy execution profile with configurable patterns. - """ + """Rate-based asynchronous strategy execution profile with configurable patterns.""" type_: Literal["async", "constant", "poisson"] = "async" # type: ignore[assignment] strategy_type: Literal["constant", "poisson"] = Field( - description="Type of asynchronous strategy pattern to use", + description="Asynchronous strategy pattern type to use", ) rate: list[PositiveFloat] = Field( description="Request scheduling rate in requests per second", @@ -478,7 +467,7 @@ class AsyncProfile(Profile): ) max_concurrency: PositiveInt | None = Field( default=None, - description="Maximum number of concurrent requests to schedule", + description="Maximum concurrent requests to schedule", ) random_seed: int = Field( default=42, @@ -496,12 +485,12 @@ def resolve_args( """ Resolve arguments for async profile construction. - :param rate_type: The type/strategy of the profile. - :param rate: Rate parameter for the profile. - :param random_seed: Random seed for stochastic strategies. - :param kwargs: Additional arguments to pass through. - :return: Dictionary of resolved arguments. - :raises ValueError: If rate is None. + :param rate_type: Profile type identifier + :param rate: Rate configuration for the profile + :param random_seed: Seed for stochastic strategies + :param kwargs: Additional arguments passed through unchanged + :return: Resolved arguments dictionary + :raises ValueError: If rate is None """ if rate is None: raise ValueError("AsyncProfile requires a rate parameter") @@ -516,13 +505,15 @@ def resolve_args( if rate_type in ["constant", "poisson"] else kwargs.get("strategy_type", "constant") ) - kwargs["rate"] = rate + kwargs["rate"] = rate if isinstance(rate, list) else [rate] kwargs["random_seed"] = random_seed return kwargs @property def strategy_types(self) -> list[StrategyType]: - """Get async strategy types for each configured rate.""" + """ + :return: Async strategy types for each configured rate + """ num_strategies = len(self.rate) return [self.strategy_type] * num_strategies @@ -534,11 +525,11 @@ def next_strategy( """ Generate async strategy for the next configured rate. 
- :param prev_strategy: The previously completed strategy (unused). - :param prev_benchmark: Benchmark results from the previous strategy (unused). + :param prev_strategy: Previously completed strategy (unused) + :param prev_benchmark: Benchmark results from previous execution (unused) :return: AsyncConstantStrategy or AsyncPoissonStrategy for next rate, - or None if all rates completed. - :raises ValueError: If strategy_type is neither 'constant' nor 'poisson'. + or None if all rates completed + :raises ValueError: If strategy_type is neither 'constant' nor 'poisson' """ _ = (prev_strategy, prev_benchmark) # unused @@ -566,9 +557,7 @@ def next_strategy( @Profile.register("sweep") class SweepProfile(Profile): - """ - Adaptive multi-strategy sweep execution profile with rate discovery. - """ + """Adaptive multi-strategy sweep execution profile with rate discovery.""" type_: Literal["sweep"] = "sweep" # type: ignore[assignment] sweep_size: int = Field( @@ -585,7 +574,7 @@ class SweepProfile(Profile): ) max_concurrency: PositiveInt | None = Field( default=None, - description="Maximum number of concurrent requests to schedule", + description="Maximum concurrent requests to schedule", ) random_seed: int = Field( default=42, @@ -605,7 +594,7 @@ class SweepProfile(Profile): ) measured_rates: list[float] = Field( default_factory=list, - description="Calculated interpolated rates between synchronous and throughput", + description="Interpolated rates between synchronous and throughput", ) @classmethod @@ -619,11 +608,11 @@ def resolve_args( """ Resolve arguments for sweep profile construction. - :param rate_type: The type/strategy for async strategies in the sweep. - :param rate: Rate parameter (ignored for sweep). - :param random_seed: Random seed for stochastic strategies. - :param kwargs: Additional arguments to pass through. - :return: Dictionary of resolved arguments. + :param rate_type: Async strategy type for sweep execution + :param rate: Rate parameter specifying sweep size (if provided) + :param random_seed: Seed for stochastic strategies + :param kwargs: Additional arguments passed through unchanged + :return: Resolved arguments dictionary """ sweep_size_from_rate = int(rate[0]) if rate else settings.default_sweep_number kwargs["sweep_size"] = kwargs.get("sweep_size", sweep_size_from_rate) @@ -634,7 +623,9 @@ def resolve_args( @property def strategy_types(self) -> list[StrategyType]: - """Get strategy types for the complete sweep sequence.""" + """ + :return: Strategy types for the complete sweep sequence + """ types = ["synchronous", "throughput"] types += [self.strategy_type] * (self.sweep_size - len(types)) return types @@ -653,13 +644,13 @@ def next_strategy( """ Generate the next strategy in the adaptive sweep sequence. - Executes synchronous and throughput strategies first to measure - baseline rates, then generates interpolated rates for async strategies. + Executes synchronous and throughput strategies first to measure baseline + rates, then generates interpolated rates for async strategies. - :param prev_strategy: The previously completed strategy. - :param prev_benchmark: Benchmark results from the previous strategy. - :return: Next strategy in sweep sequence, or None if complete. - :raises ValueError: If strategy_type is neither 'constant' nor 'poisson'. 
+ :param prev_strategy: Previously completed strategy instance + :param prev_benchmark: Benchmark results from previous strategy execution + :return: Next strategy in sweep sequence, or None if complete + :raises ValueError: If strategy_type is neither 'constant' nor 'poisson' """ if prev_strategy is None: return SynchronousStrategy() diff --git a/src/guidellm/benchmark/scenario.py b/src/guidellm/benchmark/scenario.py deleted file mode 100644 index 59cdef27..00000000 --- a/src/guidellm/benchmark/scenario.py +++ /dev/null @@ -1,169 +0,0 @@ -from __future__ import annotations - -import json -from collections.abc import Callable -from functools import cache, wraps -from inspect import Parameter, signature -from pathlib import Path -from typing import Annotated, Any, Literal, TypeVar - -import yaml -from loguru import logger -from pydantic import BeforeValidator, Field, PositiveFloat, PositiveInt - -from guidellm.backends import Backend, BackendType -from guidellm.benchmark.profile import Profile, ProfileType -from guidellm.benchmark.types import ProcessorInputT -from guidellm.scheduler import StrategyType -from guidellm.utils import StandardBaseModel - -__all__ = [ - "GenerativeTextScenario", - "Scenario", - "enable_scenarios", - "get_builtin_scenarios", -] - -SCENARIO_DIR = Path(__file__).parent / "scenarios/" - - -@cache -def get_builtin_scenarios() -> list[str]: - """Returns list of builtin scenario names.""" - return [p.stem for p in SCENARIO_DIR.glob("*.json")] - - -def parse_float_list(value: str | float | list[float]) -> list[float]: - """ - Parse a comma separated string to a list of float - or convert single float list of one or pass float - list through. - """ - if isinstance(value, int | float): - return [value] - elif isinstance(value, list): - return value - - values = value.split(",") if "," in value else [value] - - try: - return [float(val) for val in values] - except ValueError as err: - raise ValueError( - "must be a number or comma-separated list of numbers." - ) from err - - -T = TypeVar("T", bound="Scenario") - - -class Scenario(StandardBaseModel): - """ - Parent Scenario class with common options for all benchmarking types. - """ - - target: str - - @classmethod - def get_default(cls: type[T], field: str) -> Any: - """Get default values for model fields""" - return cls.model_fields[field].default - - @classmethod - def from_file(cls: type[T], filename: Path, overrides: dict | None = None) -> T: - """ - Attempt to create a new instance of the model using - data loaded from json or yaml file. - """ - try: - with filename.open() as f: - if str(filename).endswith(".json"): - data = json.load(f) - else: # Assume everything else is yaml - data = yaml.safe_load(f) - except (json.JSONDecodeError, yaml.YAMLError) as e: - logger.error(f"Failed to parse {filename} as type {cls.__name__}") - raise ValueError(f"Error when parsing file: {filename}") from e - - data.update(overrides or {}) - return cls.model_validate(data) - - @classmethod - def from_builtin(cls: type[T], name: str, overrides: dict | None = None) -> T: - filename = SCENARIO_DIR / f"{name}.json" - - if not filename.is_file(): - raise ValueError(f"{name} is not a valid builtin scenario") - - return cls.from_file(filename, overrides) - - -class GenerativeTextScenario(Scenario): - """ - Scenario class for generative text benchmarks. 
- """ - - class Config: - # NOTE: This prevents errors due to unvalidatable - # types like PreTrainedTokenizerBase - arbitrary_types_allowed = True - - data: Any - profile: StrategyType | ProfileType | Profile - rate: Annotated[list[PositiveFloat] | None, BeforeValidator(parse_float_list)] = ( - None - ) - random_seed: int = 42 - # Backend configuration - backend: BackendType | Backend = "openai_http" - backend_kwargs: dict[str, Any] | None = None - model: str | None = None - # Data configuration - processor: ProcessorInputT | None = None - processor_args: dict[str, Any] | None = None - data_args: dict[str, Any] | None = None - data_sampler: Literal["random"] | None = None - # Aggregators configuration - warmup: Annotated[float | None, Field(gt=0, le=1)] = None - cooldown: Annotated[float | None, Field(gt=0, le=1)] = None - request_samples: PositiveInt | None = 20 - # Constraints configuration - max_seconds: PositiveFloat | PositiveInt | None = None - max_requests: PositiveInt | None = None - max_errors: PositiveInt | None = None - max_error_rate: PositiveFloat | None = None - max_global_error_rate: PositiveFloat | None = None - - -# Decorator function to apply scenario to a function -def enable_scenarios(func: Callable) -> Any: - @wraps(func) - async def decorator(*args, scenario: Scenario | None = None, **kwargs) -> Any: - if scenario is not None: - kwargs.update(scenario.model_dump()) - return await func(*args, **kwargs) - - # Modify the signature of the decorator to include the `scenario` argument - sig = signature(func) - params = list(sig.parameters.values()) - # Place `scenario` before `**kwargs` or any parameter with a default value - loc = next( - ( - i - for i, p in enumerate(params) - if p.kind is Parameter.VAR_KEYWORD or p.default is not Parameter.empty - ), - len(params), - ) - params.insert( - loc, - Parameter( - "scenario", - Parameter.POSITIONAL_OR_KEYWORD, - default=None, - annotation=Scenario | None, - ), - ) - decorator.__signature__ = sig.replace(parameters=params) # type: ignore [attr-defined] - - return decorator diff --git a/src/guidellm/benchmark/scenarios/__init__.py b/src/guidellm/benchmark/scenarios/__init__.py index e69de29b..030f9bbd 100644 --- a/src/guidellm/benchmark/scenarios/__init__.py +++ b/src/guidellm/benchmark/scenarios/__init__.py @@ -0,0 +1,40 @@ +""" +Builtin benchmark scenario definitions and discovery utilities. + +This module provides access to predefined benchmark scenarios stored as JSON files +within the scenarios directory. It enables discovery and retrieval of builtin +scenarios by name or filename, supporting both stem names (without extension) and +full filenames for flexible scenario loading. +""" + +from __future__ import annotations + +from functools import cache +from pathlib import Path +from typing import Annotated + +__all__ = ["SCENARIO_DIR", "get_builtin_scenarios"] + +SCENARIO_DIR: Annotated[ + Path, + "Directory path containing builtin scenario JSON files", +] = Path(__file__).parent + + +@cache +def get_builtin_scenarios() -> dict[str, Path]: + """ + Retrieve all builtin scenario definitions from the scenarios directory. + + Scans the scenarios directory for JSON files and returns a mapping of scenario + names to their file paths. Each scenario is indexed by both its stem name + (filename without extension) and full filename for convenient lookup. 
+ + :return: Dictionary mapping scenario names and filenames to their Path objects + """ + builtin = {} + for path in SCENARIO_DIR.glob("*.json"): + builtin[path.stem] = path + builtin[path.name] = path + + return builtin diff --git a/src/guidellm/benchmark/scenarios/chat.json b/src/guidellm/benchmark/scenarios/chat.json index 7ed4ce16..58fd18e2 100644 --- a/src/guidellm/benchmark/scenarios/chat.json +++ b/src/guidellm/benchmark/scenarios/chat.json @@ -1,4 +1,6 @@ { "profile": "sweep", - "data": "prompt_tokens=512,prompt_tokens_stdev=128,prompt_tokens_min=1,prompt_tokens_max=1024,output_tokens=256,output_tokens_stdev=64,output_tokens_min=1,output_tokens_max=1024" -} + "data": [ + "prompt_tokens=512,prompt_tokens_stdev=128,prompt_tokens_min=1,prompt_tokens_max=1024,output_tokens=256,output_tokens_stdev=64,output_tokens_min=1,output_tokens_max=1024" + ] +} \ No newline at end of file diff --git a/src/guidellm/benchmark/scenarios/rag.json b/src/guidellm/benchmark/scenarios/rag.json index d790ce60..ea38d76e 100644 --- a/src/guidellm/benchmark/scenarios/rag.json +++ b/src/guidellm/benchmark/scenarios/rag.json @@ -1,4 +1,6 @@ { "profile": "sweep", - "data": "prompt_tokens=4096,prompt_tokens_stdev=512,prompt_tokens_min=2048,prompt_tokens_max=6144,output_tokens=512,output_tokens_stdev=128,output_tokens_min=1,output_tokens_max=1024" -} + "data": [ + "prompt_tokens=4096,prompt_tokens_stdev=512,prompt_tokens_min=2048,prompt_tokens_max=6144,output_tokens=512,output_tokens_stdev=128,output_tokens_min=1,output_tokens_max=1024" + ] +} \ No newline at end of file diff --git a/src/guidellm/benchmark/schemas.py b/src/guidellm/benchmark/schemas.py index 2f2d8f98..9fd09461 100644 --- a/src/guidellm/benchmark/schemas.py +++ b/src/guidellm/benchmark/schemas.py @@ -1,24 +1,13 @@ """ -Benchmark data models and metrics for performance measurement and analysis. +Benchmark data models and metrics for generative AI performance measurement. Provides comprehensive data structures for capturing, storing, and analyzing -benchmark results from scheduler executions. Includes timing measurements, -token statistics, and performance metrics for generative AI workloads. - -Classes: - BenchmarkSchedulerStats: Scheduler timing and performance statistics. - BenchmarkMetrics: Core benchmark metrics and distributions. - BenchmarkRequestStats: Individual request processing statistics. - Benchmark: Base benchmark result container with generic metrics. - GenerativeRequestStats: Request statistics for generative AI workloads. - GenerativeMetrics: Comprehensive metrics for generative benchmarks. - GenerativeBenchmark: Complete generative benchmark results and analysis. - GenerativeBenchmarksReport: Container for multiple benchmark results. - -Type Variables: - BenchmarkMetricsT: Generic benchmark metrics type. - BenchmarkRequestStatsT: Generic request statistics type. - BenchmarkT: Generic benchmark container type. +benchmark results from scheduler-driven generative AI workload executions. +Core abstractions include base benchmark interfaces, generative-specific +metrics with token/latency distributions, request-level statistics tracking, +and multi-benchmark reporting capabilities. These models enable detailed +performance analysis including throughput, latency, concurrency patterns, and +domain-specific metrics for text, image, video, and audio generation tasks. 
""" from __future__ import annotations @@ -28,27 +17,33 @@ import time import uuid from abc import ABC, abstractmethod -from collections.abc import Iterable +from collections.abc import Callable, Iterable from pathlib import Path from typing import Any, ClassVar, Literal, TypeVar, cast import yaml -from pydantic import Field, computed_field - -from guidellm.benchmark.profile import Profile +from pydantic import ConfigDict, Field, computed_field, model_serializer +from torch.utils.data import Sampler +from transformers import PreTrainedTokenizerBase + +from guidellm.backends import Backend, BackendType +from guidellm.benchmark.profile import Profile, ProfileType +from guidellm.benchmark.scenarios import get_builtin_scenarios +from guidellm.data import DatasetPreprocessor from guidellm.scheduler import ( BackendInterface, Environment, SchedulerState, SchedulingStrategy, + StrategyType, ) from guidellm.schemas import ( GenerationRequest, GenerationResponse, GenerativeRequestStats, RequestInfo, + UsageMetrics, ) -from guidellm.schemas.request import UsageMetrics from guidellm.utils import ( InfoMixin, StandardBaseDict, @@ -59,9 +54,10 @@ __all__ = [ "Benchmark", - "BenchmarkArgs", + "BenchmarkGenerativeTextArgs", "BenchmarkSchedulerStats", "BenchmarkT", + "BenchmarkerArgs", "BenchmarkerDict", "EstimatedBenchmarkState", "GenerativeAudioMetricsSummary", @@ -77,6 +73,19 @@ class EstimatedBenchmarkState(dict[str, Any]): + """ + Accumulator for real-time benchmark metrics during scheduler execution. + + Tracks incremental metrics, running averages, and time-based statistics as + requests are processed. Maintains grouped metrics for benchmark state, + benchmark-level metrics, and scheduler-level metrics with support for + average, rate, and time-averaged metric calculations. + + :cvar benchmark_state_group: Metric group key for benchmark state tracking + :cvar benchmark_metrics_group: Metric group key for benchmark-level metrics + :cvar scheduler_state_group: Metric group key for scheduler-level metrics + """ + benchmark_state_group: ClassVar[Literal["benchmark_state"]] = "benchmark_state" benchmark_metrics_group: ClassVar[Literal["benchmark_metrics"]] = ( "benchmark_metrics" @@ -89,6 +98,14 @@ def get_metric( key: str, default: int | float | None = None, ) -> int | float | None: + """ + Retrieve a grouped metric value by group and key. + + :param group: Metric group identifier + :param key: Metric key within the group + :param default: Value returned if metric doesn't exist + :return: The metric value or default if not found + """ return self.get(f"{group}_{key}", default) def set_metric( @@ -98,6 +115,15 @@ def set_metric( value: bool | int | float | None, start_val: bool | int | float | None = None, ) -> bool | int | float | None: + """ + Set a grouped metric value, optionally adjusting by a starting value. + + :param group: Metric group identifier + :param key: Metric key within the group + :param value: Metric value to set + :param start_val: Optional starting value to subtract from the metric value + :return: The adjusted metric value or None if value is None + """ if value is None: return None @@ -115,6 +141,15 @@ def add_avg_metric( start_val: bool | int | float | None = 0.0, count: int | None = 1, ): + """ + Add a value to a running average metric calculation. 
+ + :param group: Metric group identifier + :param key: Metric key within the group + :param value: Value to add to the average + :param start_val: Optional starting value to subtract before adding + :param count: Number of observations this value represents + """ if value is None or count is None: return @@ -143,6 +178,17 @@ def add_avg_rate_metric( end_time: float | None = None, numerator_type: Literal["avg", "total", "count"] = "total", ): + """ + Add a value to a rate-based average metric calculation. + + :param group: Metric group identifier + :param key: Metric key within the group + :param value: Value to add to the average + :param start_val: Optional starting value to subtract before adding + :param start_time: Start time for rate calculation, defaults to current time + :param end_time: End time for rate calculation, defaults to current time + :param numerator_type: Type of numerator for rate calculation + """ if value is None: return @@ -183,6 +229,14 @@ def add_time_averaged_metric( value: bool | int | float | None, recorded_time: float | None = None, ): + """ + Add a value to a time-weighted average metric calculation. + + :param group: Metric group identifier + :param key: Metric key within the group + :param value: Value to add to the time-weighted average + :param recorded_time: Time of the observation, defaults to current time + """ if value is None: return @@ -218,7 +272,16 @@ def add_time_averaged_metric( ) -class BenchmarkArgs(StandardBaseDict): +class BenchmarkerArgs(StandardBaseDict): + """ + Configuration parameters for benchmark execution and request sampling. + + Defines run identification, request sampling strategy, warmup/cooldown phases, + and metric preferences for benchmark executions. Provides methods to determine + whether a request falls within warmup or cooldown periods based on time, + request count, or percentage-based thresholds. + """ + run_id: str = Field( default_factory=lambda: str(uuid.uuid4()), description="Unique identifier for the benchmark run", @@ -226,7 +289,9 @@ class BenchmarkArgs(StandardBaseDict): run_index: int = Field(default=0, description="Index of the benchmark run") sample_requests: int | None = Field( default=20, - description="Number of requests to sample and keep in the final benchmark for metrics", + description=( + "Number of requests to sample and keep in the final benchmark for metrics" + ), ) warmup: int | float | None = Field( default=None, description="Warmup time before benchmarking starts" @@ -242,6 +307,13 @@ class BenchmarkArgs(StandardBaseDict): def is_in_warmup( self, request_info: RequestInfo, scheduler_state: SchedulerState ) -> bool: + """ + Check if a request is in the warmup phase. + + :param request_info: Information about the current request + :param scheduler_state: Current state of the scheduler + :return: True if the request is in warmup phase, False otherwise + """ if self.warmup is not None and 0 < self.warmup < 1: # Percentage-based warmup return ( @@ -265,6 +337,13 @@ def is_in_warmup( def is_in_cooldown( self, request_info: RequestInfo, scheduler_state: SchedulerState ) -> bool: + """ + Check if a request is in the cooldown phase. 
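+
+        For example, a fractional value such as ``cooldown=0.1`` marks the final
+        10% of the run as cooldown, while larger values act as an absolute
+        threshold in seconds or requests.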
+ + :param request_info: Information about the current request + :param scheduler_state: Current state of the scheduler + :return: True if the request is in cooldown phase, False otherwise + """ if self.cooldown is not None and 0 < self.cooldown < 1: # Percentage-based cooldown return ( @@ -293,10 +372,24 @@ def is_in_cooldown( class Benchmark(ABC): + """ + Abstract base interface for benchmark result implementations. + + Defines the contract for benchmark classes to provide run metrics sampling, + request metrics sampling, real-time estimate updates, and final compilation + of benchmark results from scheduler execution data. + """ + @abstractmethod def get_run_metrics_sample( self, - ) -> dict[Literal["start_time", "end_time", "duration"], float]: ... + ) -> dict[Literal["start_time", "end_time", "duration"], float]: + """ + Get a sample of run-level timing metrics. + + :return: Dictionary containing start_time, end_time, and duration metrics + """ + ... @abstractmethod def get_request_metrics_sample( @@ -309,25 +402,43 @@ def get_request_metrics_sample( "request_concurrency", ], float, - ]: ... + ]: + """ + Get a sample of request-level performance metrics. + + :return: Dictionary containing request count, latency, throughput, and + concurrency metrics + """ + ... @classmethod @abstractmethod def update_estimate( cls, - args: BenchmarkArgs, + args: BenchmarkerArgs, state: EstimatedBenchmarkState, response: Any, request: Any, request_info: RequestInfo, scheduler_state: SchedulerState, - ): ... + ): + """ + Update real-time benchmark estimates with new request data. + + :param args: Benchmark configuration arguments + :param state: Current estimated benchmark state to update + :param response: Response received from the backend + :param request: Original request sent to the backend + :param request_info: Metadata about the request execution + :param scheduler_state: Current state of the scheduler + """ + ... @classmethod @abstractmethod def compile( cls, - args: BenchmarkArgs, + args: BenchmarkerArgs, estimated_state: EstimatedBenchmarkState, scheduler_state: SchedulerState, profile: Profile, @@ -336,7 +447,22 @@ def compile( environment: Environment, strategy: SchedulingStrategy, constraints: dict[str, dict[str, Any]], - ) -> Any: ... + ) -> Any: + """ + Compile final benchmark results from accumulated state. + + :param args: Benchmark configuration arguments + :param estimated_state: Accumulated benchmark state from execution + :param scheduler_state: Final state of the scheduler + :param profile: Benchmark profile configuration + :param requests: Collection of requests executed + :param backend: Backend interface used for execution + :param environment: Execution environment configuration + :param strategy: Scheduling strategy used + :param constraints: Execution constraints applied + :return: Compiled benchmark results instance + """ + ... BenchmarkT = TypeVar("BenchmarkT", bound=Benchmark) @@ -382,6 +508,12 @@ class BenchmarkSchedulerStats(StandardBaseDict): @classmethod def update_estimate(cls, state: EstimatedBenchmarkState, request_info: RequestInfo): + """ + Update estimated scheduler statistics with request timing information. 
+ + :param state: Current estimated benchmark state to update + :param request_info: Metadata about the request execution with timing data + """ state.set_metric(group=cls.group_name, key="updated", value=True) state.add_avg_metric( group=cls.group_name, @@ -442,6 +574,13 @@ def update_estimate(cls, state: EstimatedBenchmarkState, request_info: RequestIn def compile( cls, estimated_state: EstimatedBenchmarkState, scheduler_state: SchedulerState ) -> BenchmarkSchedulerStats: + """ + Compile final scheduler statistics from accumulated state. + + :param estimated_state: Accumulated benchmark state with scheduler metrics + :param scheduler_state: Final state of the scheduler + :return: Compiled scheduler statistics instance + """ return BenchmarkSchedulerStats( start_time=scheduler_state.start_time, end_time=scheduler_state.end_time or scheduler_state.start_time, @@ -517,17 +656,42 @@ def compile( class GenerativeMetricsSummary(StandardBaseDict): - input: StatusDistributionSummary = Field(description="") - input_per_second: StatusDistributionSummary = Field(description="") - input_concurrency: StatusDistributionSummary = Field(description="") + """ + Statistical summaries for input, output, and total metrics. + + Provides distribution summaries across successful, incomplete, and errored + requests for absolute values, per-second rates, and concurrency levels. + """ - output: StatusDistributionSummary = Field(description="") - output_per_second: StatusDistributionSummary = Field(description="") - output_concurrency: StatusDistributionSummary = Field(description="") + input: StatusDistributionSummary = Field( + description="Distribution of input metric values" + ) + input_per_second: StatusDistributionSummary = Field( + description="Distribution of input metric rates per second" + ) + input_concurrency: StatusDistributionSummary = Field( + description="Distribution of concurrent input metric values" + ) - total: StatusDistributionSummary = Field(description="") - total_per_second: StatusDistributionSummary = Field(description="") - total_concurrency: StatusDistributionSummary = Field(description="") + output: StatusDistributionSummary = Field( + description="Distribution of output metric values" + ) + output_per_second: StatusDistributionSummary = Field( + description="Distribution of output metric rates per second" + ) + output_concurrency: StatusDistributionSummary = Field( + description="Distribution of concurrent output metric values" + ) + + total: StatusDistributionSummary = Field( + description="Distribution of total metric values (input + output)" + ) + total_per_second: StatusDistributionSummary = Field( + description="Distribution of total metric rates per second" + ) + total_concurrency: StatusDistributionSummary = Field( + description="Distribution of concurrent total metric values" + ) @classmethod def compile( @@ -537,6 +701,15 @@ def compile( input_values: list[int | float], output_values: list[int | float], ) -> GenerativeMetricsSummary: + """ + Compile generative metrics summary from request data. 
+ + :param request_types: Status types for each request + :param request_times: Start and end times for each request + :param input_values: Input metric values for each request + :param output_values: Output metric values for each request + :return: Compiled generative metrics summary + """ total_values = [ input_val + output_val for input_val, output_val in zip(input_values, output_values, strict=False) @@ -595,9 +768,22 @@ def compile( class GenerativeTextMetricsSummary(StandardBaseDict): - tokens: GenerativeMetricsSummary = Field(description="") - words: GenerativeMetricsSummary = Field(description="") - characters: GenerativeMetricsSummary = Field(description="") + """ + Text-specific metric summaries for generative benchmarks. + + Tracks token, word, and character-level metrics across input, output, and + total usage for text generation workloads. + """ + + tokens: GenerativeMetricsSummary = Field( + description="Token count metrics and distributions" + ) + words: GenerativeMetricsSummary = Field( + description="Word count metrics and distributions" + ) + characters: GenerativeMetricsSummary = Field( + description="Character count metrics and distributions" + ) @classmethod def compile( @@ -607,6 +793,15 @@ def compile( input_metrics: list[UsageMetrics], output_metrics: list[UsageMetrics], ) -> GenerativeTextMetricsSummary: + """ + Compile text metrics summary from request usage data. + + :param request_types: Status types for each request + :param request_times: Start and end times for each request + :param input_metrics: Input usage metrics for each request + :param output_metrics: Output usage metrics for each request + :return: Compiled text metrics summary + """ return GenerativeTextMetricsSummary( tokens=GenerativeMetricsSummary.compile( request_types=request_types, @@ -634,10 +829,25 @@ def compile( class GenerativeImageMetricsSummary(StandardBaseDict): - tokens: GenerativeMetricsSummary = Field(description="") - images: GenerativeMetricsSummary = Field(description="") - pixels: GenerativeMetricsSummary = Field(description="") - bytes: GenerativeMetricsSummary = Field(description="") + """ + Image-specific metric summaries for generative benchmarks. + + Tracks token, image count, pixel, and byte-level metrics across input, output, + and total usage for image generation workloads. + """ + + tokens: GenerativeMetricsSummary = Field( + description="Image token count metrics and distributions" + ) + images: GenerativeMetricsSummary = Field( + description="Image count metrics and distributions" + ) + pixels: GenerativeMetricsSummary = Field( + description="Pixel count metrics and distributions" + ) + bytes: GenerativeMetricsSummary = Field( + description="Byte size metrics and distributions" + ) @classmethod def compile( @@ -647,6 +857,15 @@ def compile( input_metrics: list[UsageMetrics], output_metrics: list[UsageMetrics], ) -> GenerativeImageMetricsSummary: + """ + Compile image metrics summary from request usage data. 
+ + :param request_types: Status types for each request + :param request_times: Start and end times for each request + :param input_metrics: Input usage metrics for each request + :param output_metrics: Output usage metrics for each request + :return: Compiled image metrics summary + """ return GenerativeImageMetricsSummary( tokens=GenerativeMetricsSummary.compile( request_types=request_types, @@ -676,10 +895,25 @@ def compile( class GenerativeVideoMetricsSummary(StandardBaseDict): - tokens: GenerativeMetricsSummary = Field(description="") - frames: GenerativeMetricsSummary = Field(description="") - seconds: GenerativeMetricsSummary = Field(description="") - bytes: GenerativeMetricsSummary = Field(description="") + """ + Video-specific metric summaries for generative benchmarks. + + Tracks token, frame count, duration, and byte-level metrics across input, + output, and total usage for video generation workloads. + """ + + tokens: GenerativeMetricsSummary = Field( + description="Video token count metrics and distributions" + ) + frames: GenerativeMetricsSummary = Field( + description="Frame count metrics and distributions" + ) + seconds: GenerativeMetricsSummary = Field( + description="Duration metrics in seconds and distributions" + ) + bytes: GenerativeMetricsSummary = Field( + description="Byte size metrics and distributions" + ) @classmethod def compile( @@ -689,6 +923,15 @@ def compile( input_metrics: list[UsageMetrics], output_metrics: list[UsageMetrics], ) -> GenerativeVideoMetricsSummary: + """ + Compile video metrics summary from request usage data. + + :param request_types: Status types for each request + :param request_times: Start and end times for each request + :param input_metrics: Input usage metrics for each request + :param output_metrics: Output usage metrics for each request + :return: Compiled video metrics summary + """ return GenerativeVideoMetricsSummary( tokens=GenerativeMetricsSummary.compile( request_types=request_types, @@ -720,10 +963,25 @@ def compile( class GenerativeAudioMetricsSummary(StandardBaseDict): - tokens: GenerativeMetricsSummary = Field(description="") - samples: GenerativeMetricsSummary = Field(description="") - seconds: GenerativeMetricsSummary = Field(description="") - bytes: GenerativeMetricsSummary = Field(description="") + """ + Audio-specific metric summaries for generative benchmarks. + + Tracks token, sample count, duration, and byte-level metrics across input, + output, and total usage for audio generation workloads. + """ + + tokens: GenerativeMetricsSummary = Field( + description="Audio token count metrics and distributions" + ) + samples: GenerativeMetricsSummary = Field( + description="Sample count metrics and distributions" + ) + seconds: GenerativeMetricsSummary = Field( + description="Duration metrics in seconds and distributions" + ) + bytes: GenerativeMetricsSummary = Field( + description="Byte size metrics and distributions" + ) @classmethod def compile( @@ -733,6 +991,15 @@ def compile( input_metrics: list[UsageMetrics], output_metrics: list[UsageMetrics], ) -> GenerativeAudioMetricsSummary: + """ + Compile audio metrics summary from request usage data. 
+ + :param request_types: Status types for each request + :param request_times: Start and end times for each request + :param input_metrics: Input usage metrics for each request + :param output_metrics: Output usage metrics for each request + :return: Compiled audio metrics summary + """ return GenerativeAudioMetricsSummary( tokens=GenerativeMetricsSummary.compile( request_types=request_types, @@ -802,7 +1069,10 @@ class GenerativeMetrics(StandardBaseDict): description="Distribution of inter-token latencies in milliseconds" ) output_tokens_wo_first_per_iteration: StatusDistributionSummary = Field( - description="Distribution of output tokens (without first) generated per streaming iteration" + description=( + "Distribution of output tokens (without first) generated per " + "streaming iteration" + ) ) output_tokens_per_second: StatusDistributionSummary = Field( description="Distribution of output token generation rates" @@ -815,10 +1085,18 @@ class GenerativeMetrics(StandardBaseDict): ) # Domain specific stats - text: GenerativeTextMetricsSummary = Field(description="") - image: GenerativeImageMetricsSummary = Field(description="") - video: GenerativeVideoMetricsSummary = Field(description="") - audio: GenerativeAudioMetricsSummary = Field(description="") + text: GenerativeTextMetricsSummary = Field( + description="Text-specific metrics for tokens, words, and characters" + ) + image: GenerativeImageMetricsSummary = Field( + description="Image-specific metrics for tokens, images, pixels, and bytes" + ) + video: GenerativeVideoMetricsSummary = Field( + description="Video-specific metrics for tokens, frames, duration, and bytes" + ) + audio: GenerativeAudioMetricsSummary = Field( + description="Audio-specific metrics for tokens, samples, duration, and bytes" + ) @classmethod def update_estimate( @@ -829,6 +1107,15 @@ def update_estimate( request_info: RequestInfo, scheduler_state: SchedulerState, ): + """ + Update real-time generative metrics estimates with new request data. + + :param state: Current estimated benchmark state to update + :param response: Response received from the backend + :param request: Original request sent to the backend + :param request_info: Metadata about the request execution + :param scheduler_state: Current state of the scheduler + """ benchmark_start_time = scheduler_state.start_time request_start_time = ( request_info.timings.request_start or request_info.timings.resolve_start @@ -1025,6 +1312,14 @@ def compile( errored: list[GenerativeRequestStats], incomplete: list[GenerativeRequestStats], ) -> GenerativeMetrics: + """ + Compile final generative metrics from request statistics. 
+ + :param completed: Successfully completed request statistics + :param errored: Failed request statistics + :param incomplete: Incomplete/cancelled request statistics + :return: Compiled generative metrics with full distributions + """ requests = completed + errored + incomplete request_types = cast( "list[Literal['successful', 'error', 'incomplete']]", @@ -1139,19 +1434,30 @@ def compile( class SchedulerDict(StandardBaseDict): """Scheduler configuration and execution state dictionary.""" - strategy: SchedulingStrategy - constraints: dict[str, dict[str, Any]] - state: SchedulerState + strategy: SchedulingStrategy = Field( + description="Scheduling strategy used for request distribution" + ) + constraints: dict[str, dict[str, Any]] = Field( + description="Execution constraints applied during benchmarking" + ) + state: SchedulerState = Field( + description="Final state of the scheduler after execution" + ) class BenchmarkerDict(StandardBaseDict): """Benchmarker configuration and component settings dictionary.""" - args: BenchmarkArgs - profile: Profile - requests: dict[str, Any] - backend: dict[str, Any] - environment: dict[str, Any] + profile: Profile = Field(description="Benchmark profile configuration") + requests: dict[str, Any] = Field( + description="Request configuration and dataset information" + ) + backend: dict[str, Any] = Field( + description="Backend configuration and connection details" + ) + environment: dict[str, Any] = Field( + description="Execution environment configuration" + ) class GenerativeBenchmark(Benchmark, StandardBaseDict): @@ -1241,13 +1547,26 @@ def duration(self) -> float: @classmethod def update_estimate( cls, - args: BenchmarkArgs, + args: BenchmarkerArgs, state: EstimatedBenchmarkState, response: GenerationResponse | None, request: GenerationRequest, request_info: RequestInfo, scheduler_state: SchedulerState, ): + """ + Update generative benchmark estimates with new request data. + + Handles warmup/cooldown filtering, request sampling via reservoir sampling, + and delegates metric updates to child metric classes. + + :param args: Benchmark configuration arguments + :param state: Current estimated benchmark state to update + :param response: Response received from the backend + :param request: Original request sent to the backend + :param request_info: Metadata about the request execution + :param scheduler_state: Current state of the scheduler + """ if ( request_info.status == "cancelled" and request_info.timings.resolve_start is None @@ -1344,7 +1663,7 @@ def update_estimate( @classmethod def compile( cls, - args: BenchmarkArgs, + args: BenchmarkerArgs, estimated_state: EstimatedBenchmarkState, scheduler_state: SchedulerState, profile: Profile, @@ -1354,6 +1673,20 @@ def compile( strategy: SchedulingStrategy, constraints: dict[str, dict[str, Any]], ) -> GenerativeBenchmark: + """ + Compile final generative benchmark from accumulated state. 
+ + :param args: Benchmark configuration arguments + :param estimated_state: Accumulated benchmark state from execution + :param scheduler_state: Final state of the scheduler + :param profile: Benchmark profile configuration + :param requests: Collection of requests executed + :param backend: Backend interface used for execution + :param environment: Execution environment configuration + :param strategy: Scheduling strategy used + :param constraints: Execution constraints applied + :return: Compiled generative benchmark instance + """ return GenerativeBenchmark( run_id=args.run_id, run_index=args.run_index, @@ -1366,7 +1699,6 @@ def compile( state=scheduler_state, ), benchmarker=BenchmarkerDict( - args=args, profile=profile, requests=InfoMixin.extract_from_obj(requests), backend=backend.info, @@ -1404,6 +1736,267 @@ def compile( ) +class BenchmarkGenerativeTextArgs(StandardBaseModel): + """ + Configuration arguments for generative text benchmark execution. + + Defines all parameters for benchmark setup including target endpoint, data + sources, backend configuration, processing pipeline, output formatting, and + execution constraints. Supports loading from scenario files and merging with + runtime overrides. + """ + + @classmethod + def create( + cls, scenario: Path | str | None, **kwargs: dict[str, Any] + ) -> BenchmarkGenerativeTextArgs: + """ + Create benchmark args from scenario file and/or keyword arguments. + + :param scenario: Path to scenario file or name of built-in scenario + :param kwargs: Additional keyword arguments to override scenario values + :return: Configured benchmark args instance + :raises ValueError: If scenario is not found or file format is unsupported + """ + constructor_kwargs = {} + + if scenario is not None: + if isinstance(scenario, str) and scenario in ( + builtin_scenarios := get_builtin_scenarios() + ): + scenario_path = builtin_scenarios[scenario] + elif Path(scenario).exists() and Path(scenario).is_file(): + scenario_path = Path(scenario) + else: + raise ValueError(f"Scenario '{scenario}' not found.") + + with scenario_path.open() as file: + if scenario_path.suffix == ".json": + scenario_data = json.load(file) + elif scenario_path.suffix in {".yaml", ".yml"}: + scenario_data = yaml.safe_load(file) + else: + raise ValueError( + f"Unsupported scenario file format: {scenario_path.suffix}" + ) + if "args" in scenario_data: + # loading from a report file + scenario_data = scenario_data["args"] + constructor_kwargs.update(scenario_data) + + for key, value in kwargs.items(): + if value != cls.get_default(key): + constructor_kwargs[key] = value + + return cls.model_validate(constructor_kwargs) + + @classmethod + def get_default(cls: BenchmarkGenerativeTextArgs, field: str) -> Any: + """ + Get default value for a model field. 
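+
+        Illustrative sketch using defaults declared on this model::
+
+            BenchmarkGenerativeTextArgs.get_default("profile")      # "sweep"
+            BenchmarkGenerativeTextArgs.get_default("random_seed")  # 42
+            BenchmarkGenerativeTextArgs.get_default("data")         # [] (factory)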
+ + :param field: Name of the field to retrieve default for + :return: Default value for the specified field + :raises ValueError: If field is not found in model + """ + if field not in BenchmarkGenerativeTextArgs.model_fields: + raise ValueError( + f"Field '{field}' not found in BenchmarkGenerativeTextArgs" + ) + + field_info = BenchmarkGenerativeTextArgs.model_fields[field] + if field_info.default_factory is not None: + return field_info.default_factory() + + return field_info.default + + model_config = ConfigDict( + extra="ignore", + use_enum_values=True, + from_attributes=True, + arbitrary_types_allowed=True, + ) + + # Required + target: str = Field(description="Target endpoint URL for benchmark execution") + data: list[Any] = Field( + description="List of dataset sources or data files", + default_factory=list, + min_length=1, + ) + # Benchmark configuration + profile: StrategyType | ProfileType | Profile = Field( + default="sweep", description="Benchmark profile or scheduling strategy type" + ) + rate: float | list[float] | None = Field( + default=None, description="Request rate(s) for rate-based scheduling" + ) + # Backend configuration + backend: BackendType | Backend = Field( + default="openai_http", description="Backend type or instance for execution" + ) + backend_kwargs: dict[str, Any] | None = Field( + default=None, description="Additional backend configuration arguments" + ) + model: str | None = Field(default=None, description="Model identifier for backend") + # Data configuration + processor: str | Path | PreTrainedTokenizerBase | None = Field( + default=None, description="Tokenizer path, name, or instance for processing" + ) + processor_args: dict[str, Any] | None = Field( + default=None, description="Additional tokenizer configuration arguments" + ) + data_args: list[dict[str, Any]] | None = Field( + default_factory=list, description="Per-dataset configuration arguments" + ) + data_samples: int = Field( + default=-1, description="Number of samples to use from datasets (-1 for all)" + ) + data_column_mapper: ( + DatasetPreprocessor | dict[str, str] | Literal["generative_column_mapper"] + ) = Field( + default="generative_column_mapper", + description="Column mapping preprocessor for dataset fields", + ) + data_request_formatter: DatasetPreprocessor | dict[str, str] | str = Field( + default="chat_completions", + description="Request formatting preprocessor or template name", + ) + data_collator: Callable | Literal["generative"] | None = Field( + default="generative", description="Data collator for batch processing" + ) + data_sampler: Sampler[int] | Literal["shuffle"] | None = Field( + default=None, description="Data sampler for request ordering" + ) + data_num_workers: int | None = Field( + default=None, description="Number of workers for data loading" + ) + dataloader_kwargs: dict[str, Any] | None = Field( + default=None, description="Additional dataloader configuration arguments" + ) + random_seed: int = Field(default=42, description="Random seed for reproducibility") + # Output configuration + output_path: str | Path | None = Field( + default_factory=Path.cwd, description="Directory path for output files" + ) + output_formats: list[str] | dict[str, str | dict[str, Any]] | None = Field( + default_factory=lambda: ["console", "json"], + description="Output format names or configuration mappings", + ) + # Benchmarker configuration + benchmark_cls: type[GenerativeBenchmark] = Field( + default=GenerativeBenchmark, + description="Benchmark class to use for result compilation", + 
) + sample_requests: int | None = Field( + default=10, + description="Number of requests to sample for detailed metrics (None for all)", + ) + warmup: float | None = Field( + default=None, + description="Warmup period in seconds, requests, or fraction (0-1)", + ) + cooldown: float | None = Field( + default=None, + description="Cooldown period in seconds, requests, or fraction (0-1)", + ) + prefer_response_metrics: bool = Field( + default=True, + description="Whether to prefer backend response metrics over request metrics", + ) + # Constraints configuration + max_seconds: int | float | None = Field( + default=None, description="Maximum benchmark execution time in seconds" + ) + max_requests: int | None = Field( + default=None, description="Maximum number of requests to execute" + ) + max_errors: int | None = Field( + default=None, description="Maximum number of errors before stopping" + ) + max_error_rate: float | None = Field( + default=None, description="Maximum error rate (0-1) before stopping" + ) + max_global_error_rate: float | None = Field( + default=None, description="Maximum global error rate (0-1) before stopping" + ) + + @model_serializer + def serialize_model(self): + """ + Custom serialization logic for benchmark args. + + Converts complex types to serializable formats including Profile to type + string, Backend to type string, and Path objects to strings. + + :return: Dictionary representation suitable for JSON/YAML serialization + """ + return { + # target - serialize as is + "target": self.target, + "data": [ + item if isinstance(item, str | type(None)) else str(item) + for item in self.data + ], # data - for each item in the list, if not a str or None, save str(item) + "profile": ( + self.profile.type_ + if isinstance(self.profile, Profile) + else self.profile + ), # profile - if instance of Profile, then save as profile.type_ + "rate": self.rate, + "backend": ( + self.backend.type_ + if isinstance(self.backend, Backend) + else self.backend + ), # backend - if instance of Backend, then save as backend.type_ + "backend_kwargs": self.backend_kwargs, + "model": self.model, + "processor": ( + self.processor + if isinstance(self.processor, str) + else str(self.processor) + if self.processor is not None + else None + ), # processor - if not str, then save as str(processor) + "processor_args": self.processor_args, + "data_args": self.data_args, + "data_samples": self.data_samples, + "data_column_mapper": ( + self.data_column_mapper + if isinstance(self.data_column_mapper, dict | str) + else {} + ), # data_column_mapper - if not dict or str, then save as an empty dict + "data_request_formatter": ( + self.data_request_formatter + if isinstance(self.data_request_formatter, dict | str) + else {} + ), # data_request_formatter - if not dict or str, then save as empty dict + "data_collator": ( + self.data_collator if isinstance(self.data_collator, str) else None + ), # data_collator - if not str, then save as None + "data_sampler": ( + self.data_sampler if isinstance(self.data_sampler, str) else None + ), # data_sampler - if not str, then save as None + "data_num_workers": self.data_num_workers, + "dataloader_kwargs": self.dataloader_kwargs, + "random_seed": self.random_seed, + "output_path": ( + str(self.output_path) if self.output_path is not None else None + ), # output_path - if not None, then ensure it's a str + "output_formats": self.output_formats, + # benchmark_cls - don't save at all (excluded) + "sample_requests": self.sample_requests, + "warmup": self.warmup, + "cooldown": 
+
+
 class GenerativeBenchmarksReport(StandardBaseModel):
     """Container for multiple benchmark results with load/save functionality."""
 
@@ -1439,6 +2032,9 @@ def load_file(
 
         return GenerativeBenchmarksReport.model_validate(model_dict)
 
+    args: BenchmarkGenerativeTextArgs = Field(
+        description="The benchmark arguments used for all benchmarks in the report."
+    )
     benchmarks: list[GenerativeBenchmark] = Field(
         description="The list of completed benchmarks contained within the report.",
         default_factory=list,
diff --git a/src/guidellm/benchmark/types.py b/src/guidellm/benchmark/types.py
deleted file mode 100644
index 983e3189..00000000
--- a/src/guidellm/benchmark/types.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any
-
-from transformers import PreTrainedTokenizerBase  # type: ignore[import]
-from typing_extensions import TypeAliasType
-
-from guidellm.benchmark.output import GenerativeBenchmarkerOutput
-
-__all__ = ["OutputFormatT", "ProcessorInputT"]
-
-
-OutputFormatT = TypeAliasType(
-    "OutputFormatT",
-    tuple[str, ...]
-    | list[str]
-    | dict[str, str | dict[str, Any] | GenerativeBenchmarkerOutput]
-    | None,
-)
-
-ProcessorInputT = TypeAliasType("ProcessorInputT", str | Path | PreTrainedTokenizerBase)
diff --git a/src/guidellm/data/deserializers/deserializer.py b/src/guidellm/data/deserializers/deserializer.py
index d50e4a9c..7f0dae39 100644
--- a/src/guidellm/data/deserializers/deserializer.py
+++ b/src/guidellm/data/deserializers/deserializer.py
@@ -50,7 +50,11 @@ def deserialize(
         dataset = None
 
         if type_ is None:
-            for deserializer in cls.registered_objects():
+            for name, deserializer in cls.registry.items():
+                if name == "huggingface":
+                    # Save Hugging Face until the end since it is a catch-all.
+                    continue
+
                 deserializer_fn: DatasetDeserializer = (
                     deserializer() if isinstance(deserializer, type) else deserializer
                 )
@@ -62,6 +66,15 @@ def deserialize(
                     random_seed=random_seed,
                     **data_kwargs,
                 )
+
+            if dataset is None:
+                deserializer_fn = cls.get_registered_object("huggingface")()
+                dataset = deserializer_fn(
+                    data=data,
+                    processor_factory=processor_factory,
+                    random_seed=random_seed,
+                    **data_kwargs,
+                )
         elif deserializer := cls.get_registered_object(type_) is not None:
             deserializer_fn: DatasetDeserializer = (
                 deserializer() if isinstance(deserializer, type) else deserializer
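The deserializer change above skips the Hugging Face entry on the first pass and only falls back to it when no specific deserializer produced a dataset, since that entry accepts nearly any input. A standalone sketch of the ordering pattern, with a hypothetical registry and parser callables rather than the project's real registry API:

```python
# Sketch only: a toy registry with specific parsers plus one catch-all entry.
from typing import Callable, Optional

REGISTRY: dict[str, Callable[[str], Optional[dict]]] = {
    "json_file": lambda data: {"kind": "json", "path": data} if data.endswith(".json") else None,
    "csv_file": lambda data: {"kind": "csv", "path": data} if data.endswith(".csv") else None,
    "huggingface": lambda data: {"kind": "hf", "id": data},  # accepts anything
}


def deserialize(data: str) -> dict:
    # First pass: every specific deserializer, skipping the catch-all.
    for name, parse in REGISTRY.items():
        if name == "huggingface":
            continue
        result = parse(data)
        if result is not None:
            return result
    # Nothing matched, so fall back to the catch-all entry last.
    return REGISTRY["huggingface"](data)


assert deserialize("data.csv") == {"kind": "csv", "path": "data.csv"}
assert deserialize("org/dataset") == {"kind": "hf", "id": "org/dataset"}
```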
diff --git a/src/guidellm/presentation/data_models.py b/src/guidellm/presentation/data_models.py
index ff2863b4..62bf97d8 100644
--- a/src/guidellm/presentation/data_models.py
+++ b/src/guidellm/presentation/data_models.py
@@ -72,7 +72,7 @@ def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]):
             bm.run_stats.start_time for bm in benchmarks if bm.start_time is not None
         )
         return cls(
-            model=Model(name=model, size=0),
+            model=Model(name=model or "", size=0),
             task="N/A",
             timestamp=timestamp,
             dataset=Dataset(name="N/A"),
@@ -117,11 +117,15 @@ def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]):
             range(len(successful_requests)), min(5, len(successful_requests))
         )
         sample_prompts = [
-            successful_requests[i].prompt.replace("\n", " ").replace('"', "'")
+            successful_requests[i].request_args.replace("\n", " ").replace('"', "'")
+            if successful_requests[i].request_args is not None
+            else ""
             for i in sample_indices
         ]
         sample_outputs = [
             successful_requests[i].output.replace("\n", " ").replace('"', "'")
+            if successful_requests[i].output is not None
+            else ""
             for i in sample_indices
         ]
 
@@ -155,10 +159,10 @@ def from_benchmarks(cls, benchmarks: list["GenerativeBenchmark"]):
             min_start_time = benchmarks[0].start_time
 
         all_req_times = [
-            req.scheduler_info.started_at - min_start_time
+            req.info.timings.request_start - min_start_time
             for bm in benchmarks
             for req in bm.requests.successful
-            if req.scheduler_info.started_at is not None
+            if req.info.timings.request_start is not None
         ]
         number_of_buckets = len(benchmarks)
         request_over_time_buckets, bucket_width = Bucket.from_data(
diff --git a/src/guidellm/utils/cli.py b/src/guidellm/utils/cli.py
index f049e94e..a75c37a8 100644
--- a/src/guidellm/utils/cli.py
+++ b/src/guidellm/utils/cli.py
@@ -3,12 +3,31 @@
 
 import click
 
+__all__ = ["Union", "format_list_arg", "parse_json", "set_if_not_default"]
+
 
 def parse_json(ctx, param, value):  # noqa: ARG001
     if value is None or value == [None]:
         return None
-    if isinstance(value, (list, tuple)):
+    if isinstance(value, list | tuple):
         return [parse_json(ctx, param, val) for val in value]
+
+    if "{" not in value and "}" not in value and "=" in value:
+        # Treat it as a key=value pair if it doesn't look like JSON.
+        result = {}
+        for pair in value.split(","):
+            if "=" not in pair:
+                raise click.BadParameter(
+                    f"{param.name} must be a valid JSON string or key=value pairs."
+                )
+            key, val = pair.split("=", 1)
+            result[key.strip()] = val.strip()
+        return result
+
+    if "{" not in value and "}" not in value:
+        # Treat it as a plain string if it doesn't look like JSON.
+        return value
+
     try:
         return json.loads(value)
     except json.JSONDecodeError as err:
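With the `parse_json` change above, a CLI option can be given as a JSON object, as comma-separated key=value pairs, or as a bare string. A rough sketch of the resulting precedence, written as a plain helper instead of a Click callback (the `name` parameter stands in for `param.name` and is illustrative):

```python
# Sketch only: the three input shapes parse_json now accepts, as a plain helper.
import json


def parse_value(name: str, value: str):
    if "{" not in value and "}" not in value and "=" in value:
        # key=value[,key=value...] becomes a flat dict of strings.
        result = {}
        for pair in value.split(","):
            if "=" not in pair:
                raise ValueError(f"{name} must be JSON or key=value pairs")
            key, val = pair.split("=", 1)
            result[key.strip()] = val.strip()
        return result
    if "{" not in value and "}" not in value:
        # No braces and no '=': treat it as a plain string.
        return value
    return json.loads(value)  # anything brace-like must be valid JSON


assert parse_value("data", '{"samples": 100}') == {"samples": 100}
assert parse_value("data", "samples=100, split=train") == {"samples": "100", "split": "train"}
assert parse_value("data", "prompts.jsonl") == "prompts.jsonl"
```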
@@ -28,6 +47,29 @@ def set_if_not_default(ctx: click.Context, **kwargs) -> dict[str, Any]:
     return values
 
 
+def format_list_arg(
+    value: Any, default: Any = None, simplify_single: bool = False
+) -> list[Any] | Any:
+    """
+    Normalize a multi-argument value into a list.
+
+    :param value: The value to format, which can be a single value or a list/tuple.
+    :param default: The default value to return if the value is falsy.
+    :param simplify_single: If True and the value is a single-item list/tuple,
+        return the single item instead of a list.
+    :return: The values as a list, or the single item if simplify_single applies.
+    """
+    if not value:
+        return default
+
+    if isinstance(value, tuple):
+        value = list(value)
+    elif not isinstance(value, list):
+        value = [value]
+
+    return value if not simplify_single or len(value) != 1 else value[0]
+
+
 class Union(click.ParamType):
     """
     A custom click parameter type that allows for multiple types to be accepted.