diff --git a/.dockerignore b/.dockerignore
index 2671039..2579097 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -13,7 +13,6 @@
.gitignore
.pytest_cache
Dockerfile
-README.md
build
data
dist
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..fe87f5c
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,22 @@
+name: CI
+
+on: [ push, pull_request ]
+
+jobs:
+ ci:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Install poetry
+ run: pip install poetry==1.7.1
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.11
+ cache: 'poetry'
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ poetry install -vv
+ - name: Test
+ run: pytest
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 0000000..124910a
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,35 @@
+# Read the Docs configuration file for Sphinx projects
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the OS, Python version and other tools you might need
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.12"
+ # You can also specify other tool versions:
+ # nodejs: "20"
+ # rust: "1.70"
+ # golang: "1.20"
+
+# Build documentation in the "docs/" directory with Sphinx
+sphinx:
+ configuration: docs/conf.py
+ # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
+ # builder: "dirhtml"
+ # Fail on all warnings to avoid broken references
+ # fail_on_warning: true
+
+# Optionally build your docs in additional formats such as PDF and ePub
+# formats:
+# - pdf
+# - epub
+
+# Optional but recommended, declare the Python requirements required
+# to build your documentation
+# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
+# python:
+# install:
+# - requirements: docs/requirements.txt
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
new file mode 100644
index 0000000..3ffe9be
--- /dev/null
+++ b/CONTRIBUTING.rst
@@ -0,0 +1,120 @@
+How to contribute to Youtool
+============================
+
+Thank you for considering contributing to Youtool!
+
+First time setup in your local environment:
+-------------------------------------------
+
+- Make sure you have a `GitHub account <https://github.com/signup>`_
+
+- Fork Youtool to your GitHub account by clicking the `Fork <https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo>`_ button
+
+- `Clone <https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository>`_ your fork locally, replacing your-username in the command below with your actual username
+
+.. code-block::
+
+ git clone https://github.com/your-username/youtool
+ cd youtool
+
+Installing Poetry
+-----------------
+To manage dependencies and packaging for the project, we use Poetry.
+- Please follow the installation instructions provided in the `Poetry documentation <https://python-poetry.org/docs/#installation>`_
+
+
+Setting Up the Virtual Environment
+----------------------------------
+- After installing Poetry, you need to set up the virtual environment for the project. Navigate to the project directory and run the following command:
+
+.. code-block::
+
+ poetry shell
+
+This command will create and activate a virtual environment for the project.
+
+
+Installing Dependencies
+-----------------------
+- Once the virtual environment is activated, you can install the project dependencies by running:
+
+.. code-block::
+
+ poetry install
+
+This command will install all the dependencies listed in the pyproject.toml file.
+
+
+Creating a Local Branch from a Remote Branch
+--------------------------------------------
+To start contributing, you need to create a local branch based on a remote branch.
+Use the following commands to achieve this:
+1. Fetch the latest changes from the remote repository:
+
+.. code-block::
+
+ git fetch origin
+
+2. Create and switch to a new branch based on the remote branch:
+
+.. code-block::
+
+    git checkout -b your-branch-name origin/remote-branch-name
+
+Push your commits to your fork on GitHub and `create a pull request <https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-a-pull-request/creating-a-pull-request>`_. Link to the issue being addressed with fixes #123 in the pull request description.
+
+.. code-block::
+
+    git push --set-upstream origin your-branch-name
+
+Replace ``your-branch-name`` with your desired branch name and ``remote-branch-name`` with the name of the remote branch you want to base your work on.
+
+By following these steps, you'll have a local branch set up and ready for your contributions.
+
+
+Running Tests
+-------------
+Before submitting your changes, it's important to run the tests to ensure everything is working correctly.
+Depending on whether you are inside or outside the virtual environment, use one of the following commands:
+
+1. Inside the virtual environment:
+If you have already activated the virtual environment with poetry shell, run:
+
+.. code-block::
+
+ pytest
+
+2. Outside the virtual environment:
+If you are not inside the virtual environment, you can still run the tests using Poetry:
+
+.. code-block::
+
+ poetry run pytest
+
+By following these steps, you'll ensure that all tests are run correctly before submitting your contributions.
+
+Updating Documentation
+----------------------
+Our documentation is hosted on Read the Docs, and the configuration files are located in the docs directory. To update the documentation, follow these steps:
+
+1. Navigate to the docs directory:
+
+.. code-block::
+
+ cd docs
+
+2. Make your changes:
+ Edit the necessary files to update the documentation.
+ The main configuration file is typically conf.py, but you may also need to update other ``.rst`` files as required.
+
+3. Build the documentation locally:
+ After making your changes, you can build the HTML version of the documentation to preview your updates.
+ Run the following command:
+
+.. code-block::
+
+ make html
+
+Open ``_build/html/index.html`` in your browser to view the docs.
+
+Read more about `Sphinx <https://www.sphinx-doc.org/>`_.
diff --git a/Dockerfile b/Dockerfile
index 9b88ea5..2cf2f89 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -11,12 +11,12 @@ RUN apt update \
&& apt clean \
&& rm -rf /var/lib/apt/lists/*
-COPY requirements/ /app/requirements
-RUN pip install --no-cache-dir -U pip \
- && pip install --no-cache-dir -r /app/requirements/base.txt \
- && pip install --no-cache-dir -r /app/requirements/cli.txt \
- && pip install --no-cache-dir -r /app/requirements/livechat.txt \
- && pip install --no-cache-dir -r /app/requirements/transcription.txt \
- && if [ "$DEV_BUILD" = "true" ]; then pip install --no-cache-dir -r /app/requirements/dev.txt; fi
-
COPY . /app/
+
+RUN pip install --no-cache-dir -U --upgrade pip \
+ && if [ "$DEV_BUILD" = "true" ]; \
+ then \
+ pip install poetry==1.4.2; \
+ poetry export -f requirements.txt --output requirements.txt --without-hashes --with dev; \
+ pip install --no-cache-dir -r requirements.txt; \
+ else pip install /app; fi
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..d4bb2cb
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = .
+BUILDDIR = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..3f73132
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,27 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = 'Youtool'
+copyright = '2024, Álvaro Justen'
+author = 'Álvaro Justen'
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = ["myst_parser"]
+
+templates_path = ['_templates']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = 'alabaster'
+html_static_path = ['_static']
diff --git a/docs/contributing.rst b/docs/contributing.rst
new file mode 100644
index 0000000..3bdd7dc
--- /dev/null
+++ b/docs/contributing.rst
@@ -0,0 +1 @@
+.. include:: ../CONTRIBUTING.rst
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..7094336
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,156 @@
+.. youtool documentation master file, created by
+ sphinx-quickstart on Mon Jul 8 14:31:22 2024.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to youtool documentation!
+=================================
+
+Easily access YouTube Data API v3 in batches
+--------------------------------------------
+
+.. toctree::
+ :maxdepth: 2
+
+ contributing
+
+--------------------------------------------
+
+Python library and command-line interface to crawl YouTube Data API v3 in batch operations and other related tasks.
+Easier to use than alternatives - you don't need to spend time learning the YouTube API and its caveats.
+With this library you can get:
+
+- Channel ID from channel URL (scraping) or username (API)
+- Channel information (title, subscribers etc.)
+- List of playlists for a channel
+- List of videos for a playlist
+- Video information (title, description, likes, comments etc.)
+- Comments
+- Livechat, including superchat (scraping using chat-downloader)
+- Automatic transcription (scraping using yt-dlp)
+
+The library will automatically:
+
+- Try as many keys as you provide
+- Use batch of 50 items in supported API endpoints
+- Paginate when needed
+
+Installation
+------------
+
+Install project by running
+
+.. code-block:: bash
+
+ pip install youtool
+
+Using as a library
+------------------
+Just follow the tutorial/examples below and check the ``help()`` for ``YouTube`` methods.
+
+`GitHub Repository <https://github.com/PythonicCafe/youtool>`_
+
+1. Initializing the YouTube API:
+
+.. code-block:: python
+
+ from youtool import YouTube
+
+ api_keys = ["key1", "key2", ...]
+ yt = YouTube(api_keys, disable_ipv6=True)
+
+Here, we are creating an instance of the YouTube class using a list of YouTube API keys.
+The disable_ipv6=True option is passed to disable IPv6 usage.
+
+2. Extracting Channel IDs by url:
+
+.. code-block:: python
+
+ channel_id_1 = yt.channel_id_from_url("https://youtube.com/c/PythonicCafe/")
+ print(f"Pythonic Café's channel ID (got from URL): {channel_id_1}")
+
+3. Extracting Channel IDs by username:
+
+.. code-block:: python
+
+ channel_id_2 = yt.channel_id_from_username("turicas")
+ print(f"Turicas' channel ID (got from username): {channel_id_2}")
+
+4. Listing Playlists from a Channel:
+
+.. code-block:: python
+
+ for playlist in yt.channel_playlists(channel_id_2):
+ for video in yt.playlist_videos(playlist["id"]):
+ print(f" Video: {video}")
+
+Here, we iterate through playlists of a specific channel (channel_id_2) and list the videos in each playlist.
+
+5. Searching for Videos:
+
+.. code-block:: python
+
+ for index, video in enumerate(yt.video_search(term="Álvaro Justen")):
+ print(f" Video: {video}")
+ if index == 4:
+ break
+
+This snippet searches for videos related to a specific term using the video_search method of the yt instance.
+
+6. Fetching Detailed Video Information:
+
+.. code-block:: python
+
+ last_video = list(yt.videos_infos([video["id"]]))[0]
+ pprint(last_video)
+
+Here, we fetch detailed information about a specific video using the videos_infos method of the yt instance.
+
+7. Fetching Channel Information:
+
+.. code-block:: python
+
+ for channel in yt.channels_infos([channel_id_1, channel_id_2]):
+ print(channel)
+
+This snippet fetches detailed information about multiple channels using the channels_infos method of the yt instance.
+
+8. Fetching Video Comments and Live Chat:
+
+.. code-block:: python
+
+ for comment in yt.video_comments(video_id):
+ print(comment)
+ for chat_message in yt.video_livechat(live_video_id):
+ print(chat_message)
+
+Here, we fetch comments and live chat messages from specific videos using the video_comments and video_livechat methods of the yt instance.
+
+9. Downloading Video Transcriptions:
+
+.. code-block:: python
+
+ yt.videos_transcriptions([video_id, live_video_id], language_code="pt", path=download_path)
+
+This snippet downloads transcriptions for specific videos using the videos_transcriptions method of the yt instance.
+
+How to contribute
+------------------
+
+Contributions to the youtool project and its documentation are welcome.
+
+See :doc:`contributing` for more detail.
+
+- `Issue Tracker <https://github.com/PythonicCafe/youtool/issues>`_
+- `Source Code <https://github.com/PythonicCafe/youtool>`_
+
+Support
+-------
+
+If you are having issues, please let us know.
+
+License
+-------
+GNU Lesser General Public License (LGPL) version 3
+
+This project was developed in a partnership between Pythonic Café and `Novelo Data `_
\ No newline at end of file
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..32bb245
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.https://www.sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..ad20baf
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1896 @@
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+
+[[package]]
+name = "alabaster"
+version = "0.7.16"
+description = "A light, configurable Sphinx theme"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"},
+ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"},
+]
+
+[[package]]
+name = "asttokens"
+version = "2.4.1"
+description = "Annotate AST trees with source code positions"
+optional = false
+python-versions = "*"
+files = [
+ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
+ {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
+]
+
+[package.dependencies]
+six = ">=1.12.0"
+
+[package.extras]
+astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
+test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
+
+[[package]]
+name = "autoflake"
+version = "2.3.1"
+description = "Removes unused imports and unused variables"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840"},
+ {file = "autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"},
+]
+
+[package.dependencies]
+pyflakes = ">=3.0.0"
+
+[[package]]
+name = "babel"
+version = "2.15.0"
+description = "Internationalization utilities"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"},
+ {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"},
+]
+
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
+
+[[package]]
+name = "backports-tarfile"
+version = "1.2.0"
+description = "Backport of CPython tarfile module"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"},
+ {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"]
+
+[[package]]
+name = "black"
+version = "24.4.2"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"},
+ {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"},
+ {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"},
+ {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"},
+ {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"},
+ {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"},
+ {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"},
+ {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"},
+ {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"},
+ {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"},
+ {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"},
+ {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"},
+ {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"},
+ {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"},
+ {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"},
+ {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"},
+ {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"},
+ {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"},
+ {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"},
+ {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"},
+ {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"},
+ {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "brotli"
+version = "1.1.0"
+description = "Python bindings for the Brotli compression library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"},
+ {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"},
+ {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
+ {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
+ {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
+ {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
+ {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"},
+ {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
+ {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
+ {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"},
+ {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
+ {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
+ {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
+ {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"},
+ {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
+ {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
+ {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
+ {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"},
+ {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
+ {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
+ {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
+ {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
+ {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"},
+ {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
+ {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
+ {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
+ {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
+ {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"},
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
+ {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
+ {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
+ {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
+]
+
+[[package]]
+name = "brotlicffi"
+version = "1.1.0.0"
+description = "Python CFFI bindings to the Brotli library"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb"},
+ {file = "brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35"},
+ {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:246f1d1a90279bb6069de3de8d75a8856e073b8ff0b09dcca18ccc14cec85979"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc4bc5d82bc56ebd8b514fb8350cfac4627d6b0743382e46d033976a5f80fab6"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c26ecb14386a44b118ce36e546ce307f4810bc9598a6e6cb4f7fca725ae7e6"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca72968ae4eaf6470498d5c2887073f7efe3b1e7d7ec8be11a06a79cc810e990"},
+ {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:add0de5b9ad9e9aa293c3aa4e9deb2b61e99ad6c1634e01d01d98c03e6a354cc"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b6068e0f3769992d6b622a1cd2e7835eae3cf8d9da123d7f51ca9c1e9c333e5"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8557a8559509b61e65083f8782329188a250102372576093c88930c875a69838"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a7ae37e5d79c5bdfb5b4b99f2715a6035e6c5bf538c3746abc8e26694f92f33"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391151ec86bb1c683835980f4816272a87eaddc46bb91cbf44f62228b84d8cca"},
+ {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f3711be9290f0453de8eed5275d93d286abe26b08ab4a35d7452caa1fef532f"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a807d760763e398bbf2c6394ae9da5815901aa93ee0a37bca5efe78d4ee3171"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa8ca0623b26c94fccc3a1fdd895be1743b838f3917300506d04aa3346fd2a14"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3de0cf28a53a3238b252aca9fed1593e9d36c1d116748013339f0949bfc84112"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6be5ec0e88a4925c91f3dea2bb0013b3a2accda6f77238f76a34a1ea532a1cb0"},
+ {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d9eb71bb1085d996244439154387266fd23d6ad37161f6f52f1cd41dd95a3808"},
+ {file = "brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13"},
+]
+
+[package.dependencies]
+cffi = ">=1.0.0"
+
+[[package]]
+name = "certifi"
+version = "2024.7.4"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.16.0"
+description = "Foreign Function Interface for Python calling C code."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
+ {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
+ {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
+ {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
+ {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
+ {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
+ {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
+ {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
+ {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
+ {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
+ {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
+ {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
+ {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
+ {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
+ {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
+ {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
+ {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
+ {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
+ {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
+ {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
+ {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
+ {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
+ {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
+ {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
+ {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
+ {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+ {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+ {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+ {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
+ {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
+ {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
+ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "chat-downloader"
+version = "0.2.8"
+description = "A simple tool used to retrieve chat messages from livestreams, videos, clips and past broadcasts. No authentication needed!"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "chat-downloader-0.2.8.tar.gz", hash = "sha256:5816ac06179f81190e74c773d8afda82f4be718ea6dffdf7c22bbe265e1dd428"},
+ {file = "chat_downloader-0.2.8-py2.py3-none-any.whl", hash = "sha256:2d9bbddc0a85371ba44814c3686ee6e5f70c0531d1f5ecc236eae5a5bbb90465"},
+]
+
+[package.dependencies]
+colorlog = "*"
+docstring-parser = "*"
+isodate = "*"
+requests = "*"
+websocket-client = "*"
+
+[package.extras]
+dev = ["coverage", "flake8", "pytest", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-programoutput", "tox", "twine", "wheel"]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "colorlog"
+version = "6.8.2"
+description = "Add colours to the output of Python's logging module."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"},
+ {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+development = ["black", "flake8", "mypy", "pytest", "types-colorama"]
+
+[[package]]
+name = "cryptography"
+version = "42.0.8"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
+ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
+ {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
+ {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
+ {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
+ {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
+ {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
+ {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
+ {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
+ {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
+ {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
+ {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
+ {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
+ {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
+nox = ["nox"]
+pep8test = ["check-sdist", "click", "mypy", "ruff"]
+sdist = ["build"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test-randomorder = ["pytest-randomly"]
+
+[[package]]
+name = "decorator"
+version = "5.1.1"
+description = "Decorators for Humans"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
+ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
+]
+
+[[package]]
+name = "docstring-parser"
+version = "0.16"
+description = "Parse Python docstrings in reST, Google and Numpydoc format"
+optional = false
+python-versions = ">=3.6,<4.0"
+files = [
+ {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"},
+ {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"},
+]
+
+[[package]]
+name = "docutils"
+version = "0.21.2"
+description = "Docutils -- Python Documentation Utilities"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"},
+ {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"},
+]
+
+[[package]]
+name = "executing"
+version = "2.0.1"
+description = "Get the currently executing AST node of a frame, and other information"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"},
+ {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"},
+]
+
+[package.extras]
+tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
+
+[[package]]
+name = "flake8"
+version = "7.1.0"
+description = "the modular source code checker: pep8 pyflakes and co"
+optional = false
+python-versions = ">=3.8.1"
+files = [
+ {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"},
+ {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"},
+]
+
+[package.dependencies]
+mccabe = ">=0.7.0,<0.8.0"
+pycodestyle = ">=2.12.0,<2.13.0"
+pyflakes = ">=3.2.0,<3.3.0"
+
+[[package]]
+name = "idna"
+version = "3.7"
+description = "Internationalized Domain Names in Applications (IDNA)"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
+ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+]
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+description = "Getting image size from png/jpeg/jpeg2000/gif file"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "8.0.0"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"},
+ {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "ipython"
+version = "8.26.0"
+description = "IPython: Productive Interactive Computing"
+optional = false
+python-versions = ">=3.10"
+files = [
+ {file = "ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"},
+ {file = "ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+decorator = "*"
+jedi = ">=0.16"
+matplotlib-inline = "*"
+pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""}
+prompt-toolkit = ">=3.0.41,<3.1.0"
+pygments = ">=2.4.0"
+stack-data = "*"
+traitlets = ">=5.13.0"
+typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""}
+
+[package.extras]
+all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"]
+black = ["black"]
+doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"]
+kernel = ["ipykernel"]
+matplotlib = ["matplotlib"]
+nbconvert = ["nbconvert"]
+nbformat = ["nbformat"]
+notebook = ["ipywidgets", "notebook"]
+parallel = ["ipyparallel"]
+qtconsole = ["qtconsole"]
+test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"]
+test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"]
+
+[[package]]
+name = "isodate"
+version = "0.6.1"
+description = "An ISO 8601 date/time/duration parser and formatter"
+optional = false
+python-versions = "*"
+files = [
+ {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"},
+ {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "isort"
+version = "5.13.2"
+description = "A Python utility / library to sort Python imports."
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
+ {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
+]
+
+[package.extras]
+colors = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "jaraco-classes"
+version = "3.4.0"
+description = "Utility functions for Python class constructs"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"},
+ {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"},
+]
+
+[package.dependencies]
+more-itertools = "*"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
+[[package]]
+name = "jaraco-context"
+version = "5.3.0"
+description = "Useful decorators and context managers"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jaraco.context-5.3.0-py3-none-any.whl", hash = "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266"},
+ {file = "jaraco.context-5.3.0.tar.gz", hash = "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"},
+]
+
+[package.dependencies]
+"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""}
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
+[[package]]
+name = "jaraco-functools"
+version = "4.0.1"
+description = "Functools like those found in stdlib"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jaraco.functools-4.0.1-py3-none-any.whl", hash = "sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664"},
+ {file = "jaraco_functools-4.0.1.tar.gz", hash = "sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8"},
+]
+
+[package.dependencies]
+more-itertools = "*"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["jaraco.classes", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
+[[package]]
+name = "jedi"
+version = "0.19.1"
+description = "An autocompletion tool for Python that can be used for text editors."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
+ {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
+]
+
+[package.dependencies]
+parso = ">=0.8.3,<0.9.0"
+
+[package.extras]
+docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
+qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
+testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
+
+[[package]]
+name = "jeepney"
+version = "0.8.0"
+description = "Low-level, pure Python DBus protocol wrapper."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"},
+ {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"},
+]
+
+[package.extras]
+test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"]
+trio = ["async_generator", "trio"]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "keyring"
+version = "25.2.1"
+description = "Store and access your passwords safely."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "keyring-25.2.1-py3-none-any.whl", hash = "sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50"},
+ {file = "keyring-25.2.1.tar.gz", hash = "sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""}
+"jaraco.classes" = "*"
+"jaraco.context" = "*"
+"jaraco.functools" = "*"
+jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""}
+pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""}
+SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""}
+
+[package.extras]
+completion = ["shtab (>=1.1.0)"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
+[[package]]
+name = "loguru"
+version = "0.7.2"
+description = "Python logging made (stupidly) simple"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
+ {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
+win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.5"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.1.7"
+description = "Inline Matplotlib backend for Jupyter"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"},
+ {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"},
+]
+
+[package.dependencies]
+traitlets = "*"
+
+[[package]]
+name = "mccabe"
+version = "0.7.0"
+description = "McCabe checker, plugin for flake8"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "more-itertools"
+version = "10.3.0"
+description = "More routines for operating on iterables, beyond itertools"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "more-itertools-10.3.0.tar.gz", hash = "sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463"},
+ {file = "more_itertools-10.3.0-py3-none-any.whl", hash = "sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320"},
+]
+
+[[package]]
+name = "mutagen"
+version = "1.47.0"
+description = "read and write audio tags for many formats"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mutagen-1.47.0-py3-none-any.whl", hash = "sha256:edd96f50c5907a9539d8e5bba7245f62c9f520aef333d13392a79a4f70aca719"},
+ {file = "mutagen-1.47.0.tar.gz", hash = "sha256:719fadef0a978c31b4cf3c956261b3c58b6948b32023078a2117b1de09f0fc99"},
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "nh3"
+version = "0.2.17"
+description = "Python bindings to the ammonia HTML sanitization library."
+optional = false
+python-versions = "*"
+files = [
+ {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9"},
+ {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a"},
+ {file = "nh3-0.2.17-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3"},
+ {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a"},
+ {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a"},
+ {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351"},
+ {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc"},
+ {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f"},
+ {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b"},
+ {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a"},
+ {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062"},
+ {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71"},
+ {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10"},
+ {file = "nh3-0.2.17-cp37-abi3-win32.whl", hash = "sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911"},
+ {file = "nh3-0.2.17-cp37-abi3-win_amd64.whl", hash = "sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb"},
+ {file = "nh3-0.2.17.tar.gz", hash = "sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.1"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
+]
+
+[[package]]
+name = "parso"
+version = "0.8.4"
+description = "A Python Parser"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
+ {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
+]
+
+[package.extras]
+qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
+testing = ["docopt", "pytest"]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+description = "Pexpect allows easy control of interactive console applications."
+optional = false
+python-versions = "*"
+files = [
+ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
+ {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
+]
+
+[package.dependencies]
+ptyprocess = ">=0.5"
+
+[[package]]
+name = "pkginfo"
+version = "1.10.0"
+description = "Query metadata from sdists / bdists / installed packages."
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"},
+ {file = "pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297"},
+]
+
+[package.extras]
+testing = ["pytest", "pytest-cov", "wheel"]
+
+[[package]]
+name = "platformdirs"
+version = "4.2.2"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
+ {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
+type = ["mypy (>=1.8)"]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+ {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.47"
+description = "Library for building powerful interactive command lines in Python"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"},
+ {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"},
+]
+
+[package.dependencies]
+wcwidth = "*"
+
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+description = "Run a subprocess in a pseudo terminal"
+optional = false
+python-versions = "*"
+files = [
+ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
+ {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.2"
+description = "Safely evaluate AST nodes without side effects"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"},
+ {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"},
+]
+
+[package.extras]
+tests = ["pytest"]
+
+[[package]]
+name = "pycodestyle"
+version = "2.12.0"
+description = "Python style guide checker"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"},
+ {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"},
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+description = "C parser in Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
+]
+
+[[package]]
+name = "pycryptodomex"
+version = "3.20.0"
+description = "Cryptographic library for Python"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "pycryptodomex-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:645bd4ca6f543685d643dadf6a856cc382b654cc923460e3a10a49c1b3832aeb"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ff5c9a67f8a4fba4aed887216e32cbc48f2a6fb2673bb10a99e43be463e15913"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8ee606964553c1a0bc74057dd8782a37d1c2bc0f01b83193b6f8bb14523b877b"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7805830e0c56d88f4d491fa5ac640dfc894c5ec570d1ece6ed1546e9df2e98d6"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:bc3ee1b4d97081260d92ae813a83de4d2653206967c4a0a017580f8b9548ddbc"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:8af1a451ff9e123d0d8bd5d5e60f8e3315c3a64f3cdd6bc853e26090e195cdc8"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cbe71b6712429650e3883dc81286edb94c328ffcd24849accac0a4dbcc76958a"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:76bd15bb65c14900d98835fcd10f59e5e0435077431d3a394b60b15864fddd64"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:653b29b0819605fe0898829c8ad6400a6ccde096146730c2da54eede9b7b8baa"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a5ec91388984909bb5398ea49ee61b68ecb579123694bffa172c3b0a107079"},
+ {file = "pycryptodomex-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:108e5f1c1cd70ffce0b68739c75734437c919d2eaec8e85bffc2c8b4d2794305"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:59af01efb011b0e8b686ba7758d59cf4a8263f9ad35911bfe3f416cee4f5c08c"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e186342cfcc3aafaad565cbd496060e5a614b441cacc3995ef0091115c1f6c5"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:25cd61e846aaab76d5791d006497134602a9e451e954833018161befc3b5b9ed"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:9c682436c359b5ada67e882fec34689726a09c461efd75b6ea77b2403d5665b7"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-win32.whl", hash = "sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc"},
+ {file = "pycryptodomex-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458"},
+ {file = "pycryptodomex-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88afd7a3af7ddddd42c2deda43d53d3dfc016c11327d0915f90ca34ebda91499"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d3584623e68a5064a04748fb6d76117a21a7cb5eaba20608a41c7d0c61721794"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dcac11031a71348faaed1f403a0debd56bf5404232284cf8c761ff918886ebc"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69138068268127cd605e03438312d8f271135a33140e2742b417d027a0539427"},
+ {file = "pycryptodomex-3.20.0.tar.gz", hash = "sha256:7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e"},
+]
+
+[[package]]
+name = "pyflakes"
+version = "3.2.0"
+description = "passive checker of Python programs"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
+ {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
+]
+
+[[package]]
+name = "pygments"
+version = "2.18.0"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+ {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
+]
+
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
+[[package]]
+name = "pytest"
+version = "8.2.2"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
+ {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=1.5,<2.0"
+
+[package.extras]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-dependency"
+version = "0.6.0"
+description = "Manage dependencies of tests"
+optional = false
+python-versions = ">=3.4"
+files = [
+ {file = "pytest-dependency-0.6.0.tar.gz", hash = "sha256:934b0e6a39d95995062c193f7eaeed8a8ffa06ff1bcef4b62b0dc74a708bacc1"},
+]
+
+[package.dependencies]
+pytest = ">=3.7.0"
+setuptools = "*"
+
+[[package]]
+name = "pytest-mock"
+version = "3.14.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+ {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
+]
+
+[package.dependencies]
+pytest = ">=6.2.5"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "pywin32-ctypes"
+version = "0.2.2"
+description = "A (partial) reimplementation of pywin32 using ctypes/cffi"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"},
+ {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"},
+]
+
+[[package]]
+name = "readme-renderer"
+version = "43.0"
+description = "readme_renderer is a library for rendering readme descriptions for Warehouse"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "readme_renderer-43.0-py3-none-any.whl", hash = "sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9"},
+ {file = "readme_renderer-43.0.tar.gz", hash = "sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311"},
+]
+
+[package.dependencies]
+docutils = ">=0.13.1"
+nh3 = ">=0.2.14"
+Pygments = ">=2.5.1"
+
+[package.extras]
+md = ["cmarkgfm (>=0.8.0)"]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+description = "Python HTTP for Humans."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+ {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
+urllib3 = ">=1.21.1,<3"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+
+[[package]]
+name = "requests-toolbelt"
+version = "1.0.0"
+description = "A utility belt for advanced users of python-requests"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
+ {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
+]
+
+[package.dependencies]
+requests = ">=2.0.1,<3.0.0"
+
+[[package]]
+name = "rfc3986"
+version = "2.0.0"
+description = "Validating URI References per RFC 3986"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"},
+ {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"},
+]
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "rich"
+version = "13.7.1"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
+ {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
+[[package]]
+name = "secretstorage"
+version = "3.3.3"
+description = "Python bindings to FreeDesktop.org Secret Service API"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"},
+ {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"},
+]
+
+[package.dependencies]
+cryptography = ">=2.0"
+jeepney = ">=0.6"
+
+[[package]]
+name = "setuptools"
+version = "70.2.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"},
+ {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"},
+]
+
+[package.extras]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+optional = false
+python-versions = "*"
+files = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+
+[[package]]
+name = "sphinx"
+version = "7.3.7"
+description = "Python documentation generator"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"},
+ {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"},
+]
+
+[package.dependencies]
+alabaster = ">=0.7.14,<0.8.0"
+babel = ">=2.9"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+docutils = ">=0.18.1,<0.22"
+imagesize = ">=1.3"
+Jinja2 = ">=3.0"
+packaging = ">=21.0"
+Pygments = ">=2.14"
+requests = ">=2.25.0"
+snowballstemmer = ">=2.0"
+sphinxcontrib-applehelp = "*"
+sphinxcontrib-devhelp = "*"
+sphinxcontrib-htmlhelp = ">=2.0.0"
+sphinxcontrib-jsmath = "*"
+sphinxcontrib-qthelp = "*"
+sphinxcontrib-serializinghtml = ">=1.1.9"
+
+[package.extras]
+docs = ["sphinxcontrib-websupport"]
+lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"]
+test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "1.0.8"
+description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"},
+ {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "1.0.6"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"},
+ {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.0.5"
+description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"},
+ {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["html5lib", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+description = "A sphinx extension which renders display math in HTML via JavaScript"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+ {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+
+[package.extras]
+test = ["flake8", "mypy", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "1.0.7"
+description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"},
+ {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "1.1.10"
+description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"},
+ {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+description = "Extract data from python stack frames and tracebacks for informative displays"
+optional = false
+python-versions = "*"
+files = [
+ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
+ {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
+]
+
+[package.dependencies]
+asttokens = ">=2.1.0"
+executing = ">=1.2.0"
+pure-eval = "*"
+
+[package.extras]
+tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
+
+[[package]]
+name = "tqdm"
+version = "4.66.4"
+description = "Fast, Extensible Progress Meter"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
+ {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+description = "Traitlets Python configuration system"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"},
+ {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"},
+]
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"]
+
+[[package]]
+name = "twine"
+version = "5.1.1"
+description = "Collection of utilities for publishing packages on PyPI"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "twine-5.1.1-py3-none-any.whl", hash = "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997"},
+ {file = "twine-5.1.1.tar.gz", hash = "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db"},
+]
+
+[package.dependencies]
+importlib-metadata = ">=3.6"
+keyring = ">=15.1"
+pkginfo = ">=1.8.1,<1.11"
+readme-renderer = ">=35.0"
+requests = ">=2.20"
+requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0"
+rfc3986 = ">=1.4.0"
+rich = ">=12.0.0"
+urllib3 = ">=1.26.0"
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.2"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+ {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+description = "Measures the displayed width of unicode strings in a terminal"
+optional = false
+python-versions = "*"
+files = [
+ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
+ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
+]
+
+[[package]]
+name = "websocket-client"
+version = "1.8.0"
+description = "WebSocket client for Python with low level API options"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"},
+ {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"]
+optional = ["python-socks", "wsaccel"]
+test = ["websockets"]
+
+[[package]]
+name = "websockets"
+version = "12.0"
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
+ {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
+ {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"},
+ {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"},
+ {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"},
+ {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"},
+ {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"},
+ {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"},
+ {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"},
+ {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"},
+ {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"},
+ {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"},
+ {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"},
+ {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"},
+ {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"},
+ {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"},
+ {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"},
+ {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"},
+ {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"},
+ {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"},
+ {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"},
+ {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"},
+ {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"},
+ {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"},
+ {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"},
+ {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"},
+ {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"},
+ {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"},
+ {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"},
+ {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"},
+ {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"},
+ {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"},
+ {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"},
+ {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"},
+ {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"},
+ {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"},
+ {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"},
+ {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"},
+ {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"},
+ {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"},
+ {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"},
+ {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"},
+ {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"},
+ {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"},
+ {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"},
+ {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"},
+ {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"},
+ {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"},
+ {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"},
+ {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"},
+ {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"},
+ {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"},
+ {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"},
+ {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"},
+ {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
+ {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
+ {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"},
+ {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"},
+ {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"},
+ {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"},
+ {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"},
+ {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"},
+ {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"},
+ {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"},
+ {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"},
+ {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"},
+ {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
+ {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
+]
+
+[[package]]
+name = "wheel"
+version = "0.43.0"
+description = "A built-package format for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"},
+ {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"},
+]
+
+[package.extras]
+test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
+
+[[package]]
+name = "win32-setctime"
+version = "1.1.0"
+description = "A small Python utility to set file creation time on Windows"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
+ {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
+]
+
+[package.extras]
+dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
+
+[[package]]
+name = "yt-dlp"
+version = "2024.7.2"
+description = "A feature-rich command-line audio/video downloader"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "yt_dlp-2024.7.2-py3-none-any.whl", hash = "sha256:4f76b48244c783e6ac06e8d7627bcf62cbeb4f6d79ba7e3cfc8249e680d4e691"},
+ {file = "yt_dlp-2024.7.2.tar.gz", hash = "sha256:2b0c86b579d4a044eaf3c4b00e3d7b24d82e6e26869fa11c288ea4395b387f41"},
+]
+
+[package.dependencies]
+brotli = {version = "*", markers = "implementation_name == \"cpython\""}
+brotlicffi = {version = "*", markers = "implementation_name != \"cpython\""}
+certifi = "*"
+mutagen = "*"
+pycryptodomex = "*"
+requests = ">=2.32.2,<3"
+urllib3 = ">=1.26.17,<3"
+websockets = ">=12.0"
+
+[package.extras]
+build = ["build", "hatchling", "pip", "setuptools", "wheel"]
+curl-cffi = ["curl-cffi (==0.5.10)"]
+dev = ["autopep8 (>=2.0,<3.0)", "pre-commit", "pytest (>=8.1,<9.0)", "ruff (>=0.5.0,<0.6.0)"]
+py2exe = ["py2exe (>=0.12)"]
+pyinstaller = ["pyinstaller (>=6.7.0)"]
+secretstorage = ["cffi", "secretstorage"]
+static-analysis = ["autopep8 (>=2.0,<3.0)", "ruff (>=0.5.0,<0.6.0)"]
+test = ["pytest (>=8.1,<9.0)"]
+
+[[package]]
+name = "zipp"
+version = "3.19.2"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
+ {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
+]
+
+[package.extras]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.11"
+content-hash = "27c89f6f3e6a318198d21d63dbdbb98b7f27b1e32c774b61b6e9fc1cbc322fb5"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..bc91704
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,35 @@
+[tool.poetry]
+name = "youtool"
+version = "0.1.1"
+description = "Easy-to-use library to access YouTube Data API v3 in bulk operations"
+authors = ["Álvaro Justen <alvarojusten@gmail.com>"]
+readme = "README.md"
+
+[tool.poetry.dependencies]
+python = "^3.11"
+loguru = "^0.7.2"
+tqdm = "^4.66.4"
+yt-dlp = "^2024.7.2"
+chat-downloader = "^0.2.8"
+isodate = "^0.6.1"
+requests = "^2.32.3"
+
+[tool.poetry.group.dev.dependencies]
+pytest = "^8.2.2"
+autoflake = "^2.3.1"
+black = "^24.4.2"
+flake8 = "^7.1.0"
+ipython = "^8.26.0"
+isort = "^5.13.2"
+pytest-dependency = "^0.6.0"
+twine = "^5.1.1"
+wheel = "^0.43.0"
+
+pytest-mock = "^3.14.0"
+sphinx = "^7.3.7"
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry.scripts]
+youtool = "youtool.cli:main"
diff --git a/requirements/base.txt b/requirements/base.txt
deleted file mode 100644
index ea93b32..0000000
--- a/requirements/base.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-isodate
-requests
diff --git a/requirements/cli.txt b/requirements/cli.txt
deleted file mode 100644
index 0ff8c5c..0000000
--- a/requirements/cli.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-loguru
-tqdm
diff --git a/requirements/dev.txt b/requirements/dev.txt
deleted file mode 100644
index 9a89a93..0000000
--- a/requirements/dev.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-autoflake
-black
-flake8
-ipython
-isort
-pytest
-pytest-dependency
-twine
-wheel
diff --git a/requirements/livechat.txt b/requirements/livechat.txt
deleted file mode 100644
index f035dbe..0000000
--- a/requirements/livechat.txt
+++ /dev/null
@@ -1 +0,0 @@
-chat-downloader
diff --git a/requirements/transcription.txt b/requirements/transcription.txt
deleted file mode 100644
index 47e3da8..0000000
--- a/requirements/transcription.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-webvtt-py
-yt-dlp
diff --git a/scripts/channel_data.py b/scripts/channel_data.py
new file mode 100644
index 0000000..e00b965
--- /dev/null
+++ b/scripts/channel_data.py
@@ -0,0 +1,187 @@
+# pip install youtool[livechat,transcription]
+import argparse
+import csv
+import json
+import os
+import shelve
+from pathlib import Path
+
+from chat_downloader.errors import ChatDisabled, LoginRequired, NoChatReplay
+from tqdm import tqdm
+from youtool import YouTube
+
+
+class CsvLazyDictWriter: # Adapted from rows.utils.CsvLazyDictWriter (the `rows` package)
+    """Lazy CSV dict writer, so you don't need to specify field names beforehand
+
+    This class is almost the same as `csv.DictWriter` with the following
+    differences:
+
+    - You don't need to pass `fieldnames` (it's extracted on the first
+      `.writerow` call);
+    - You can pass either a filename or a fobj (like `sys.stdout`);
+    """
+
+    def __init__(self, filename_or_fobj, encoding="utf-8", *args, **kwargs):
+        # Underlying csv.DictWriter; created lazily on the first writerow()
+        self.writer = None
+        self.filename_or_fobj = filename_or_fobj
+        self.encoding = encoding
+        # File object; opened lazily by the `fobj` property
+        self._fobj = None
+        # Extra args forwarded verbatim to csv.DictWriter
+        self.writer_args = args
+        self.writer_kwargs = kwargs
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+
+    @property
+    def fobj(self):
+        # Duck-typing: anything with a `read` attribute is treated as an
+        # already-open file object; otherwise it's a filename to open.
+        if self._fobj is None:
+            if getattr(self.filename_or_fobj, "read", None) is not None:
+                self._fobj = self.filename_or_fobj
+            else:
+                self._fobj = open(
+                    self.filename_or_fobj, mode="w", encoding=self.encoding
+                )
+
+        return self._fobj
+
+    def writerow(self, row):
+        """Write one dict row; on the first call, derive fieldnames from its
+        keys and emit the header."""
+        if self.writer is None:
+            self.writer = csv.DictWriter(
+                self.fobj,
+                fieldnames=list(row.keys()),
+                *self.writer_args,
+                **self.writer_kwargs
+            )
+            self.writer.writeheader()
+
+        # Rebind the bound method so subsequent calls skip the lazy-init
+        # check entirely and go straight to csv.DictWriter.writerow.
+        self.writerow = self.writer.writerow
+        return self.writerow(row)
+
+    def __del__(self):
+        # Best-effort close if the caller forgot to (close() is idempotent).
+        self.close()
+
+    def close(self):
+        if self._fobj and not self._fobj.closed:
+            self._fobj.close()
+
+
+# TODO: add options to get only part of the data (not all steps)
+parser = argparse.ArgumentParser()
+parser.add_argument("--api-key", default=os.environ.get("YOUTUBE_API_KEY"), help="Comma-separated list of YouTube API keys to use")
+parser.add_argument("username_or_channel_url", type=str)
+parser.add_argument("data_path", type=Path)
+parser.add_argument("language-code", default="pt-orig", help="See the list by running `yt-dlp --list-subs `")
+args = parser.parse_args()
+
+if not args.api_key:
+ import sys
+
+ print("ERROR: API key must be provided either by `--api-key` or `YOUTUBE_API_KEY` environment variable", file=sys.stderr)
+ exit(1)
+api_keys = [key.strip() for key in args.api_key.split(",") if key.strip()]
+
+
+username = args.username
+if username.startswith("https://"):
+ channel_url = username
+ username = [item for item in username.split("/") if item][-1]
+else:
+ channel_url = f"https://www.youtube.com/@{username}"
+data_path = args.data_path
+channel_csv_filename = data_path / f"{username}-channel.csv"
+playlist_csv_filename = data_path / f"{username}-playlist.csv"
+playlist_video_csv_filename = data_path / f"{username}-playlist-video.csv"
+video_csv_filename = data_path / f"{username}-video.csv"
+comment_csv_filename = data_path / f"{username}-comment.csv"
+livechat_csv_filename = data_path / f"username}-livechat.csv"
+language_code = args.language_code
+video_transcription_path = data_path / Path(f"{username}-transcriptions")
+
+yt = YouTube(api_keys, disable_ipv6=True)
+video_transcription_path.mkdir(parents=True, exist_ok=True)
+channel_writer = CsvLazyDictWriter(channel_csv_filename)
+playlist_writer = CsvLazyDictWriter(playlist_csv_filename)
+video_writer = CsvLazyDictWriter(video_csv_filename)
+comment_writer = CsvLazyDictWriter(comment_csv_filename)
+livechat_writer = CsvLazyDictWriter(livechat_csv_filename)
+playlist_video_writer = CsvLazyDictWriter(playlist_video_csv_filename)
+
+print("Retrieving channel info")
+channel_id = yt.channel_id_from_url(channel_url)
+# channels_infos returns an iterable; we asked for one ID, take the first row
+channel_info = list(yt.channels_infos([channel_id]))[0]
+channel_writer.writerow(channel_info)
+channel_writer.close()
+
+# The channel's "uploads" playlist is not returned by channel_playlists(),
+# so synthesize a row for it from the channel info.
+main_playlist = {
+    "id": channel_info["playlist_id"],
+    "title": "Uploads",
+    "description": channel_info["description"],
+    "videos": channel_info["videos"],
+    "channel_id": channel_id,
+    "channel_title": channel_info["title"],
+    "published_at": channel_info["published_at"],
+    "thumbnail_url": channel_info["thumbnail_url"],
+}
+playlist_writer.writerow(main_playlist)
+playlist_ids = [channel_info["playlist_id"]]
+for playlist in tqdm(yt.channel_playlists(channel_id), desc="Retrieving channel playlists"):
+    playlist_writer.writerow(playlist)
+    playlist_ids.append(playlist["id"])
+playlist_writer.close()
+
+# Collect unique video IDs across all playlists while writing the
+# playlist<->video relation rows.
+video_ids = []
+for playlist_id in tqdm(playlist_ids, desc="Retrieving playlists' videos"):
+    for video in yt.playlist_videos(playlist_id):
+        if video["id"] not in video_ids:
+            video_ids.append(video["id"])
+        row = {
+            "playlist_id": playlist_id,
+            "video_id": video["id"],
+            "video_status": video["status"],
+            "channel_id": video["channel_id"],
+            "channel_title": video["channel_title"],
+            "playlist_channel_id": video["playlist_channel_id"],
+            "playlist_channel_title": video["playlist_channel_title"],
+            "title": video["title"],
+            "description": video["description"],
+            "published_at": video["published_at"],
+            "added_to_playlist_at": video["added_to_playlist_at"],
+            "tags": video["tags"],
+        }
+        playlist_video_writer.writerow(row)
+playlist_video_writer.close()
+
+videos = []
+for video in tqdm(yt.videos_infos(video_ids), desc="Retrieving detailed video information"):
+    videos.append(video)
+    video_writer.writerow(video)
+video_writer.close()
+
+for video_id in tqdm(video_ids, desc="Retrieving video comments"):
+    try:
+        for comment in yt.video_comments(video_id):
+            comment_writer.writerow(comment)
+    except RuntimeError:  # Comments disabled
+        continue
+comment_writer.close()
+
+print("Retrieving transcriptions")
+yt.videos_transcriptions(
+    video_ids,
+    language_code=language_code,
+    path=video_transcription_path,
+    skip_downloaded=True,
+    batch_size=10,
+)
+
+# TODO: live chat code will freeze if it's not available
+for video_id in tqdm(video_ids, desc="Retrieving live chat"):
+    try:
+        for comment in yt.video_livechat(video_id):
+            livechat_writer.writerow(comment)
+    except (LoginRequired, NoChatReplay, ChatDisabled):
+        continue
+livechat_writer.close()
diff --git a/scripts/clean_vtt.py b/scripts/clean_vtt.py
new file mode 100644
index 0000000..3412b59
--- /dev/null
+++ b/scripts/clean_vtt.py
@@ -0,0 +1,43 @@
+# pip install webvtt-py
+import argparse
+import io
+import json
+import os
+import shelve
+import time
+from pathlib import Path
+
+import tiktoken
+import webvtt
+from openai import APITimeoutError, OpenAI
+from rows.utils import CsvLazyDictWriter
+from tqdm import tqdm
+
+
+def vtt_clean(vtt_content, same_line=False):
+ result_lines, last_line = [], None
+ for caption in webvtt.read_buffer(io.StringIO(vtt_content)):
+ new_lines = caption.text.strip().splitlines()
+ for line in new_lines:
+ line = line.strip()
+ if not line or line == last_line:
+ continue
+ result_lines.append(f"{str(caption.start).split('.')[0]} {line}\n" if not same_line else f"{line} ")
+ last_line = line
+ return "".join(result_lines)
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument("input_path", type=Path)
+parser.add_argument("output_path", type=Path)
+args = parser.parse_args()
+
+# Clean every .vtt in input_path into output_path, keeping the same name.
+# Already-converted files are skipped, so the script is safe to re-run.
+for filename in tqdm(args.input_path.glob("*.vtt")):
+    new_filename = args.output_path / filename.name
+    if new_filename.exists():
+        continue
+    with filename.open() as fobj:
+        data = fobj.read()
+    result = vtt_clean(data)
+    with new_filename.open(mode="w") as fobj:
+        fobj.write(result)
diff --git a/setup.cfg b/setup.cfg
index 77478cb..2cffba5 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -24,6 +24,10 @@ packages = find:
python_requires = >=3.7
install_requires = file: requirements/base.txt
+[options.entry_points]
+console_scripts =
+ youtool = youtool.cli:main
+
[options.extras_require]
cli = file: requirements/cli.txt
dev = file: requirements/dev.txt
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 6068493..0000000
--- a/setup.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from setuptools import setup
-
-setup()
diff --git a/tests/commands/__init__.py b/tests/commands/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/commands/conftest.py b/tests/commands/conftest.py
new file mode 100644
index 0000000..9970eab
--- /dev/null
+++ b/tests/commands/conftest.py
@@ -0,0 +1,29 @@
+import pytest
+
+
+@pytest.fixture
+def channels_urls():
+    """Sample channel URLs in both handle (@...) and /c/ formats."""
+    return [
+        "https://www.youtube.com/@Turicas/featured",
+        "https://www.youtube.com/c/PythonicCaf%C3%A9"
+    ]
+
+
+@pytest.fixture
+def videos_ids():
+    """Placeholder video IDs used to build URLs and as command inputs."""
+    return [
+        "video_id_1",
+        "video_id_2"
+    ]
+
+
+@pytest.fixture
+def videos_urls(videos_ids):
+    """Watch URLs derived from `videos_ids` (one per ID)."""
+    return [
+        f"https://www.youtube.com/?v={video_id}" for video_id in videos_ids
+    ]
+
+
+@pytest.fixture
+def usernames():
+    """Sample channel usernames (without URL or @ prefix)."""
+    return ["Turicas", "PythonicCafe"]
diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py
new file mode 100644
index 0000000..7cf87d3
--- /dev/null
+++ b/tests/commands/test_base.py
@@ -0,0 +1,193 @@
+import csv
+import argparse
+import pytest
+
+from pathlib import Path
+from unittest.mock import MagicMock, patch, mock_open
+from youtool.commands import Command
+
+
+class TestCommand(Command):
+    """Minimal concrete Command used as a test double.
+
+    NOTE(review): the `Test` name prefix means pytest may attempt to collect
+    this class; it defines no test methods, so confirm it only warns.
+    """
+    name = "command_name"
+    arguments = [
+        {"name": "--test-arg", "help": "Test argument", "default": "default_value", "type": str}
+    ]
+
+    @classmethod
+    def execute(cls, **kwargs):
+        # Sentinel return value so tests can detect this was dispatched to.
+        return "executed"
+
+@pytest.fixture
+def subparsers():
+    """Fresh argparse subparsers action for registering command parsers."""
+    parser = argparse.ArgumentParser()
+    return parser.add_subparsers()
+
+
+def test_generate_parser(subparsers):
+    """Test to verify the parser generation.
+
+    This test checks if the `generate_parser` method correctly generates a parser
+    for the command and sets the appropriate properties (type and prog name).
+    """
+    parser = TestCommand.generate_parser(subparsers)
+
+    assert parser is not None, "Parser should not be None"
+    assert isinstance(parser, argparse.ArgumentParser), "Parser should be an instance of argparse.ArgumentParser"
+    assert parser.prog.endswith(TestCommand.name), f"Parser prog should end with '{TestCommand.name}'"
+
+
+def test_parse_arguments(subparsers):
+    """Test to verify argument parsing.
+
+    This test checks if the `parse_arguments` method correctly adds the command's
+    arguments to the parser and sets the default function to the command's execute method.
+    """
+    # Mock the subparsers action so we can assert on the exact calls made,
+    # without building a real parser.
+    subparsers_mock = MagicMock(spec=subparsers)
+
+    TestCommand.parse_arguments(subparsers_mock)
+
+    subparsers_mock.add_parser.assert_called_once_with(TestCommand.name, help=TestCommand.__doc__)
+    parser_mock = subparsers_mock.add_parser.return_value
+    # "name" from the arguments dict becomes the positional flag string;
+    # the remaining keys are forwarded as keyword arguments.
+    parser_mock.add_argument.assert_called_once_with("--test-arg", help="Test argument", default="default_value", type=str)
+    parser_mock.set_defaults.assert_called_once_with(func=TestCommand.execute)
+
+
+def test_command():
+    """Test to verify that the `execute` method is implemented.
+
+    This test ensures that if a command does not implement the `execute` method,
+    a `NotImplementedError` is raised.
+    """
+    # Subclass that deliberately does not override execute()
+    class MyCommand(Command):
+        pass
+
+    with pytest.raises(NotImplementedError):
+        MyCommand.execute()
+
+
+@pytest.fixture
+def mock_csv_file():
+    """Fixture to provide mock CSV content for tests."""
+
+    # NOTE(review): the indented continuation lines embed leading spaces in
+    # each CSV row, yet the tests below compare against unindented URLs —
+    # presumably Command.data_from_csv strips whitespace. TODO confirm.
+    csv_content = """URL
+    http://example.com
+    http://example2.com
+    """
+    return csv_content
+
+def test_data_from_csv_valid(mock_csv_file):
+    """Test to verify reading data from a valid CSV file.
+
+    This test checks if the `data_from_csv` method correctly reads data from a valid CSV file
+    and returns the expected list of URLs.
+
+    Args:
+        mock_csv_file (str): The mock CSV file content.
+    """
+    # Patch the existence check and open() so no real file is needed;
+    # the Path below is never actually read from disk.
+    with patch('pathlib.Path.is_file', return_value=True):
+        with patch('builtins.open', mock_open(read_data=mock_csv_file)):
+            data_column_name = "URL"
+            file_path = Path("tests/resources/csv_valid.csv")
+            result = Command.data_from_csv(file_path, data_column_name)
+            assert len(result) == 2
+            assert result[0] == "http://example.com"
+            assert result[1] == "http://example2.com"
+
+def test_data_from_csv_file_not_found():
+    """Test to verify behavior when the CSV file does not exist.
+
+    This test checks if the `data_from_csv` method raises `FileNotFoundError`
+    when the given path does not point to an existing file.
+    """
+    with patch('pathlib.Path.is_file', return_value=False):
+        file_path = Path("/fake/path/not_found.csv")
+        with pytest.raises(FileNotFoundError):
+            Command.data_from_csv(file_path, "URL")
+
+def test_data_from_csv_column_not_found(mock_csv_file):
+    """Test that `data_from_csv` raises a descriptive error when the
+    requested column is absent from the CSV header."""
+    with patch('pathlib.Path.is_file', return_value=True):
+        with patch('builtins.open', mock_open(read_data=mock_csv_file)):
+            file_path = Path("tests/resources/csv_column_not_found.csv")
+            with pytest.raises(Exception) as exc_info:
+                Command.data_from_csv(file_path, "NonExistentColumn")
+            assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value)
+
+
+@pytest.fixture
+def sample_data():
+    """Fixture to provide sample data (list of channel dicts) for tests."""
+    return [
+        {"id": "123", "name": "Channel One"},
+        {"id": "456", "name": "Channel Two"}
+    ]
+
+def test_data_to_csv_with_output_file_path(tmp_path, sample_data):
+    """Test to verify writing data to a CSV file with an output file path specified.
+
+    This test checks if the `data_to_csv` method correctly writes the sample data to
+    a CSV file when an output file path is provided (and returns that path).
+    """
+    output_file_path = tmp_path / "output.csv"
+
+    result_path = Command.data_to_csv(sample_data, str(output_file_path))
+
+    assert result_path == str(output_file_path)
+    assert output_file_path.exists()
+    # Read the file back and verify both rows round-trip through csv.
+    with output_file_path.open('r') as f:
+        reader = csv.DictReader(f)
+        rows = list(reader)
+        assert len(rows) == 2
+        assert rows[0]["id"] == "123" and rows[1]["id"] == "456"
+
+def test_data_to_csv_without_output_file_path(sample_data):
+    """Test to verify writing data to a CSV format without an output file path specified.
+
+    This test checks if the `data_to_csv` method correctly returns the CSV content
+    as a string when no output file path is provided.
+    """
+    csv_content = Command.data_to_csv(sample_data)
+
+    # Header plus one line per input dict
+    assert "id,name" in csv_content
+    assert "123,Channel One" in csv_content
+    assert "456,Channel Two" in csv_content
+
+def test_data_to_csv_output(tmp_path):
+    """
+    Test to verify the content of the output CSV file.
+
+    This test checks if the `data_to_csv` method writes the expected content
+    (byte-for-byte, including the header) to the output CSV file.
+    """
+    output_file_path = tmp_path / "output.csv"
+
+    data = [
+        {"id": 1, "name": "Test1"},
+        {"id": 2, "name": "Test2"}
+    ]
+
+    expected_output = "id,name\n1,Test1\n2,Test2\n"
+    result = Command.data_to_csv(data, str(output_file_path))
+    assert Path(output_file_path).is_file()
+    assert expected_output == Path(output_file_path).read_text()
+    assert str(output_file_path) == result
+
+def test_filter_fields():
+    """Test that `filter_fields` keeps only the requested keys of a dict,
+    dropping everything not listed in `info_columns`."""
+    channel_info = {
+        'channel_id': '123456',
+        'channel_name': 'Test Channel',
+        'subscribers': 1000,
+        'videos': 50,
+        'category': 'Tech'
+    }
+
+    info_columns = ['channel_id', 'channel_name', 'subscribers']
+    filtered_info = Command.filter_fields(channel_info, info_columns)
+
+    expected_result = {
+        'channel_id': '123456',
+        'channel_name': 'Test Channel',
+        'subscribers': 1000
+    }
+
+    assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}"
diff --git a/tests/commands/test_channel_id.py b/tests/commands/test_channel_id.py
new file mode 100644
index 0000000..04400ef
--- /dev/null
+++ b/tests/commands/test_channel_id.py
@@ -0,0 +1,80 @@
+import csv
+import pytest
+
+from io import StringIO
+
+from unittest.mock import patch, call
+from youtool.commands.channel_id import ChannelId
+
@pytest.fixture
def csv_file(tmp_path):
    """Temporary CSV file containing a single YouTube channel URL."""
    path = tmp_path / "urls.csv"
    path.write_text("channel_url\nhttps://www.youtube.com/@Turicas/featured\n")
    return path
+
@pytest.fixture
def youtube_api_mock():
    """Mock the `YouTube` class used by the channel-id command.

    `channel_id_from_url` is patched to derive a predictable fake channel ID
    from the given URL, so tests can assert on the output deterministically.
    """
    with patch("youtool.commands.channel_id.YouTube") as youtube_cls:
        youtube_cls.return_value.channel_id_from_url.side_effect = (
            lambda url: f"channel-{url}"
        )
        yield youtube_cls
+
def test_channels_ids_csv_preparation(youtube_api_mock):
    """Test that `ChannelId.execute` renders channel IDs as CSV.

    With the YouTube API mocked so each URL resolves to a predictable channel
    ID, the command output must equal a CSV built with the custom ID column
    name and one row per input URL.
    """
    urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"]
    api_key = "test_api_key"
    id_column_name = "custom_id_column"
    expected_result_data = [
        {id_column_name: "channel-https://www.youtube.com/@Turicas/featured"},
        {id_column_name: "channel-https://www.youtube.com/c/PythonicCaf%C3%A9"}
    ]
    # Render the expected CSV with the stdlib writer so formatting matches.
    with StringIO() as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=[id_column_name])
        writer.writeheader()
        writer.writerows(expected_result_data)
        expected_result_csv = csv_file.getvalue()

    result = ChannelId.execute(urls=urls, api_key=api_key, id_column_name=id_column_name)

    youtube_api_mock.return_value.channel_id_from_url.assert_has_calls([call(url) for url in urls], any_order=True)
    assert result == expected_result_csv
+
+
def test_resolve_urls_with_direct_urls():
    """`resolve_urls` returns the URL list unchanged when given directly."""
    direct_urls = ["https://www.youtube.com/@Turicas/featured"]
    assert ChannelId.resolve_urls(direct_urls, None, None) == direct_urls
+
def test_resolve_urls_with_file_path(csv_file):
    """`resolve_urls` reads the URLs from the given CSV file's column."""
    resolved = ChannelId.resolve_urls(None, csv_file, "channel_url")
    assert resolved == ["https://www.youtube.com/@Turicas/featured"]
+
def test_resolve_urls_raises_exception():
    """`resolve_urls` fails when neither URLs nor a file path are given."""
    expected_message = "Either 'username' or 'url' must be provided for the channel-id command"
    with pytest.raises(Exception, match=expected_message):
        ChannelId.resolve_urls(None, None, None)
diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py
new file mode 100644
index 0000000..329680e
--- /dev/null
+++ b/tests/commands/test_channel_info.py
@@ -0,0 +1,57 @@
+import pytest
+
+from unittest.mock import Mock, call
+
+from youtool.commands.channel_info import ChannelInfo
+
+
def test_filter_fields():
    """`ChannelInfo.filter_fields` must keep only the requested columns.

    Fields of the channel-info dictionary not listed in the columns argument
    are dropped from the result.
    """
    channel_info = {
        'channel_id': '123456',
        'channel_name': 'Test Channel',
        'subscribers': 1000,
        'videos': 50,
        'category': 'Tech'
    }
    wanted_columns = ['channel_id', 'channel_name', 'subscribers']

    filtered_info = ChannelInfo.filter_fields(channel_info, wanted_columns)

    expected_result = {key: channel_info[key] for key in wanted_columns}
    assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}"
+
+
def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames):
    """`ChannelInfo.execute` resolves channel IDs from both URLs and usernames.

    Both resolution paths are mocked; every resolved ID must be forwarded to a
    single `channels_infos` call.
    """
    url_id = "id_from_url"
    username_id = "id_from_username"
    youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube")

    id_from_url_mock = Mock(return_value=url_id)
    id_from_username_mock = Mock(return_value=username_id)
    infos_mock = Mock(return_value=[])

    instance = youtube_mock.return_value
    instance.channel_id_from_url = id_from_url_mock
    instance.channel_id_from_username = id_from_username_mock
    instance.channels_infos = infos_mock

    ChannelInfo.execute(urls=channels_urls, usernames=usernames)

    id_from_url_mock.assert_has_calls([call(url) for url in channels_urls])
    id_from_username_mock.assert_has_calls([call(username) for username in usernames])
    infos_mock.assert_called_once()
    requested_ids = infos_mock.call_args.args[0]
    assert username_id in requested_ids
    assert url_id in requested_ids
diff --git a/tests/commands/test_video_comments.py b/tests/commands/test_video_comments.py
new file mode 100644
index 0000000..386c5de
--- /dev/null
+++ b/tests/commands/test_video_comments.py
@@ -0,0 +1,69 @@
+import csv
+import pytest
+
+from io import StringIO
+from datetime import datetime
+from unittest.mock import Mock
+from youtool.commands import VideoComments
+
+
def test_video_comments(mocker):
    """Fetch comments for a video and compare the CSV output with the mock data.

    The YouTube API is mocked so `video_comments` yields a fixed comment list;
    the command must render exactly that list as CSV.
    """
    youtube_mock = mocker.patch("youtool.commands.video_comments.YouTube")
    video_id = "video_id_mock"
    comments = [{"text": "my_comment", "author": "my_name"}]

    expected_buffer = StringIO()
    writer = csv.DictWriter(expected_buffer, fieldnames=comments[0].keys())
    writer.writeheader()
    writer.writerows(comments)

    comments_mock = Mock(return_value=comments)
    youtube_mock.return_value.video_comments = comments_mock

    result = VideoComments.execute(id=video_id)

    comments_mock.assert_called_once_with(video_id)
    assert result == expected_buffer.getvalue()
+
+
def test_video_comments_with_file_output(mocker, tmp_path):
    """Comments are written to the CSV file whose path `execute` returns."""
    youtube_mock = mocker.patch("youtool.commands.video_comments.YouTube")
    video_id = "video_id_mock"
    comments = [{"text": "my_comment", "author": "my_name"}]

    expected_buffer = StringIO()
    writer = csv.DictWriter(expected_buffer, fieldnames=comments[0].keys())
    writer.writeheader()
    writer.writerows(comments)

    # Microsecond suffix keeps file names unique across repeated runs.
    output_file_path = tmp_path / f"output_{datetime.now().strftime('%f')}.csv"

    comments_mock = Mock(return_value=comments)
    youtube_mock.return_value.video_comments = comments_mock

    result_file_path = VideoComments.execute(id=video_id, output_file_path=output_file_path)

    with open(result_file_path, "r") as result_csv_file:
        written_csv = result_csv_file.read()

    comments_mock.assert_called_once_with(video_id)

    # Normalize line endings: csv on disk may use \r\n.
    assert written_csv.replace("\r", "") == expected_buffer.getvalue().replace("\r", "")
diff --git a/tests/commands/test_video_info.py b/tests/commands/test_video_info.py
new file mode 100644
index 0000000..f4da48f
--- /dev/null
+++ b/tests/commands/test_video_info.py
@@ -0,0 +1,106 @@
+import csv
+import pytest
+
+from unittest.mock import Mock
+from pathlib import Path
+from youtool.commands import VideoInfo
+
+
@pytest.fixture
def youtube_mock(mocker, mock_video_info):
    """Mocked YouTube instance whose `videos_infos` returns the sample data."""
    patched_cls = mocker.patch("youtool.commands.video_info.YouTube")
    instance = patched_cls.return_value
    instance.videos_infos = Mock(return_value=mock_video_info)
    return instance
+
@pytest.fixture
def mock_video_info():
    """Two fake video-info records, shaped like the YouTube API wrapper output."""
    fields = ("id", "title", "description", "published_at", "view_count", "like_count", "comment_count")
    rows = [
        ("tmrhPou85HQ", "Title 1", "Description 1", "2021-01-01", 100, 10, 5),
        ("qoI_x9fylaw", "Title 2", "Description 2", "2021-02-01", 200, 20, 10),
    ]
    return [dict(zip(fields, row)) for row in rows]
+
def test_execute_with_ids_and_urls(youtube_mock, mocker, tmp_path, mock_video_info):
    """`VideoInfo.execute` accepts both IDs and URLs and writes the info CSV."""
    ids = ["tmrhPou85HQ", "qoI_x9fylaw"]
    urls = [
        "https://www.youtube.com/watch?v=tmrhPou85HQ&ab_channel=Turicas",
        "https://www.youtube.com/watch?v=qoI_x9fylaw&ab_channel=PythonicCaf%C3%A9",
    ]
    target = tmp_path / "output.csv"

    VideoInfo.execute(ids=ids, urls=urls, output_file_path=str(target), api_key="test_api_key")

    assert target.is_file()
    with target.open() as csv_file:
        rows = list(csv.DictReader(csv_file))

    assert rows[0]["id"] == "tmrhPou85HQ"
    assert rows[1]["id"] == "qoI_x9fylaw"
+
def test_execute_missing_arguments():
    """`VideoInfo.execute` raises when neither video IDs nor URLs are provided.

    Raises:
        Exception: Expected from the command when both inputs are absent.
    """
    with pytest.raises(Exception) as exc_info:
        VideoInfo.execute(api_key="test_api_key")

    assert str(exc_info.value) == "Either 'ids' or 'urls' must be provided for the video-info command"
+
def test_execute_with_input_file_path(youtube_mock, mocker, tmp_path, mock_video_info):
    """`VideoInfo.execute` reads IDs/URLs from an input CSV and writes the output CSV.

    The input file is built line by line so the data rows carry no stray
    leading whitespace (an indented triple-quoted literal would embed the
    source indentation into every CSV row and corrupt the parsed IDs).
    """
    input_rows = [
        "video_id,video_url",
        "tmrhPou85HQ,https://www.youtube.com/watch?v=tmrhPou85HQ&ab_channel=Turicas",
        "qoI_x9fylaw,https://www.youtube.com/watch?v=qoI_x9fylaw&ab_channel=PythonicCaf%C3%A9",
    ]
    input_file_path = tmp_path / "input.csv"
    output_file_path = tmp_path / "output.csv"
    input_file_path.write_text("\n".join(input_rows) + "\n")

    VideoInfo.execute(input_file_path=str(input_file_path), output_file_path=str(output_file_path), api_key="test_api_key")

    assert Path(output_file_path).is_file()
    with open(output_file_path, 'r') as f:
        csv_data = list(csv.DictReader(f))

    assert csv_data[0]["id"] == "tmrhPou85HQ"
    assert csv_data[1]["id"] == "qoI_x9fylaw"
+
+
def test_execute_with_info_columns(youtube_mock, mocker, tmp_path, mock_video_info):
    """`VideoInfo.execute` keeps only the requested columns in the output CSV."""
    ids = ["tmrhPou85HQ", "qoI_x9fylaw"]
    target = tmp_path / "output.csv"

    VideoInfo.execute(ids=ids, output_file_path=str(target), api_key="test_api_key", info_columns="id,title")

    assert target.is_file()
    with target.open() as csv_file:
        rows = list(csv.DictReader(csv_file))

    assert rows[0]["id"] == "tmrhPou85HQ"
    assert rows[0]["title"] == "Title 1"
    assert rows[1]["id"] == "qoI_x9fylaw"
    assert rows[1]["title"] == "Title 2"
diff --git a/tests/commands/test_video_livechat.py b/tests/commands/test_video_livechat.py
new file mode 100644
index 0000000..c91db87
--- /dev/null
+++ b/tests/commands/test_video_livechat.py
@@ -0,0 +1,67 @@
+import csv
+import pytest
+
+from io import StringIO
+from datetime import datetime
+from unittest.mock import Mock
+from youtool.commands import VideoLiveChat
+
+
def test_video_livechat(mocker):
    """Live chat messages are rendered as CSV and returned as a string.

    The YouTube API is mocked so `video_livechat` yields one message covering
    every chat-message column.
    """
    youtube_mock = mocker.patch("youtool.commands.video_livechat.YouTube")
    video_id = "video_id_mock"
    messages = [{column: "data" for column in VideoLiveChat.CHAT_MESSAGE_COLUMNS}]

    expected_buffer = StringIO()
    writer = csv.DictWriter(expected_buffer, fieldnames=messages[0].keys())
    writer.writeheader()
    writer.writerows(messages)

    livechat_mock = Mock(return_value=messages)
    youtube_mock.return_value.video_livechat = livechat_mock

    result = VideoLiveChat.execute(id=video_id)

    livechat_mock.assert_called_once_with(video_id)
    assert result == expected_buffer.getvalue()
+
+
def test_video_livechat_with_file_output(mocker, tmp_path):
    """Live chat messages are saved to the CSV file whose path `execute` returns."""
    youtube_mock = mocker.patch("youtool.commands.video_livechat.YouTube")
    video_id = "video_id_mock"
    messages = [{column: "data" for column in VideoLiveChat.CHAT_MESSAGE_COLUMNS}]

    expected_buffer = StringIO()
    writer = csv.DictWriter(expected_buffer, fieldnames=messages[0].keys())
    writer.writeheader()
    writer.writerows(messages)

    # Microsecond suffix keeps file names unique across repeated runs.
    output_file_path = tmp_path / f"output_{datetime.now().strftime('%f')}.csv"

    livechat_mock = Mock(return_value=messages)
    youtube_mock.return_value.video_livechat = livechat_mock

    result_file_path = VideoLiveChat.execute(id=video_id, output_file_path=output_file_path)

    with open(result_file_path, "r") as result_csv_file:
        written_csv = result_csv_file.read()

    livechat_mock.assert_called_once_with(video_id)

    # Normalize line endings: csv on disk may use \r\n.
    assert written_csv.replace("\r", "") == expected_buffer.getvalue().replace("\r", "")
diff --git a/tests/commands/test_video_search.py b/tests/commands/test_video_search.py
new file mode 100644
index 0000000..a30a879
--- /dev/null
+++ b/tests/commands/test_video_search.py
@@ -0,0 +1,86 @@
+import csv
+import pytest
+
+from io import StringIO
+from unittest.mock import Mock
+
+from datetime import datetime
+
+from youtool.commands.video_search import VideoSearch
+
+
def test_video_search_string_output(mocker, videos_ids, videos_urls):
    """video-search returns its results as a CSV string.

    The API is mocked so `videos_infos` yields one synthetic record per video
    ID; the command output must match a CSV of exactly those records.
    """
    youtube_mock = mocker.patch("youtool.commands.video_search.YouTube")
    fake_infos = [
        {column: f"v_{index}" for column in VideoSearch.INFO_COLUMNS}
        for index in range(len(videos_ids))
    ]

    expected_buffer = StringIO()
    writer = csv.DictWriter(expected_buffer, fieldnames=VideoSearch.INFO_COLUMNS)
    writer.writeheader()
    writer.writerows(fake_infos)

    infos_mock = Mock(return_value=fake_infos)
    youtube_mock.return_value.videos_infos = infos_mock

    result = VideoSearch.execute(ids=videos_ids, urls=videos_urls)

    # The command de-duplicates IDs before querying the API.
    infos_mock.assert_called_once_with(list(set(videos_ids)))
    assert result == expected_buffer.getvalue()
+
+
def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path):
    """video-search writes its results to the CSV file whose path it returns."""
    youtube_mock = mocker.patch("youtool.commands.video_search.YouTube")
    fake_infos = [
        {column: f"v_{index}" for column in VideoSearch.INFO_COLUMNS}
        for index in range(len(videos_ids))
    ]

    expected_buffer = StringIO()
    writer = csv.DictWriter(expected_buffer, fieldnames=VideoSearch.INFO_COLUMNS)
    writer.writeheader()
    writer.writerows(fake_infos)

    # Microsecond suffix keeps file names unique across repeated runs.
    output_file_path = tmp_path / f"output_{datetime.now().strftime('%f')}.csv"

    infos_mock = Mock(return_value=fake_infos)
    youtube_mock.return_value.videos_infos = infos_mock

    result_file_path = VideoSearch.execute(
        ids=videos_ids, urls=videos_urls, output_file_path=output_file_path
    )

    with open(result_file_path, "r") as result_csv_file:
        written_csv = result_csv_file.read()

    # The command de-duplicates IDs before querying the API.
    infos_mock.assert_called_once_with(list(set(videos_ids)))
    # Normalize line endings: csv on disk may use \r\n.
    assert written_csv.replace("\r", "") == expected_buffer.getvalue().replace("\r", "")
+
+
def test_video_search_no_id_and_url_error():
    """video-search must fail when called without IDs and without URLs.

    Assertions:
        - The raised exception matches the expected error message.
    """
    with pytest.raises(Exception, match="Either 'ids' or 'urls' must be provided"):
        VideoSearch.execute(ids=None, urls=None)
diff --git a/tests/commands/test_video_transcription.py b/tests/commands/test_video_transcription.py
new file mode 100644
index 0000000..d3ee1f3
--- /dev/null
+++ b/tests/commands/test_video_transcription.py
@@ -0,0 +1,70 @@
+from unittest.mock import Mock
+
+from youtool.commands import VideoTranscription
+
+
def test_video_transcription(mocker, videos_ids, videos_urls, tmp_path):
    """Transcriptions requested by ID/URL are reported as VTT files in output_dir.

    The YouTube API is mocked; the VTT files are pre-created so the command
    can list them in its result.
    """
    youtube_mock = mocker.patch("youtool.commands.video_transcription.YouTube")
    language_code = "pt_br"

    transcriptions_mock = Mock()
    youtube_mock.return_value.videos_transcriptions = transcriptions_mock

    expected_files = [tmp_path / f"{video_id}.{language_code}.vtt" for video_id in videos_ids]
    for vtt_path in expected_files:
        vtt_path.touch()

    result = VideoTranscription.execute(
        ids=videos_ids, urls=videos_urls, language_code=language_code, output_dir=tmp_path
    )

    # The command de-duplicates IDs before requesting transcriptions.
    transcriptions_mock.assert_called_once_with(
        list(set(videos_ids)), language_code, tmp_path
    )

    for vtt_path in expected_files:
        assert str(vtt_path) in result
+
+
def test_video_transcription_input_from_file(mocker, videos_ids, tmp_path):
    """Transcription IDs can come from a CSV file instead of the CLI arguments.

    The YouTube API is mocked; video IDs are listed in an input CSV and the
    pre-created VTT files must show up in the command result.
    """
    youtube_mock = mocker.patch("youtool.commands.video_transcription.YouTube")
    language_code = "pt_br"

    transcriptions_mock = Mock()
    youtube_mock.return_value.videos_transcriptions = transcriptions_mock

    input_file_path = tmp_path / "input_file.csv"
    input_file_path.write_text("video_id\n" + "\n".join(videos_ids))

    expected_files = [tmp_path / f"{video_id}.{language_code}.vtt" for video_id in videos_ids]
    for vtt_path in expected_files:
        vtt_path.touch()

    result = VideoTranscription.execute(
        ids=None, urls=None,
        language_code=language_code, output_dir=tmp_path,
        input_file_path=input_file_path
    )

    # The command de-duplicates IDs before requesting transcriptions.
    transcriptions_mock.assert_called_once_with(
        list(set(videos_ids)), language_code, tmp_path
    )

    for vtt_path in expected_files:
        assert str(vtt_path) in result
\ No newline at end of file
diff --git a/tests/test_cli.py b/tests/test_cli.py
new file mode 100644
index 0000000..92aa4fa
--- /dev/null
+++ b/tests/test_cli.py
@@ -0,0 +1,30 @@
+import pytest
+
+from pathlib import Path
+from subprocess import run
+
+from youtool.commands import COMMANDS
+
+from youtool.commands.base import Command
+
+
@pytest.mark.parametrize(
    "command", COMMANDS
)
def test_missing_api_key(monkeypatch: pytest.MonkeyPatch, command: Command):
    """Test to verify behavior when the YouTube API key is missing.

    Ensures that, with YOUTUBE_API_KEY unset, running any youtool CLI command
    exits with argparse's error code (2) and reports the missing key.
    """
    import sys  # local: only this test shells out to the interpreter

    monkeypatch.delenv('YOUTUBE_API_KEY', raising=False)
    cli_path = Path("youtool") / "cli.py"
    # Use the interpreter running the tests so the subprocess sees the same
    # virtualenv; a bare "python" may be missing or point elsewhere.
    command_string = [sys.executable, str(cli_path), command.name]
    for arg in command.arguments:
        if arg.get("required"):
            command_string.append(arg.get("name"))
            command_string.append("test_value")
    result = run(command_string, capture_output=True, text=True, check=False)

    assert result.returncode == 2
    assert "YouTube API Key is required" in result.stderr
diff --git a/youtool/cli.py b/youtool/cli.py
new file mode 100644
index 0000000..3964dc6
--- /dev/null
+++ b/youtool/cli.py
@@ -0,0 +1,46 @@
+import argparse
+import os
+
+from commands import COMMANDS
+
+
def main():
    """Entry point for the YouTube CLI tool.

    Builds the argument parser (global ``--api-key`` and ``--debug`` options
    plus one subparser per registered command), resolves the YouTube API key
    from the ``--api-key`` argument or the ``YOUTUBE_API_KEY`` environment
    variable, then runs the selected command and prints its result.

    Exits with status 2 via ``parser.error`` (which raises ``SystemExit``)
    when the API key is missing or the executed command raises; with
    ``--debug`` the original exception is re-raised instead so the full
    traceback is visible.
    """
    parser = argparse.ArgumentParser(description="CLI Tool for managing YouTube videos and playlists")
    parser.add_argument("--api-key", type=str, help="YouTube API Key", dest="api_key")
    parser.add_argument("--debug", default=False, action="store_true", help="Debug mode", dest="debug")

    subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed")

    for command in COMMANDS:
        command.parse_arguments(subparsers)

    args = parser.parse_args()
    # CLI flag takes precedence over the environment variable.
    args.api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY")

    if not args.api_key:
        parser.error("YouTube API Key is required")

    try:
        # Each subparser sets `func` to its command's `execute` classmethod,
        # which accepts the full namespace as keyword arguments.
        print(args.func(**args.__dict__))
    except Exception as error:
        if args.debug:
            raise
        parser.error(str(error))


if __name__ == "__main__":
    main()
diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py
new file mode 100644
index 0000000..be52c63
--- /dev/null
+++ b/youtool/commands/__init__.py
@@ -0,0 +1,23 @@
+from .base import Command
+from .channel_id import ChannelId
+from .channel_info import ChannelInfo
+from .video_info import VideoInfo
+from .video_search import VideoSearch
+from .video_comments import VideoComments
+from .video_livechat import VideoLiveChat
+from .video_transcription import VideoTranscription
+
# Registry of all CLI commands, in the order their subparsers appear in --help.
COMMANDS = [
    ChannelId,
    ChannelInfo,
    VideoInfo,
    VideoSearch,
    VideoComments,
    VideoLiveChat,
    VideoTranscription
]

# Public API of the package: the base class, the registry, and every command.
__all__ = [
    "Command", "COMMANDS", "ChannelId", "ChannelInfo", "VideoInfo", "VideoSearch", "VideoComments",
    "VideoLiveChat", "VideoTranscription"
]
diff --git a/youtool/commands/base.py b/youtool/commands/base.py
new file mode 100644
index 0000000..50068d6
--- /dev/null
+++ b/youtool/commands/base.py
@@ -0,0 +1,148 @@
+import csv
+import argparse
+
+from typing import List, Dict, Any, Optional
+from io import StringIO
+from pathlib import Path
+from datetime import datetime
+from urllib.parse import urlparse, parse_qsl
+
+
class Command:
    """A base class for commands to inherit from, following a specific structure.

    Subclasses set `name` and `arguments`, override `execute`, and are wired
    into the CLI through `parse_arguments`.

    Attributes:
        name (str): The name of the command (used as the subcommand name).
        arguments (List[Dict[str, Any]]): A list of dictionaries, each representing an argument for the command;
            the "name" key is the flag, the remaining keys are passed to `argparse.add_argument`.
    """
    name: str
    arguments: List[Dict[str, Any]]

    @staticmethod
    def video_id_from_url(video_url: str) -> Optional[str]:
        """Extract the video ID from the `v` query parameter of a YouTube URL.

        Returns None when the URL carries no `v` parameter.
        """
        parsed_url = urlparse(video_url)
        parsed_url_query = dict(parse_qsl(parsed_url.query))
        return parsed_url_query.get("v")

    @classmethod
    def generate_parser(cls, subparsers: argparse._SubParsersAction):
        """Creates a parser for the command and adds it to the subparsers.

        The command's docstring doubles as its --help text.

        Args:
            subparsers (argparse._SubParsersAction): The subparsers action to add the parser to.

        Returns:
            argparse.ArgumentParser: The parser for the command.
        """
        return subparsers.add_parser(cls.name, help=cls.__doc__)

    @classmethod
    def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None:
        """Parses the arguments for the command and sets the command's execute method as the default function to call.

        Args:
            subparsers (argparse._SubParsersAction): The subparsers action to add the parser to.
        """
        parser = cls.generate_parser(subparsers)
        for argument in cls.arguments:
            # Copy so popping "name" does not mutate the class-level spec.
            argument_copy = {**argument}
            argument_name = argument_copy.pop("name")
            parser.add_argument(argument_name, **argument_copy)
        parser.set_defaults(func=cls.execute)

    @staticmethod
    def filter_fields(video_info: Dict, info_columns: Optional[List] = None) -> Dict:
        """Filters the fields of a dictionary containing video information based on specified columns.

        Args:
            video_info (Dict): A dictionary containing video information.
            info_columns (Optional[List], optional): A list specifying which fields to include in the filtered output.
                If None (or empty), returns the entire video_info dictionary. Defaults to None.

        Returns:
            A dictionary containing only the fields specified in info_columns (if provided)
            or the entire video_info dictionary otherwise.
        """
        return {
            field: value for field, value in video_info.items() if field in info_columns
        } if info_columns else video_info


    @classmethod
    def execute(cls, **kwargs) -> str:  # noqa: D417
        """Executes the command.

        This method should be overridden by subclasses to define the command's behavior.

        Args:
            **kwargs: The parsed command-line arguments, as keyword arguments.

        Raises:
            NotImplementedError: Always, on the base class.
        """
        raise NotImplementedError()

    @staticmethod
    def data_from_csv(
        file_path: Path,
        data_column_name: Optional[str] = None,
        raise_column_exception: bool = True
    ) -> List[str]:
        """Extracts the values of one column from a CSV file.

        Args:
            file_path: The path to the CSV file.
            data_column_name: The name of the column to read.
            raise_column_exception: When True, a missing column raises; when
                False, an empty list is returned instead.

        Returns:
            The column's values, as strings, in file order (None cells skipped).

        Raises:
            FileNotFoundError: If `file_path` is not an existing file.
            ValueError: If the CSV has no header row.
            Exception: If the column is missing and `raise_column_exception` is True.
        """
        data = []

        if not file_path.is_file():
            raise FileNotFoundError(f"Invalid file path: {file_path}")

        with file_path.open('r', newline='') as csv_file:
            reader = csv.DictReader(csv_file)
            fieldnames = reader.fieldnames

            if fieldnames is None:
                raise ValueError("Fieldnames is None")

            if data_column_name not in fieldnames:
                if raise_column_exception:
                    raise Exception(f"Column {data_column_name} not found on {file_path}")
                return data

            for row in reader:
                value = row.get(data_column_name)
                if value is not None:
                    data.append(str(value))
        return data

    @classmethod
    def data_to_csv(cls, data: List[Dict], output_file_path: Optional[str] = None) -> str:
        """Serializes a list of row dictionaries to CSV.

        Parameters:
            data (List[Dict]): Rows to write; the first row's keys define the header.
            output_file_path (str, optional): Where to write the CSV. If it points to a directory,
                a timestamped file named after the command is created inside it.
                If not provided, the CSV is returned as a string.

        Returns:
            str: The path of the created CSV file or, if no path is provided, the contents of the CSV as a string.
        """
        if output_file_path:
            output_path = Path(output_file_path)
            if output_path.is_dir():
                command_name = cls.name.replace("-", "_")
                # Minute/second/microsecond suffix keeps generated names unique.
                timestamp = datetime.now().strftime("%M%S%f")
                output_file_path = output_path / f"{command_name}_{timestamp}.csv"

        # Write to the file when a path is given, otherwise to an in-memory buffer.
        with (Path(output_file_path).open('w', newline='') if output_file_path else StringIO()) as csv_file:
            writer = csv.DictWriter(csv_file, fieldnames=list(data[0].keys()) if data else [])
            writer.writeheader()
            writer.writerows(data)
            # getvalue() must happen before the StringIO is closed by the with-block.
            return str(output_file_path) if output_file_path else csv_file.getvalue()
diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py
new file mode 100644
index 0000000..d42f311
--- /dev/null
+++ b/youtool/commands/channel_id.py
@@ -0,0 +1,86 @@
+
+from pathlib import Path
+
+from youtool import YouTube
+
+from .base import Command
+
+
+class ChannelId(Command):
+ """Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs)."""
+ name = "channel-id"
+ arguments = [
+ {"name": "--urls", "type": str, "help": "Channels urls", "nargs": "*"},
+ {"name": "--urls-file-path", "type": str, "help": "Channels urls csv file path"},
+ {"name": "--output-file-path", "type": str, "help": "Output csv file path"},
+ {"name": "--url-column-name", "type": str, "help": "URL column name on csv input files"},
+ {"name": "--id-column-name", "type": str, "help": "Channel ID column name on csv output files"}
+ ]
+
+ URL_COLUMN_NAME: str = "channel_url"
+ CHANNEL_ID_COLUMN_NAME: str = "channel_id"
+
+ @classmethod
+ def execute(cls, **kwargs) -> str: # noqa: D417
+ """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file.
+
+ This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs.
+ It then saves these channel IDs to a CSV file if an output file path is specified.
+
+ Args:
+ urls (list[str], optional): A list of YouTube channel URLs. Either this or urls_file_path must be provided.
+ urls_file_path (str, optional): Path to a CSV file containing YouTube channel URLs.
+ Requires url_column_name to specify the column with URLs.
+ output_file_path (str, optional): Path to the output CSV file where channel IDs will be saved.
+ If not provided, the result will be returned as a string.
+ api_key (str): The API key to authenticate with the YouTube Data API.
+            url_column_name (str, optional): The name of the column in the urls_file_path CSV file that contains the URLs.
+                Default is "channel_url" (URL_COLUMN_NAME).
+ id_column_name (str, optional): The name of the column for channel IDs in the output CSV file.
+ Default is "channel_id".
+
+ Returns:
+ str: A message indicating the result of the command. If output_file_path is specified, the message will
+ include the path to the generated CSV file. Otherwise, it will return the result as a string.
+
+ Raises:
+ Exception: If neither urls nor urls_file_path is provided.
+ """
+ urls = kwargs.get("urls")
+ urls_file_path = kwargs.get("urls_file_path")
+ output_file_path = kwargs.get("output_file_path")
+ api_key = kwargs.get("api_key")
+
+ url_column_name = kwargs.get("url_column_name")
+ id_column_name = kwargs.get("id_column_name")
+
+ urls = cls.resolve_urls(urls, urls_file_path, url_column_name)
+
+ youtube = YouTube([api_key], disable_ipv6=True)
+
+ channels_ids = [
+ youtube.channel_id_from_url(url) for url in urls if url
+ ]
+
+ result = cls.data_to_csv(
+ data=[
+ {
+ (id_column_name or cls.CHANNEL_ID_COLUMN_NAME): channel_id
+ } for channel_id in channels_ids
+ ],
+ output_file_path=output_file_path
+ )
+
+ return result
+
+ @classmethod
+ def resolve_urls(cls, urls, urls_file_path, url_column_name):
+ if urls_file_path and not urls:
+ urls = cls.data_from_csv(
+ file_path=Path(urls_file_path),
+ data_column_name=url_column_name or cls.URL_COLUMN_NAME
+ )
+
+ if not urls:
+ raise Exception("Either 'username' or 'url' must be provided for the channel-id command")
+ return urls
diff --git a/youtool/commands/channel_info.py b/youtool/commands/channel_info.py
new file mode 100644
index 0000000..09103af
--- /dev/null
+++ b/youtool/commands/channel_info.py
@@ -0,0 +1,122 @@
+import csv
+
+from typing import List, Dict, Optional, Self
+
+from youtool import YouTube
+
+from .base import Command
+
+
+class ChannelInfo(Command):
+ """Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output
+ (same schema for `channel` dicts)
+ """
+ name = "channel-info"
+ arguments = [
+ {"name": "--urls", "type": str, "help": "Channel URLs", "nargs": "*"},
+ {"name": "--usernames", "type": str, "help": "Channel usernames", "nargs": "*"},
+ {"name": "--ids", "type": str, "help": "Channel IDs", "nargs": "*"},
+ {"name": "--urls-file-path", "type": str, "help": "Channel URLs CSV file path"},
+ {"name": "--usernames-file-path", "type": str, "help": "Channel usernames CSV file path"},
+ {"name": "--ids-file-path", "type": str, "help": "Channel IDs CSV file path"},
+ {"name": "--output-file-path", "type": str, "help": "Output CSV file path"},
+ {"name": "--url-column-name", "type": str, "help": "URL column name on CSV input files"},
+ {"name": "--username-column-name", "type": str, "help": "Username column name on CSV input files"},
+ {"name": "--id-column-name", "type": str, "help": "ID column name on CSV input files"},
+ ]
+
+ URL_COLUMN_NAME: str = "channel_url"
+ USERNAME_COLUMN_NAME: str = "channel_username"
+ ID_COLUMN_NAME: str = "channel_id"
+ INFO_COLUMNS: List[str] = [
+ "id", "title", "description", "published_at", "view_count", "subscriber_count", "video_count"
+ ]
+
+ @staticmethod
+ def filter_fields(channel_info: Dict, info_columns: Optional[List] = None):
+ """Filters the fields of a dictionary containing channel information based on
+ specified columns.
+
+ Args:
+ channel_info (Dict): A dictionary containing channel information.
+ info_columns (Optional[List], optional): A list specifying which fields
+ to include in the filtered output. If None, returns the entire
+ channel_info dictionary. Defaults to None.
+
+ Returns:
+ Dict: A dictionary containing only the fields specified in info_columns
+ (if provided) or the entire channel_info dictionary if info_columns is None.
+ """
+ return {
+ field: value for field, value in channel_info.items() if field in info_columns
+ } if info_columns else channel_info
+
+ @classmethod
+ def execute(cls: Self, **kwargs) -> str:
+ """Execute the channel-info command to fetch YouTube channel information from URLs or
+ usernames and save them to a CSV file.
+
+ Args:
+ urls (list[str], optional): A list of YouTube channel URLs. If not provided, `urls_file_path` must be specified.
+ usernames (list[str], optional): A list of YouTube channel usernames. If not provided, `usernames_file_path` must be specified.
+ urls_file_path (str, optional): Path to a CSV file containing YouTube channel URLs.
+ usernames_file_path (str, optional): Path to a CSV file containing YouTube channel usernames.
+ output_file_path (str, optional): Path to the output CSV file where channel information will be saved.
+ api_key (str): The API key to authenticate with the YouTube Data API.
+ url_column_name (str, optional): The name of the column in the `urls_file_path` CSV file that contains the URLs.
+ Default is "channel_url".
+ username_column_name (str, optional): The name of the column in the `usernames_file_path` CSV file that contains the usernames.
+ Default is "channel_username".
+ info_columns (str, optional): Comma-separated list of columns to include in the output CSV.
+ Default is the class attribute `INFO_COLUMNS`.
+
+ Returns:
+ str: A message indicating the result of the command. If `output_file_path` is specified, the message will
+ include the path to the generated CSV file. Otherwise, it will return the result as a string.
+
+ Raises:
+ Exception: If neither `urls`, `usernames`, `urls_file_path` nor `usernames_file_path` is provided.
+ """
+
+ urls = kwargs.get("urls")
+ usernames = kwargs.get("usernames")
+ urls_file_path = kwargs.get("urls_file_path")
+ usernames_file_path = kwargs.get("usernames_file_path")
+ output_file_path = kwargs.get("output_file_path")
+ api_key = kwargs.get("api_key")
+
+ url_column_name = kwargs.get("url_column_name")
+ username_column_name = kwargs.get("username_column_name")
+ info_columns = kwargs.get("info_columns")
+
+ info_columns = [
+ column.strip() for column in info_columns.split(",")
+ ] if info_columns else ChannelInfo.INFO_COLUMNS
+
+ if urls_file_path and not urls:
+ urls = ChannelInfo.data_from_file(urls_file_path, url_column_name)
+ if usernames_file_path and not usernames:
+ usernames = ChannelInfo.data_from_file(usernames_file_path, username_column_name)
+
+ if not urls and not usernames:
+ raise Exception("Either 'urls' or 'usernames' must be provided for the channel-info command")
+
+ youtube = YouTube([api_key], disable_ipv6=True)
+
+ channels_ids = [
+ youtube.channel_id_from_url(url) for url in (urls or []) if url
+ ] + [
+ youtube.channel_id_from_username(username) for username in (usernames or []) if username
+ ]
+ channel_ids = list(
+ set([channel_id for channel_id in channels_ids if channel_id])
+ )
+
+ return cls.data_to_csv(
+ data=[
+ ChannelInfo.filter_fields(
+ channel_info, info_columns
+ ) for channel_info in (youtube.channels_infos(channel_ids) or [])
+ ],
+ output_file_path=output_file_path
+ )
diff --git a/youtool/commands/video_comments.py b/youtool/commands/video_comments.py
new file mode 100644
index 0000000..ec07e18
--- /dev/null
+++ b/youtool/commands/video_comments.py
@@ -0,0 +1,47 @@
+import csv
+from typing import List, Dict, Optional, Self
+
+from youtool import YouTube
+from .base import Command
+
+class VideoComments(Command):
+ """Get comments from a video ID, generate CSV output (same schema for comment dicts)"""
+
+ name = "video-comments"
+ arguments = [
+ {"name": "--id", "type": str, "help": "Video ID", "required": True},
+ {"name": "--output-file-path", "type": str, "help": "Output CSV file path"}
+ ]
+
+ COMMENT_COLUMNS: List[str] = [
+ "comment_id", "author_display_name", "text_display", "like_count", "published_at"
+ ]
+
+ @classmethod
+ def execute(cls: Self, **kwargs) -> str:
+ """
+ Execute the get-comments command to fetch comments from a YouTube video and save them to a CSV file.
+
+ Args:
+ id (str): The ID of the YouTube video.
+            output_file_path (str, optional): Path to the output CSV file where comments will be saved. If omitted, the CSV content is returned as a string.
+ api_key (str): The API key to authenticate with the YouTube Data API.
+
+ Returns:
+ A message indicating the result of the command. If output_file_path is specified,
+ the message will include the path to the generated CSV file.
+ Otherwise, it will return the result as a string.
+ """
+ video_id = kwargs.get("id")
+ output_file_path = kwargs.get("output_file_path")
+ api_key = kwargs.get("api_key")
+
+ youtube = YouTube([api_key], disable_ipv6=True)
+
+ comments = list(youtube.video_comments(video_id))
+
+ return cls.data_to_csv(
+ data=comments,
+ output_file_path=output_file_path
+ )
+
\ No newline at end of file
diff --git a/youtool/commands/video_info.py b/youtool/commands/video_info.py
new file mode 100644
index 0000000..bfa6534
--- /dev/null
+++ b/youtool/commands/video_info.py
@@ -0,0 +1,92 @@
+import csv
+
+from typing import List, Dict, Optional, Self
+
+from youtool import YouTube
+
+from .base import Command
+
+
+class VideoInfo(Command):
+    """Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (same schema for video dicts).
+    """
+ name = "video-info"
+ arguments = [
+ {"name": "--ids", "type": str, "help": "Video IDs", "nargs": "*"},
+ {"name": "--urls", "type": str, "help": "Video URLs", "nargs": "*"},
+ {"name": "--input-file-path", "type": str, "help": "Input CSV file path with URLs/IDs"},
+ {"name": "--output-file-path", "type": str, "help": "Output CSV file path"}
+ ]
+
+ ID_COLUMN_NAME: str = "video_id"
+ URL_COLUMN_NAME: str = "video_url"
+ INFO_COLUMNS: List[str] = [
+ "id", "title", "description", "published_at", "view_count", "like_count", "comment_count"
+ ]
+
+ @classmethod
+ def execute(cls: Self, **kwargs) -> str:
+ """
+ Execute the video-info command to fetch YouTube video information from IDs or URLs and save them to a CSV file.
+
+ Args:
+ ids (list[str], optional): A list of YouTube video IDs. If not provided, input_file_path must be specified.
+ urls (list[str], optional): A list of YouTube video URLs. If not provided, input_file_path must be specified.
+ input_file_path (str, optional): Path to a CSV file containing YouTube video URLs or IDs.
+ output_file_path (str, optional): Path to the output CSV file where video information will be saved.
+ api_key (str): The API key to authenticate with the YouTube Data API.
+            Note: NOTE(review) the input CSV column names are fixed to "video_id" (ID_COLUMN_NAME) and "video_url" (URL_COLUMN_NAME);
+                the url_column_name and id_column_name kwargs described elsewhere are not read by this command.
+ info_columns (str, optional): Comma-separated list of columns to include in the output CSV.
+ Default is the class attribute INFO_COLUMNS.
+
+ Returns:
+ str: A message indicating the result of the command. If output_file_path is specified, the message will
+ include the path to the generated CSV file. Otherwise, it will return the result as a string.
+
+ Raises:
+ Exception: If neither ids, urls, nor input_file_path is provided.
+ """
+
+ ids = kwargs.get("ids", [])
+ urls = kwargs.get("urls", [])
+ input_file_path = kwargs.get("input_file_path")
+ output_file_path = kwargs.get("output_file_path")
+ api_key = kwargs.get("api_key")
+
+ info_columns = kwargs.get("info_columns")
+
+ info_columns = [
+ column.strip() for column in info_columns.split(",")
+ ] if info_columns else VideoInfo.INFO_COLUMNS
+
+ if input_file_path:
+ with open(input_file_path, mode='r') as infile:
+ reader = csv.DictReader(infile)
+ for row in reader:
+ if cls.ID_COLUMN_NAME in row:
+ ids.append(row[cls.ID_COLUMN_NAME])
+ elif cls.URL_COLUMN_NAME in row:
+ urls.append(row[cls.URL_COLUMN_NAME])
+
+ if not ids and not urls:
+ raise Exception("Either 'ids' or 'urls' must be provided for the video-info command")
+
+ youtube = YouTube([api_key], disable_ipv6=True)
+
+ if urls:
+ ids += [cls.video_id_from_url(url) for url in urls]
+
+        # Remove duplicate IDs
+ ids = list(set(ids))
+ videos_infos = list(youtube.videos_infos([_id for _id in ids if _id]))
+ return cls.data_to_csv(
+ data=[
+ VideoInfo.filter_fields(
+ video_info, info_columns
+ ) for video_info in videos_infos
+ ],
+ output_file_path=output_file_path
+ )
diff --git a/youtool/commands/video_livechat.py b/youtool/commands/video_livechat.py
new file mode 100644
index 0000000..4469839
--- /dev/null
+++ b/youtool/commands/video_livechat.py
@@ -0,0 +1,84 @@
+import csv
+from typing import List, Dict, Optional, Self
+from chat_downloader import ChatDownloader
+from chat_downloader.errors import ChatDisabled, LoginRequired, NoChatReplay
+from .base import Command
+from datetime import datetime
+
+class VideoLiveChat(Command):
+ """Get live chat comments from a video ID, generate CSV output (same schema for chat_message dicts)"""
+ name = "video-livechat"
+ arguments = [
+ {"name": "--id", "type": str, "help": "Video ID", "required": True},
+ {"name": "--output-file-path", "type": str, "help": "Output CSV file path"},
+ {"name": "--expand-emojis", "help": "Expand emojis in chat messages", "default": True, "action": "store_true"}
+ ]
+
+ CHAT_COLUMNS: List[str] = [
+ "id", "video_id", "created_at", "type", "action", "video_time",
+ "author", "author_id", "author_image_url", "text",
+ "money_currency", "money_amount"
+ ]
+
+ @staticmethod
+ def parse_timestamp(timestamp: str) -> str:
+ try:
+ return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
+ except ValueError:
+ return datetime.utcfromtimestamp(int(timestamp) / 1000000).strftime('%Y-%m-%d %H:%M:%S')
+
+ @staticmethod
+ def parse_decimal(value: Optional[str]) -> Optional[float]:
+ return float(value.replace(',', '')) if value else None
+
+ @classmethod
+ def execute(cls: Self, **kwargs) -> str:
+ """
+ Execute the video-livechat command to fetch live chat messages from a YouTube video and save them to a CSV file.
+
+ Args:
+ id (str): The ID of the YouTube video.
+ output_file_path (str): Path to the output CSV file where chat messages will be saved.
+ expand_emojis (bool): Whether to expand emojis in chat messages. Defaults to True.
+            api_key (str, optional): Accepted for interface consistency but not used; chat download does not require the YouTube Data API.
+
+ Returns:
+ A message indicating the result of the command. If output_file_path is specified,
+ the message will include the path to the generated CSV file.
+ Otherwise, it will return the result as a string.
+ """
+ video_id = kwargs.get("id")
+ output_file_path = kwargs.get("output_file_path")
+ expand_emojis = kwargs.get("expand_emojis", True)
+
+ downloader = ChatDownloader()
+ video_url = f"https://youtube.com/watch?v={video_id}"
+
+ chat_messages = []
+ try:
+ live = downloader.get_chat(video_url, message_groups=["messages", "superchat"])
+ for message in live:
+ text = message["message"]
+ if expand_emojis:
+ for emoji in message.get("emotes", []):
+ for shortcut in (emoji.get("shortcuts") or []):
+ text = text.replace(shortcut, emoji["id"])
+ money = message.get("money", {}) or {}
+ chat_messages.append({
+ "id": message["message_id"],
+ "video_id": video_id,
+ "created_at": cls.parse_timestamp(message["timestamp"]),
+ "type": message["message_type"],
+ "action": message["action_type"],
+ "video_time": float(message["time_in_seconds"]),
+ "author": message["author"]["name"],
+ "author_id": message["author"]["id"],
+ "author_image_url": [img for img in message["author"]["images"] if img["id"] == "source"][0]["url"],
+ "text": text,
+ "money_currency": money.get("currency"),
+ "money_amount": cls.parse_decimal(money.get("amount")),
+ })
+ except (LoginRequired, NoChatReplay, ChatDisabled):
+ raise
+
+ return cls.data_to_csv(chat_messages, output_file_path)
diff --git a/youtool/commands/video_search.py b/youtool/commands/video_search.py
new file mode 100644
index 0000000..4713a84
--- /dev/null
+++ b/youtool/commands/video_search.py
@@ -0,0 +1,94 @@
+import csv
+
+from typing import List, Dict, Optional, Self
+
+from youtool import YouTube
+
+from .base import Command
+
+
+class VideoSearch(Command):
+ """
+ Search video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside),
+ generate CSV output (simplified video dict schema or option to get full video info)
+ """
+ name = "video-search"
+ arguments = [
+ {"name": "--ids", "type": str, "help": "Video IDs", "nargs": "*"},
+ {"name": "--urls", "type": str, "help": "Video URLs", "nargs": "*"},
+ {"name": "--input-file-path", "type": str, "help": "Input CSV file path with URLs/IDs"},
+ {"name": "--output-file-path", "type": str, "help": "Output CSV file path"},
+ {"name": "--full-info", "type": bool, "help": "Option to get full video info", "default": False},
+ {"name": "--url-column-name", "type": str, "help": "URL column name on csv input files"},
+ {"name": "--id-column-name", "type": str, "help": "Channel ID column name on csv output files"}
+ ]
+
+ ID_COLUMN_NAME: str = "video_id"
+ URL_COLUMN_NAME: str = "video_url"
+ INFO_COLUMNS: List[str] = [
+ "id", "title", "published_at", "view_count"
+ ]
+ FULL_INFO_COLUMNS: List[str] = [
+ "id", "title", "description", "published_at", "view_count", "like_count", "comment_count"
+ ]
+
+ @classmethod
+ def execute(cls: Self, **kwargs) -> str:
+ """
+ Execute the video-search command to fetch YouTube video information from IDs or URLs and save them to a CSV file.
+
+ Args:
+ ids (list[str], optional): A list of YouTube video IDs. If not provided, input_file_path must be specified.
+ urls (list[str], optional): A list of YouTube video URLs. If not provided, input_file_path must be specified.
+ input_file_path (str, optional): Path to a CSV file containing YouTube video URLs or IDs.
+ output_file_path (str, optional): Path to the output CSV file where video information will be saved.
+ api_key (str): The API key to authenticate with the YouTube Data API.
+ full_info (bool, optional): Flag to indicate whether to get full video info. Default is False.
+ url_column_name (str, optional): The name of the column in the input CSV file that contains the URLs. Default is "video_url".
+ id_column_name (str, optional): The name of the column in the input CSV file that contains the IDs. Default is "video_id".
+
+ Returns:
+ str: A message indicating the result of the command. If output_file_path is specified,
+ the message will include the path to the generated CSV file.
+ Otherwise, it will return the result as a string.
+
+ Raises:
+ Exception: If neither ids, urls, nor input_file_path is provided.
+ """
+ ids = kwargs.get("ids", [])
+ urls = kwargs.get("urls", [])
+ output_file_path = kwargs.get("output_file_path")
+ api_key = kwargs.get("api_key")
+ full_info = kwargs.get("full_info", False)
+
+ url_column_name = kwargs.get("url_column_name", cls.URL_COLUMN_NAME)
+ id_column_name = kwargs.get("id_column_name", cls.ID_COLUMN_NAME)
+
+ info_columns = VideoSearch.FULL_INFO_COLUMNS if full_info else VideoSearch.INFO_COLUMNS
+
+ if (input_file_path := kwargs.get("input_file_path")):
+ if (urls_from_csv := cls.data_from_csv(input_file_path, url_column_name)):
+ ids += [cls.video_id_from_url(url) for url in urls_from_csv]
+ if (ids_from_csv := cls.data_from_csv(input_file_path, id_column_name)):
+ ids += ids_from_csv
+
+ if not ids and not urls:
+ raise Exception("Either 'ids' or 'urls' must be provided for the video-search command")
+
+ youtube = YouTube([api_key], disable_ipv6=True)
+
+ if urls:
+ ids += [cls.video_id_from_url(url) for url in urls]
+
+        # Remove duplicate IDs
+ ids = list(set(ids))
+ videos_infos = list(youtube.videos_infos([_id for _id in ids if _id]))
+
+ return cls.data_to_csv(
+ data=[
+ VideoSearch.filter_fields(
+ video_info, info_columns
+ ) for video_info in videos_infos
+ ],
+ output_file_path=output_file_path
+ )
diff --git a/youtool/commands/video_transcription.py b/youtool/commands/video_transcription.py
new file mode 100644
index 0000000..e895e5a
--- /dev/null
+++ b/youtool/commands/video_transcription.py
@@ -0,0 +1,78 @@
+import csv
+from pathlib import Path
+from typing import List, Dict
+from .base import Command
+from youtool import YouTube
+
+class VideoTranscription(Command):
+ """Download video transcriptions based on language code, path, and list of video IDs or URLs (or CSV filename with URLs/IDs inside).
+ Download files to destination and report results."""
+
+ name = "video-transcription"
+ arguments = [
+ {"name": "--ids", "type": str, "help": "Video IDs", "nargs": "*"},
+ {"name": "--urls", "type": str, "help": "Video URLs", "nargs": "*"},
+ {"name": "--input-file-path", "type": str, "help": "CSV file path containing video IDs or URLs"},
+ {"name": "--output-dir", "type": str, "help": "Output directory to save transcriptions"},
+ {"name": "--language-code", "type": str, "help": "Language code for transcription"},
+ {"name": "--api-key", "type": str, "help": "API key for YouTube Data API"},
+ {"name": "--url-column-name", "type": str, "help": "URL column name on csv input files"},
+ {"name": "--id-column-name", "type": str, "help": "Channel ID column name on csv output files"}
+ ]
+
+ ID_COLUMN_NAME: str = "video_id"
+ URL_COLUMN_NAME: str = "video_url"
+
+ @classmethod
+ def execute(cls, **kwargs) -> str:
+ """Execute the video-transcription command to download transcriptions of videos based on IDs or URLs and save them to files.
+
+ Args:
+ ids (List[str]): A list of YouTube video IDs.
+ urls (List[str]): A list of YouTube video URLs.
+ input_file_path (str): Path to a CSV file containing YouTube video IDs or URLs.
+ output_dir (str): Directory path to save the transcription files.
+ language_code (str): Language code for the transcription language.
+ api_key (str): The API key to authenticate with the YouTube Data API.
+ url_column_name (str, optional): Column name for URLs in the CSV input file. Defaults to "video_url".
+ id_column_name (str, optional): Column name for IDs in the CSV output file. Defaults to "video_id".
+
+ Returns:
+ str: A message indicating the result of the command. Reports success or failure for each video transcription download.
+ """
+ ids = kwargs.get("ids") or []
+ urls = kwargs.get("urls") or []
+ input_file_path = kwargs.get("input_file_path")
+ output_dir = kwargs.get("output_dir")
+ language_code = kwargs.get("language_code")
+ api_key = kwargs.get("api_key")
+
+ url_column_name = kwargs.get("url_column_name", cls.URL_COLUMN_NAME)
+ id_column_name = kwargs.get("id_column_name", cls.ID_COLUMN_NAME)
+
+ youtube = YouTube([api_key], disable_ipv6=True)
+
+ if (input_file_path := kwargs.get("input_file_path")):
+ if (urls_from_csv := cls.data_from_csv(input_file_path, url_column_name, False)):
+ ids += [cls.video_id_from_url(url) for url in urls_from_csv]
+ if (ids_from_csv := cls.data_from_csv(input_file_path, id_column_name, False)):
+ ids += ids_from_csv
+
+ if not ids and not urls:
+ raise Exception(
+ "Either 'ids' or 'urls' must be provided for the video-transcription command"
+ )
+
+ if urls:
+ ids += [cls.video_id_from_url(url) for url in urls]
+
+        # Remove duplicate IDs
+ ids = list(set(ids))
+ youtube.videos_transcriptions(ids, language_code, output_dir)
+ output_dir_path = Path(output_dir)
+ saved_transcriptions = [
+ str(
+ output_dir_path / f"{v_id}.{language_code}.vtt"
+ ) for v_id in ids if (output_dir_path / f"{v_id}.{language_code}.vtt").is_file()
+ ]
+ return "\n".join(saved_transcriptions)