From 4154f7697d050d5750c01483fc5ca8d6c515e8a0 Mon Sep 17 00:00:00 2001 From: Gabor Boros Date: Tue, 21 Jun 2022 18:12:29 +0200 Subject: [PATCH 1/5] refactor!: port the driver to Python 3 This commit brings a major refactoring that affects every piece of the driver, mostly, in a backward-incompatible way. For the extensive list of changes, please refer to the CHANGELOG.rst. Co-authored-by: lsabi <13497689+lsabi@users.noreply.github.com> Signed-off-by: Gabor Boros --- .bandit | 2 - .coveragerc | 20 +- .github/FUNDING.yml | 1 + .github/PULL_REQUEST_TEMPLATE.md | 7 + .github/dependabot.yml | 7 + .github/workflows/build.yml | 59 + .github/workflows/codeql-analysis.yml | 73 + .gitignore | 49 +- .pre-commit-config.yaml | 22 + .travis.yml | 38 - AUTHORS.rst | 10 + CHANGELOG.rst | 83 + CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.rst | 27 +- CONTRIBUTING.md | 45 - CONTRIBUTING.rst | 137 + MANIFEST.in | 15 +- Makefile | 164 +- README.md | 260 -- README.rst | 127 + docs/Makefile | 20 + docs/authors.rst | 1 + docs/conf.py | 155 ++ docs/contributing.rst | 1 + docs/history.rst | 1 + docs/index.rst | 25 + docs/make.bat | 36 + docs/modules.rst | 7 + docs/readme.rst | 1 + docs/requirements.txt | 545 ++++ docs/rethinkdb.rst | 85 + docs/vulnerabilities.rst | 52 + mypy.ini | 1 + poetry.lock | 1388 ++++++++++ pylintrc | 503 ++++ pyproject.toml | 78 + pytest.ini | 11 +- requirements.txt | 15 - rethinkdb/__init__.py | 83 +- rethinkdb/__main__.py | 113 - rethinkdb/_dump.py | 260 -- rethinkdb/_export.py | 676 ----- rethinkdb/_import.py | 1728 ------------ rethinkdb/_index_rebuild.py | 275 -- rethinkdb/_restore.py | 347 --- rethinkdb/ast.py | 2393 ++++++++++------- rethinkdb/asyncio_net/__init__.py | 0 rethinkdb/asyncio_net/net_asyncio.py | 383 --- rethinkdb/backports/__init__.py | 4 - .../backports/ssl_match_hostname/LICENSE.txt | 51 - .../backports/ssl_match_hostname/README.txt | 52 - .../backports/ssl_match_hostname/__init__.py | 126 - rethinkdb/docs.py | 730 ----- rethinkdb/encoder.py | 224 ++ rethinkdb/errors.py | 366 +-- rethinkdb/gevent_net/__init__.py | 0 rethinkdb/gevent_net/net_gevent.py | 336 --- rethinkdb/handshake.py | 436 ++- rethinkdb/helpers.py | 17 - rethinkdb/logger.py | 127 - rethinkdb/net.py | 1098 +++++--- rethinkdb/py.typed | 2 + rethinkdb/query.py | 933 +++++-- rethinkdb/repl.py | 69 + rethinkdb/tornado_net/__init__.py | 0 rethinkdb/tornado_net/net_tornado.py | 327 --- rethinkdb/trio_net/__init__.py | 0 rethinkdb/trio_net/net_trio.py | 528 ---- rethinkdb/twisted_net/__init__.py | 0 rethinkdb/twisted_net/net_twisted.py | 455 ---- rethinkdb/utilities.py | 65 + rethinkdb/utils_common.py | 476 ---- scripts/convert_protofile.py | 98 +- scripts/install-db.sh | 9 +- scripts/prepare_remote_test.py | 183 -- scripts/upload-coverage.sh | 13 - scripts/upload-pypi.sh | 41 - setup.cfg | 23 - setup.py | 103 - tests/__init__.py | 6 +- tests/conftest.py | 11 - tests/helpers.py | 166 +- tests/integration/__init__.py | 0 .../integration/test_ast.py | 24 +- tests/integration/test_asyncio.py | 42 - tests/integration/test_connect.py | 29 - tests/integration/test_cursor.py | 104 - tests/integration/test_data_write.py | 430 --- tests/integration/test_database.py | 56 - tests/integration/test_date_and_time.py | 49 - tests/integration/test_index.py | 175 -- tests/integration/test_ping.py | 63 - tests/integration/test_queries.py | 1232 +++++++++ tests/integration/test_repl.py | 130 +- tests/integration/test_table.py | 109 - tests/integration/test_tornado.py | 29 - tests/integration/test_trio.py | 36 - 
tests/integration/test_write_hooks.py | 56 - tests/test_date_and_time.py | 15 +- tests/test_encoder.py | 96 + tests/test_errors.py | 189 ++ tests/test_handshake.py | 570 ++-- tests/test_helpers.py | 46 - tests/test_logger.py | 102 - tests/test_net.py | 444 +-- tests/test_query.py | 868 ++++++ tests/test_repl.py | 147 + tests/test_utilities.py | 101 + tests/test_utils_common.py | 74 - 108 files changed, 10730 insertions(+), 11890 deletions(-) delete mode 100644 .bandit create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/build.yml create mode 100644 .github/workflows/codeql-analysis.yml create mode 100644 .pre-commit-config.yaml delete mode 100644 .travis.yml create mode 100644 AUTHORS.rst create mode 100644 CHANGELOG.rst rename CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.rst (87%) delete mode 100644 CONTRIBUTING.md create mode 100644 CONTRIBUTING.rst delete mode 100644 README.md create mode 100644 README.rst create mode 100644 docs/Makefile create mode 100644 docs/authors.rst create mode 100755 docs/conf.py create mode 100644 docs/contributing.rst create mode 100644 docs/history.rst create mode 100644 docs/index.rst create mode 100644 docs/make.bat create mode 100644 docs/modules.rst create mode 100644 docs/readme.rst create mode 100644 docs/requirements.txt create mode 100644 docs/rethinkdb.rst create mode 100644 docs/vulnerabilities.rst create mode 100644 mypy.ini create mode 100644 poetry.lock create mode 100644 pylintrc create mode 100644 pyproject.toml delete mode 100644 requirements.txt delete mode 100644 rethinkdb/__main__.py delete mode 100755 rethinkdb/_dump.py delete mode 100755 rethinkdb/_export.py delete mode 100755 rethinkdb/_import.py delete mode 100755 rethinkdb/_index_rebuild.py delete mode 100755 rethinkdb/_restore.py delete mode 100644 rethinkdb/asyncio_net/__init__.py delete mode 100644 rethinkdb/asyncio_net/net_asyncio.py delete mode 100644 rethinkdb/backports/__init__.py delete mode 100644 rethinkdb/backports/ssl_match_hostname/LICENSE.txt delete mode 100644 rethinkdb/backports/ssl_match_hostname/README.txt delete mode 100644 rethinkdb/backports/ssl_match_hostname/__init__.py delete mode 100644 rethinkdb/docs.py create mode 100644 rethinkdb/encoder.py delete mode 100644 rethinkdb/gevent_net/__init__.py delete mode 100644 rethinkdb/gevent_net/net_gevent.py delete mode 100644 rethinkdb/helpers.py delete mode 100644 rethinkdb/logger.py create mode 100644 rethinkdb/py.typed create mode 100644 rethinkdb/repl.py delete mode 100644 rethinkdb/tornado_net/__init__.py delete mode 100644 rethinkdb/tornado_net/net_tornado.py delete mode 100644 rethinkdb/trio_net/__init__.py delete mode 100644 rethinkdb/trio_net/net_trio.py delete mode 100644 rethinkdb/twisted_net/__init__.py delete mode 100644 rethinkdb/twisted_net/net_twisted.py create mode 100644 rethinkdb/utilities.py delete mode 100644 rethinkdb/utils_common.py delete mode 100644 scripts/prepare_remote_test.py delete mode 100644 scripts/upload-coverage.sh delete mode 100644 scripts/upload-pypi.sh delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 tests/conftest.py delete mode 100644 tests/integration/__init__.py rename rethinkdb/version.py => tests/integration/test_ast.py (62%) delete mode 100644 tests/integration/test_asyncio.py delete mode 100644 tests/integration/test_connect.py delete mode 100644 tests/integration/test_cursor.py delete mode 100644 tests/integration/test_data_write.py delete mode 100644 tests/integration/test_database.py delete mode 100644 
tests/integration/test_date_and_time.py delete mode 100644 tests/integration/test_index.py delete mode 100644 tests/integration/test_ping.py create mode 100644 tests/integration/test_queries.py delete mode 100644 tests/integration/test_table.py delete mode 100644 tests/integration/test_tornado.py delete mode 100644 tests/integration/test_trio.py delete mode 100644 tests/integration/test_write_hooks.py create mode 100644 tests/test_encoder.py create mode 100644 tests/test_errors.py delete mode 100644 tests/test_helpers.py delete mode 100644 tests/test_logger.py create mode 100644 tests/test_query.py create mode 100644 tests/test_repl.py create mode 100644 tests/test_utilities.py delete mode 100644 tests/test_utils_common.py diff --git a/.bandit b/.bandit deleted file mode 100644 index 6e4e7d26..00000000 --- a/.bandit +++ /dev/null @@ -1,2 +0,0 @@ -[bandit] -exclude: /tests diff --git a/.coveragerc b/.coveragerc index 32e71361..572b9e78 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,14 +1,12 @@ [run] -include = rethinkdb/* +source = rethinkdb +branch = True +omit = *tests* + *__init__* + rethinkdb/ql2_pb2.py [report] -exclude_lines = - pragma: no cover - - def __unicode__ - def __repr__ - -omit = - rethinkdb/version.py - -show_missing = True +sort = cover +fail_under = 72 +exclude_lines = pragma: no cover + if __name__ == .__main__.: diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 7b666369..96d8ab36 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,3 +1,4 @@ +--- # These are supported funding model platforms github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index fc4a2e02..86f1fe6f 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,14 +1,21 @@ **Reason for the change** + If applicable, link the related issue/bug report or write down the motivation in a few sentences. **Description** + A clear and concise description of what you changed and why. **Code examples** + If applicable, add code examples to help explain your changes. **Checklist** + +- [ ] Unit tests created/updated +- [ ] Documentation extended/updated - [ ] I have read and agreed to the [RethinkDB Contributor License Agreement](http://rethinkdb.com/community/cla/) **References** + Anything else related to the change, e.g. documentation, RFCs, etc.
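The reworked ``.coveragerc`` above gates builds at 72% (``fail_under = 72``) and strips two line patterns from the report via ``exclude_lines``. A minimal sketch of what those patterns match in practice — the module and function names here are hypothetical, not part of the driver:

```python
# Hypothetical module illustrating the two exclude_lines patterns above.


def parse_port(value: str) -> int:
    """Ordinary code: measured by `coverage run -m pytest` as usual."""
    return int(value)


def dump_state() -> None:  # pragma: no cover  <- matches the first pattern
    print("debug-only helper, omitted from the coverage report")


if __name__ == "__main__":  # matches the `if __name__ == .__main__.:` pattern
    print(parse_port("28015"))  # 28015 is RethinkDB's default driver port
```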
diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..f99a6c14 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +--- +version: 2 +updates: + - package-ecosystem: pip + directory: / + schedule: + interval: daily diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..dac639ae --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,59 @@ +--- +name: Build + +on: [push, release] + +jobs: + build: + name: Test Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ['3.8'] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + + - name: Install requirements + run: | + pip install poetry pre-commit + poetry install + + - name: Install RethinkDB and compile proto file + run: | + ./scripts/install-db.sh + make ql2.proto + + - name: Run linters + run: | + pre-commit run --all-files + poetry run make lint + + - name: Run tests + run: | + # Download and install test reporter + make download-test-reporter + make test-reporter-before + # Start DB and run tests + rethinkdb& + poetry run make test + killall rethinkdb + + - name: Upload coverage report + if: ${{ matrix.python-version == '3.8' }} + env: + CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} + run: | + make upload-coverage + + - name: Deploy to PyPi + env: + POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.POETRY_HTTP_BASIC_PYPI_USERNAME }} + POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.POETRY_HTTP_BASIC_PYPI_PASSWORD }} + if: ${{ github.event_name == 'release' && matrix.python-version == '3.8' }} + run: poetry publish --build diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..4f2817f5 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,73 @@ +--- +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: CodeQL + +on: + push: + branches: [master] + pull_request: + # The branches below must be a subset of the branches above + branches: [master] + schedule: + - cron: 28 8 * * 0 + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [python] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ + # For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines, + # then modify them (or add more) to build your code; please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.gitignore b/.gitignore index 72d80ecc..fc9c9c92 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ __pycache__/ # Distribution / packaging .Python +env/ build/ develop-eggs/ dist/ @@ -23,7 +24,6 @@ wheels/ *.egg-info/ .installed.cfg *.egg -MANIFEST # PyInstaller # Usually these files are written by a python script from a template @@ -47,30 +47,49 @@ coverage.xml .hypothesis/ .pytest_cache/ -# Translations -*.mo -*.pot +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints # pyenv .python-version -# Environments -*.pid +# dotenv .env + +# virtualenv .venv -env/ venv/ -ENV/ -env.bak/ -venv.bak/ virtualenv/ +ENV/ -# RethinkDB -rethinkdb/ql2_pb2.py -rethinkdb/*.proto -rethinkdb_data/ -rebirthdb_data/ +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ # Editors .vscode/ .idea/ + +# Misc +.DS_Store +TODO + +# rethinkdb +examples/ +rethinkdb_data/ +rethinkdb/ql2_pb2.py +ql2.proto + +# test reporter +cc-test-reporter diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..5d761586 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +--- +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks + +default_stages: [commit, push] +fail_fast: false +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-toml + - id: check-json + - id: pretty-format-json + - id: check-merge-conflict + - id: check-added-large-files + + - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt.git + rev: 0.2.1 + hooks: + - id: yamlfmt diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index ecb5857a..00000000 --- a/.travis.yml +++ /dev/null @@ -1,38 +0,0 @@ -cache: pip -dist: xenial -language: python -sudo: required - -python: - - "2.7" - - "3.5" - - "3.6" - - "3.7" - - "3.8" - -allow_failure: - - python: "3.8" - -install: - - pip install -r requirements.txt - - pip freeze - -before_script: - - make prepare - - make install-db - -script: - - make test-ci - -after_success: - - make upload-coverage - -deploy: - provider: script - script: make upload-pypi - on: - python: 3.8 - tags: true - -notifications: - email: false diff --git a/AUTHORS.rst b/AUTHORS.rst new file mode 100644 index 00000000..c2754b34 --- /dev/null +++ b/AUTHORS.rst @@ -0,0 +1,10
@@ +======= +Credits +======= + +Contributors +------------ + +Check the whole list of contributors here_. + +.. _here: https://github.com/rethinkdb/rethinkdb-python/graphs/contributors diff --git a/CHANGELOG.rst b/CHANGELOG.rst new file mode 100644 index 00000000..aaf63d70 --- /dev/null +++ b/CHANGELOG.rst @@ -0,0 +1,83 @@ +CHANGELOG +========= + +All notable changes to this project will be documented in this file. +The format is based on `Keep a Changelog`_, and this project adheres to +`Semantic Versioning`_. + +.. _Keep a Changelog: https://keepachangelog.com/en/1.0.0/ +.. _Semantic Versioning: https://semver.org/spec/v2.0.0.html + +.. Hyperlinks for releases + +.. _Unreleased: https://github.com/rethinkdb/rethinkdb-python/compare/master...master +.. .. _2.5.0: https://github.com/rethinkdb/rethinkdb-python/releases/tag/v2.5.0 + +Unreleased_ +----------- + +Added +~~~~~ + +* `ValueError` raised by `ReqlTimeoutError` and `ReqlAuthError` if only host or port is set +* New error type for invalid handshake state: `InvalidHandshakeStateError` + +Changed +~~~~~~~ + +* QueryPrinter's `print_query` became a property and was renamed to `query` +* QueryPrinter's `print_carrots` became a property and was renamed to `carrots` +* Renamed `ReqlAvailabilityError` to `ReqlOperationError` +* Extracted the REPL helper class to a separate file +* `HandshakeV1_0` expects `bytes` for the `username` and `password` attributes instead of `str` +* `HandshakeV1_0` defines `username` and `password` attributes as protected attributes +* `HandshakeV1_0` has a hardcoded `JSONEncoder` and `JSONDecoder` from now on +* `HandshakeV1_0` raises `InvalidHandshakeStateError` when an unrecognized state is called in `next_message` +* Moved `ReQLEncoder`, `ReQLDecoder`, `recursively_make_hashable` to the `encoder` module +* Moved `T` to the `utilities` module and renamed it to `EnhancedTuple` +* Renamed `EnhancedTuple`/`T`'s `intsp` parameter to `int_separator` +* Renamed `recursively_make_hashable` to `make_hashable` +* Renamed `optargs` to `kwargs` in the `ast` module +* Renamed the internal `_continue` method of the connection to `resume` to make it public +* Internal `_stop`, `_continue` methods of `Connection` became public +* Renamed internal `_error` to `raise_error` +* Internal `_extend`, `_error` of `Cursor` became public +* Renamed `Rql*` to `Reql*` + +Fixed +~~~~~ + +* Fixed a potential "no-member" error of `RqlBoolOperatorQuery` +* Fixed a variety of quality issues in the `ast` module + +Removed +~~~~~~~ + +* Errors are no longer re-exported via `__all__` from `rethinkdb` +* Removed `Rql*` aliases for `Reql*` exceptions +* Removed `auth_key` in favor of `password` in connection params + +.. EXAMPLE CHANGELOG ENTRY + + 0.1.0_ - 2020-01-xx + -------------------- + + Added + ~~~~~ + + * TODO. + + Changed + ~~~~~~~ + + * TODO. + + Fixed + ~~~~~ + + * TODO.
+ + Removed + ~~~~~~~ + + * TODO diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.rst similarity index 87% rename from CODE_OF_CONDUCT.md rename to CODE_OF_CONDUCT.rst index 9c628288..014997ea 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.rst @@ -1,6 +1,8 @@ -# Contributor Covenant Code of Conduct +Contributor Covenant Code of Conduct +************************************ -## Our Pledge +Our Pledge +========== In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and @@ -9,7 +11,8 @@ size, disability, ethnicity, sex characteristics, gender identity and expression level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. -## Our Standards +Our Standards +============= Examples of behaviour that contributes to creating a positive environment include: @@ -28,7 +31,8 @@ Examples of unacceptable behaviour by participants include: * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting -## Our Responsibilities +Our Responsibilities +==================== Project maintainers are responsible for clarifying the standards of acceptable behaviour and are expected to take appropriate and fair corrective action in @@ -40,7 +44,8 @@ that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. -## Scope +Scope +===== This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of @@ -49,10 +54,11 @@ address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. -## Enforcement +Enforcement +=========== Instances of abusive, harassing, or otherwise unacceptable behaviour may be -reported by contacting the project team at open@rethinkdb.com. All +reported by contacting the project team at gabor.brs@gmail.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. @@ -62,10 +68,13 @@ Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. -## Attribution +Attribution +=========== -This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +This Code of Conduct is adapted from the `Contributor Covenant`_, version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html +.. _`Contributor Covenant`: https://www.contributor-covenant.org + For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index e8c48beb..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,45 +0,0 @@ -# Contributing - -Contributions are welcome, and they are greatly appreciated! Every little bit helps! You can contribute in many ways, not limited to this document. 
- -## Types of Contributions - -### Report Bugs - -First of all, please check that the bug is not reported yet. If that's already reported then upvote the existing bug instead of opening a new bug report. - -Report bugs at https://github.com/rethinkdb/rethinkdb-python/issues. If you are reporting a bug, please include: - -- Your operating system name and version. -- Any details about your local setup that might be helpful in troubleshooting. -- Detailed steps to reproduce the bug. - -### Fix Bugs - -Look through the GitHub issues for bugs. Anything tagged with "bug", "good first issue" and "help wanted" is open to whoever wants to implement it. - -### Implement Features - -Look through the GitHub issues for features. Anything tagged with "enhancement", "good first issue" and "help wanted" is open to whoever wants to implement it. In case you added a new Rule or Precondition, do not forget to add them to the docs as well. - -### Write Documentation - -RethinkDB could always use more documentation, whether as part of the official docs, in docstrings, or even on the web in blog posts, articles, and such. To extend the documentation on the website, visit the [www](https://github.com/rethinkdb/www) repo. For extending the docs, you can check the [docs](https://github.com/rethinkdb/docs) repo. - -### Submit A Feature - -First of all, please check that the feature request is not reported yet. If that's already reported then upvote the existing request instead of opening a new one. - -If you are proposing a feature: - -- Check if there is an opened feature request for the same idea. -- Explain in detail how it would work. -- Keep the scope as narrow as possible, to make it easier to implement. -- Remember that this is an open-source project, and that contributions are welcome :) - -## Pull Request Guidelines - -Before you submit a pull request, check that it meets these guidelines: - -1. The pull request should include tests (if applicable) -2. If the pull request adds functionality, the docs should be updated too. diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 00000000..84c97b98 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,137 @@ +.. highlight:: shell + +============ +Contributing +============ + +Contributions are welcome, and they are greatly appreciated! Every little bit +helps, and credit will always be given. + +You can contribute in many ways: + +Types of Contributions +---------------------- + +Report Bugs +~~~~~~~~~~~ + +Report bugs at https://github.com/rethinkdb/rethinkdb-python/issues. + +If you are reporting a bug, please include: + +- Your operating system name and version. +- Any details about your local setup that might be helpful in troubleshooting. +- Detailed steps to reproduce the bug. + +Fix Bugs +~~~~~~~~ + +Look through the GitHub issues for bugs. Anything tagged with "bug" and "help +wanted" is open to whoever wants to implement it. + +Implement Features +~~~~~~~~~~~~~~~~~~ + +Look through the GitHub issues for features. Anything tagged with "enhancement" +and "help wanted" is open to whoever wants to implement it. In case you added a +new Rule or Precondition, do not forget to add them to the docs as well. + +Write Documentation +~~~~~~~~~~~~~~~~~~~ + +RethinkDB could always use more documentation, whether as part of the +official RethinkDB docs, in docstrings, or even on the web in blog posts, +articles, and such. 
+ +Submit Feedback +~~~~~~~~~~~~~~~ + +The best way to send feedback is to file an issue at https://github.com/rethinkdb/rethinkdb-python/issues. + +If you are proposing a feature: + +- Explain in detail how it would work. +- Keep the scope as narrow as possible, to make it easier to implement. +- Remember that this is a volunteer-driven project, and that contributions + are welcome :) + +Get Started! +------------ + +.. note:: + + You may want to see the developer documentation, not the one intended for + our users. To generate and open the documentation, run ``make docs``. + +Ready to contribute? Here's how to set up `rethinkdb`'s Python client for local development. + +As step 0, make sure you have Python 3.7+, `poetry <https://python-poetry.org/>`_ and `pre-commit <https://pre-commit.com/>`_ installed. + +1. Fork the `rethinkdb-python` repo on GitHub. +2. Clone your fork locally:: + + $ git clone git@github.com:your_name_here/rethinkdb-python.git + +3. Install your local copy. Assuming you have poetry installed, this is how you set up your fork for local development:: + + $ cd rethinkdb-python/ + $ poetry install -E all + +4. Create a branch for local development:: + + $ git checkout -b name-of-your-bugfix-or-feature + + Now you can make your changes locally. + +5. When you're done making changes, check that your changes pass the linters and the tests:: + + $ poetry shell + $ make ql2.proto + $ make format + $ make lint + $ make test + $ pre-commit run --all-files + + You will need ``make`` not just for executing the commands, but to build (and test) + the documentation pages as well. + + Also, running ``make test`` runs integration tests. To make them pass, you need a + running RethinkDB server. To run only unit tests, execute ``make test-unit``. + +6. Commit your changes and push your branch to GitHub:: + + $ git add . + $ git commit -m "Your detailed description of your changes." + $ git push origin name-of-your-bugfix-or-feature + +7. Submit a pull request through the GitHub website. + +Pull Request Guidelines +----------------------- + +Before you submit a pull request, check that it meets these guidelines: + +1. The pull request should include tests. +2. If the pull request adds functionality, the docs should be updated. Put + your new functionality into a function with a docstring, and add the + feature to the list in README.rst. +3. The pull request should work for Python 3.7 and 3.8. + +Releasing +--------- + +A reminder for the maintainers on how to release. +Make sure all your changes are committed (including an entry in CHANGELOG.rst). + +After that, create a tag and a release on GitHub. The rest will be handled by +the GitHub Actions build workflow. + +Please follow this checklist for the release: + +1. Make sure that formatters are not complaining (``make format`` returns 0) +2. Make sure that linters are not complaining (``make lint`` returns 0) +3. Make sure developer documentation is up-to-date (``make docs`` returns 0) +4. Update CHANGELOG.rst - do not forget to update the unreleased link comparison +5. Update version in ``pyproject.toml``, ``CHANGELOG.rst`` and ``rethinkdb/__init__.py`` +6. Create a new Release on GitHub with a detailed release description based on + the previous releases.
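The split between ``make test-unit`` and ``make test`` above hinges on a pytest marker: the Makefile deselects integration tests with ``pytest -m "not integration"``. A sketch of how a test module would use that marker — the test bodies are illustrative only, not tests from this repository:

```python
# Hypothetical tests/test_example.py showing the marker-based split.
import pytest

from rethinkdb import r


def test_expr_builds_a_term():
    # Unit test: builds a ReQL term without touching a server, so it
    # still runs under `make test-unit`.
    assert r.expr(1) is not None


@pytest.mark.integration  # deselected by `pytest -m "not integration"`
def test_expr_roundtrip():
    # Integration test: requires a RethinkDB server listening locally.
    connection = r.connect(db="test")
    assert r.expr(1).run(connection) == 1
```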
diff --git a/MANIFEST.in b/MANIFEST.in index c41b03bc..12224df3 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,8 +1,13 @@ +include AUTHORS.rst +include CONTRIBUTING.rst +include CHANGELOG.rst include LICENSE -include *.txt -include Makefile -include pytest.ini -include .coveragerc +include README.rst + +recursive-include tests *.py recursive-include scripts *.py recursive-include scripts *.sh -recursive-include tests *.py +recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif + +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] diff --git a/Makefile b/Makefile index 23d0dbde..266ef45b 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -# Copyright 2018 RethinkDB +# Copyright 2022 RethinkDB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,70 +12,130 @@ # See the License for the specific language governing permissions and # limitations under the License. -.PHONY: default help test-unit test-integration test-remote upload-coverage upload-pypi clean prepare +.DEFAULT_GOAL := help PACKAGE_NAME = rethinkdb PROTO_FILE_NAME = ql2.proto PROTO_FILE_URL = https://raw.githubusercontent.com/rethinkdb/rethinkdb/next/src/rdb_protocol/${PROTO_FILE_NAME} -TARGET_PROTO_FILE = ${PACKAGE_NAME}/${PROTO_FILE_NAME} +TARGET_PROTO_FILE = ${PROTO_FILE_NAME} FILE_CONVERTER_NAME = ./scripts/convert_protofile.py -REMOTE_TEST_SETUP_NAME = ./scripts/prepare_remote_test.py CONVERTED_PROTO_FILE_NAME = ql2_pb2.py TARGET_CONVERTED_PROTO_FILE = ${PACKAGE_NAME}/${CONVERTED_PROTO_FILE_NAME} +define BROWSER_PYSCRIPT +import os, webbrowser, sys -default: help +try: + from urllib import pathname2url +except: + from urllib.request import pathname2url +webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) +endef +export BROWSER_PYSCRIPT + +define PRINT_HELP_PYSCRIPT +import re, sys + +for line in sys.stdin: + match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) + if match: + target, help = match.groups() + print("%-20s %s" % (target, help)) +endef +export PRINT_HELP_PYSCRIPT + +BROWSER := python -c "$$BROWSER_PYSCRIPT" + +.PHONY: help help: - @echo "Usage:" - @echo - @echo " make help Print this help message" - @echo " make test-unit Run unit tests" - @echo " make test-integration Run integration tests" - @echo " make test-integration-2.4 Run integration tests" - @echo " make test-remote Run tests on digital ocean" - @echo " make upload-coverage Upload unit test coverage" - @echo " make upload-pypi Release ${PACKAGE_NAME} package to PyPi" - @echo " make clean Cleanup source directory" - @echo " make prepare Prepare ${PACKAGE_NAME} for build" - -test-unit: - pytest -v -m unit - -test-integration: - @rethinkdb& - pytest -v -m integration - @killall rethinkdb - -test-ci: - @rethinkdb& - pytest -v --cov rethinkdb --cov-report xml - @killall rethinkdb - -test-remote: - python ${REMOTE_TEST_SETUP_NAME} pytest -m integration - -install-db: - @sh scripts/install-db.sh + @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) + +.PHONY: clean +clean: clean-build clean-pyc clean-test clean-mypy ## remove all build, test, coverage and Python artifacts + +.PHONY: clean-build +clean-build: ## remove build artifacts + rm -fr build/ + rm -fr dist/ + rm -fr .eggs/ + find . -name '*.egg-info' -exec rm -fr {} + + find . 
-name '*.egg' -exec rm -f {} + + +.PHONY: clean-mypy +clean-mypy: ## remove mypy related artifacts + rm -rf .mypy_cache + +.PHONY: clean-pyc +clean-pyc: ## remove Python file artifacts + find . -name '*.pyc' -exec rm -f {} + + find . -name '*.pyo' -exec rm -f {} + + find . -name '*~' -exec rm -f {} + + find . -name '__pycache__' -exec rm -fr {} + + +.PHONY: clean-test +clean-test: ## remove test and coverage artifacts + rm -fr .tox/ \ + .coverage \ + htmlcov/ \ + .pytest_cache \ + .hypothesis/ + +.PHONY: docs +docs: ## generate Sphinx HTML documentation, including API docs + rm -f docs/rethinkdb.rst + rm -f docs/modules.rst + poetry export -E all --dev -f requirements.txt > docs/requirements.txt + sphinx-apidoc -o docs/ rethinkdb + $(MAKE) -C docs clean + $(MAKE) -C docs html + $(BROWSER) docs/_build/html/index.html + +.PHONY: format +format: ## run formatters on the package + isort rethinkdb tests + black rethinkdb tests + +.PHONY: lint +lint: ## run linters against the package + mypy rethinkdb + bandit -q -r rethinkdb + pylint rethinkdb + flake8 rethinkdb --count --ignore=E203,E501,W503 --show-source --statistics + +.PHONY: ql2.proto +ql2.proto: ## download and convert protobuf file + curl -sqo ${TARGET_PROTO_FILE} ${PROTO_FILE_URL} + python ${FILE_CONVERTER_NAME} -l python -i ${TARGET_PROTO_FILE} -o ${TARGET_CONVERTED_PROTO_FILE} + +.PHONY: test-unit +test-unit: ## run unit tests and generate coverage + coverage run -m pytest -m "not integration" -vv + coverage report + +.PHONY: test-integration +test-integration: ## run integration tests and generate coverage + coverage run -m pytest -m "integration" -vv + coverage report + +.PHONY: test +test: ## run all tests and generate coverage + coverage run -m pytest -vv + coverage report + coverage xml + +.PHONY: download-test-reporter +download-test-reporter: + curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter + chmod +x ./cc-test-reporter + +.PHONY: test-reporter-before +test-reporter-before: + ./cc-test-reporter before-build + +.PHONY: upload-coverage upload-coverage: - @sh scripts/upload-coverage.sh - -upload-pypi: prepare - @sh scripts/upload-pypi.sh - -clean: - @rm -rf \ - ${TARGET_PROTO_FILE} \ - ${TARGET_CONVERTED_PROTO_FILE} \ - .pytest_cache \ - .eggs \ - .dist \ - *.egg-info - -prepare: - curl -qo ${TARGET_PROTO_FILE} ${PROTO_FILE_URL} - python ${FILE_CONVERTER_NAME} -l python -i ${TARGET_PROTO_FILE} -o ${TARGET_CONVERTED_PROTO_FILE} + ./cc-test-reporter after-build -t "coverage.py" diff --git a/README.md b/README.md deleted file mode 100644 index 99844043..00000000 --- a/README.md +++ /dev/null @@ -1,260 +0,0 @@ -# RethinkDB Python driver -[![PyPI version](https://badge.fury.io/py/rethinkdb.svg)](https://badge.fury.io/py/rethinkdb) [![Build Status](https://travis-ci.org/rethinkdb/rethinkdb-python.svg?branch=master)](https://travis-ci.org/rethinkdb/rethinkdb-python) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/2b5231a6f90a4a1ba2fc795f8466bbe4)](https://www.codacy.com/app/rethinkdb/rethinkdb-python?utm_source=github.com&utm_medium=referral&utm_content=rethinkdb/rethinkdb-python&utm_campaign=Badge_Grade) [![Codacy Badge](https://api.codacy.com/project/badge/Coverage/2b5231a6f90a4a1ba2fc795f8466bbe4)](https://www.codacy.com/app/rethinkdb/rethinkdb-python?utm_source=github.com&utm_medium=referral&utm_content=rethinkdb/rethinkdb-python&utm_campaign=Badge_Coverage) - -## Overview - -### What is RethinkDB?
-RethinkDB is the first open-source scalable database built for realtime applications. It exposes a new database access model -- instead of polling for changes, the developer can tell the database to continuously push updated query results to applications in realtime. RethinkDB allows developers to build scalable realtime apps in a fraction of the time with less effort. - -## Installation -```bash -$ pip install rethinkdb -``` -*Note: this package is the extracted driver of RethinkDB's original python driver.* - -## Quickstart -The main difference with the previous driver (except the name of the package) is we are **not** importing RethinkDB as `r`. If you would like to use `RethinkDB`'s python driver as a drop in replacement, you should do the following: - -```python -from rethinkdb import r - -connection = r.connect(db='test') -``` - -## Blocking and Non-blocking I/O -This driver supports blocking I/O (i.e. standard Python sockets) as well as -non-blocking I/O through multiple async frameworks: - -* [Asyncio](https://docs.python.org/3/library/asyncio.html) -* [Gevent](http://www.gevent.org/) -* [Tornado](https://www.tornadoweb.org/en/stable/) -* [Trio](https://trio.readthedocs.io/en/latest/) -* [Twisted](https://twistedmatrix.com/trac/) - -The following examples demonstrate how to use the driver in each mode. - -### Default mode (blocking I/O) -The driver's default mode of operation is to use blocking I/O, i.e. standard Python -sockets. This example shows how to create a table, populate with data, and get every -document. - -```python -from rethinkdb import r - -connection = r.connect(db='test') - -r.table_create('marvel').run(connection) - -marvel_heroes = r.table('marvel') -marvel_heroes.insert({ - 'id': 1, - 'name': 'Iron Man', - 'first_appearance': 'Tales of Suspense #39' -}).run(connection) - -for hero in marvel_heroes.run(connection): - print(hero['name']) -``` - -### Asyncio mode -Asyncio mode is compatible with Python ≥ 3.4, which is when asyncio was -introduced into the standard library. - -```python -import asyncio -from rethinkdb import r - -# Native coroutines are supported in Python ≥ 3.5. In Python 3.4, you should -# use the @asyncio.couroutine decorator instead of "async def", and "yield from" -# instead of "await". -async def main(): - r.set_loop_type('asyncio') - connection = await r.connect(db='test') - - await r.table_create('marvel').run(connection) - - marvel_heroes = r.table('marvel') - await marvel_heroes.insert({ - 'id': 1, - 'name': 'Iron Man', - 'first_appearance': 'Tales of Suspense #39' - }).run(connection) - - # "async for" is supported in Python ≥ 3.6. In earlier versions, you should - # call "await cursor.next()" in a loop. - cursor = await marvel_heroes.run(connection) - async for hero in cursor: - print(hero['name']) - -asyncio.get_event_loop().run_until_complete(main()) -``` - -### Gevent mode - -```python -import gevent -from rethinkdb import r - -def main(): - r.set_loop_type('gevent') - connection = r.connect(db='test') - - r.table_create('marvel').run(connection) - - marvel_heroes = r.table('marvel') - marvel_heroes.insert({ - 'id': 1, - 'name': 'Iron Man', - 'first_appearance': 'Tales of Suspense #39' - }).run(connection) - - for hero in marvel_heroes.run(connection): - print(hero['name']) - -gevent.joinall([gevent.spawn(main)]) -``` - -### Tornado mode -Tornado mode is compatible with Tornado < 5.0.0. Tornado 5 is not supported. 
- -```python -from rethinkdb import r -from tornado import gen -from tornado.ioloop import IOLoop - -@gen.coroutine -def main(): - r.set_loop_type('tornado') - connection = yield r.connect(db='test') - - yield r.table_create('marvel').run(connection) - - marvel_heroes = r.table('marvel') - yield marvel_heroes.insert({ - 'id': 1, - 'name': 'Iron Man', - 'first_appearance': 'Tales of Suspense #39' - }).run(connection) - - cursor = yield marvel_heroes.run(connection) - while (yield cursor.fetch_next()): - hero = yield cursor.next() - print(hero['name']) - -IOLoop.current().run_sync(main) -``` - -### Trio mode - -```python -from rethinkdb import r -import trio - -async def main(): - r.set_loop_type('trio') - async with trio.open_nursery() as nursery: - async with r.open(db='test', nursery=nursery) as conn: - await r.table_create('marvel').run(conn) - marvel_heroes = r.table('marvel') - await marvel_heroes.insert({ - 'id': 1, - 'name': 'Iron Man', - 'first_appearance': 'Tales of Suspense #39' - }).run(conn) - - # "async for" is supported in Python ≥ 3.6. In earlier versions, you should - # call "await cursor.next()" in a loop. - cursor = await marvel_heroes.run(conn) - async with cursor: - async for hero in cursor: - print(hero['name']) - -trio.run(main) -``` - -The Trio mode also supports a database connection pool. You can modify the example above -as follows: - -```python -db_pool = r.ConnectionPool(db='test', nursery=nursery) -async with db_pool.connection() as conn: - ... -await db_pool.close() -``` - -### Twisted mode - -```python -from rethinkdb import r -from twisted.internet import reactor, defer - -@defer.inlineCallbacks -def main(): - r.set_loop_type('twisted') - connection = yield r.connect(db='test') - - yield r.table_create('marvel').run(connection) - - marvel_heroes = r.table('marvel') - yield marvel_heroes.insert({ - 'id': 1, - 'name': 'Iron Man', - 'first_appearance': 'Tales of Suspense #39' - }).run(connection) - - cursor = yield marvel_heroes.run(connection) - while (yield cursor.fetch_next()): - hero = yield cursor.next() - print(hero['name']) - -main().addCallback(lambda d: print("stopping") or reactor.stop()) -reactor.run() -``` - -## Misc -To help the migration from rethinkdb<2.4 we introduced a shortcut which can easily replace the old `import rethinkdb as r` import with `from rethinkdb import r`. - -## Run tests -In the `Makefile` you can find three different test commands: `test-unit`, `test-integration` and `test-remote`. As RethinkDB has dropped the support of Windows, we would like to ensure that those of us who are using Windows for development can still contribute. Because of this, we support running integration tests against Digital Ocean Droplets as well. - -Before you run any test, make sure that you install the requirements. -```bash -$ pip install -r requirements.txt -$ make prepare -``` - -### Running unit tests -```bash -$ make test-unit -``` - -### Running integration tests -*To run integration tests locally, make sure you intstalled RethinkDB* -```bash -$ make test-integration -``` - -### Running remote integration tests -*To run the remote tests, you need to have a Digital Ocean account and an API key.* - -Remote test will create a new temporary SSH key and a Droplet for you until the tests are finished. 
- -**Available environment variables** - -| Variable name | Default value | -|---------------|---------------| -| DO_TOKEN | N/A | -| DO_SIZE | 512MB | -| DO_REGION | sfo2 | - -```bash -$ pip install paramiko python-digitalocean -$ export DO_TOKEN= -$ make test-remote -``` - -## New features -Github's Issue tracker is **ONLY** used for reporting bugs. NO NEW FEATURE ACCEPTED! Use [spectrum](https://spectrum.chat/rethinkdb) for supporting features. - -## Contributing -Hurray! You reached this section which means, that you would like to contribute. Please read our contributing guide lines and feel free to open a pull request. diff --git a/README.rst b/README.rst new file mode 100644 index 00000000..a361f5dd --- /dev/null +++ b/README.rst @@ -0,0 +1,127 @@ +RethinkDB Python Client +*********************** + +.. image:: https://img.shields.io/pypi/v/rethinkdb.svg + :target: https://pypi.python.org/pypi/rethinkdb + :alt: PyPi Package + +.. image:: https://github.com/rethinkdb/rethinkdb-python/actions/workflows/build.yml/badge.svg?branch=master + :target: https://github.com/rethinkdb/rethinkdb-python/actions/workflows/build.yml + :alt: Build Status + +.. image:: https://api.codeclimate.com/v1/badges/e5023776401a5f0e82f1/maintainability + :target: https://codeclimate.com/github/rethinkdb/rethinkdb-python/maintainability + :alt: Maintainability + +.. image:: https://api.codeclimate.com/v1/badges/e5023776401a5f0e82f1/test_coverage + :target: https://codeclimate.com/github/rethinkdb/rethinkdb-python/test_coverage + :alt: Test Coverage + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/ambv/black + :alt: Black Formatted + +RethinkDB is the first open-source scalable database built for realtime applications. +It exposes a new database access model -- instead of polling for changes, the developer +can tell the database to continuously push updated query results to applications in realtime. +RethinkDB allows developers to build scalable realtime apps in a fraction of the time with +less effort. + +Utility Tools +============= + +If you came here looking for a utility tool: this functionality was dropped during the port from Python 2 to 3. +Instead, have a look at (and we suggest using) https://github.com/BOOMfinity-Developers/GoThink + + +Installation +============ + +RethinkDB's Python Client can be installed by running ``pip install rethinkdb`` and it requires +Python 3.7.0+ to run. This is the preferred method to install the RethinkDB Python client, as it +will always install the most recent stable release. If you don't have `pip`_ +installed, this `Python installation guide`_ can guide +you through the process. + +.. _pip: https://pip.pypa.io +.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ + +Installing extras +----------------- + +RethinkDB's Python Client tries to be as tiny as possible, hence some features +require extra dependencies to be installed. + +To install `rethinkdb` with an extra package run ``pip install rethinkdb[<extra>]``, +where ``<extra>`` is the name of the extra option. To install multiple extra packages +list the extra names separated by commas as described in `pip's examples`_ section, point +number six.
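For example, ``pip install "rethinkdb[all]"`` pulls in every optional dependency (the available extras are listed in the table right after this sketch). To sanity-check an installation against a local server, here is a minimal blocking-I/O session adapted from the quickstart in the previous README — it assumes a RethinkDB server on the default host and port, and that the port keeps the same top-level ``r`` entry point:

```python
from rethinkdb import r

# Open a blocking connection to a local RethinkDB server (defaults:
# localhost:28015) and use the `test` database.
connection = r.connect(db="test")

r.table_create("marvel").run(connection)
r.table("marvel").insert(
    {"id": 1, "name": "Iron Man", "first_appearance": "Tales of Suspense #39"}
).run(connection)

# Iterating the cursor returned by run() yields documents as dicts.
for hero in r.table("marvel").run(connection):
    print(hero["name"])
```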
+ ++---------------------+--------------------------------------------+ +| Extra | Description | ++=====================+============================================+ +| all | alias to install all the extras available | ++---------------------+--------------------------------------------+ + +.. _`pip's examples`: https://pip.pypa.io/en/stable/reference/pip_install/#examples + +Usage examples +============== + +TODO + +Contributing +============ + +Hurray, you reached this section, which means you are ready +to contribute. + +Please read our contributing guideline_. It will +walk you through how you can successfully contribute to +the RethinkDB Python client. + +.. _guideline: https://github.com/rethinkdb/rethinkdb-python/blob/master/CONTRIBUTING.rst + +Installation +------------ + +For development you will need poetry_, pre-commit_ and shellcheck_. After poetry is installed, +simply run `poetry install -E all`. This command will both create the virtualenv +and install all development dependencies for you. + +.. _poetry: https://python-poetry.org/docs/#installation +.. _pre-commit: https://pre-commit.com/#install +.. _shellcheck: https://www.shellcheck.net/ + + +Useful make Commands +-------------------- + ++------------------+-------------------------------------+ +| Command | Description | ++==================+=====================================+ +| help | Print available make commands | ++------------------+-------------------------------------+ +| clean | Remove all artifacts | ++------------------+-------------------------------------+ +| clean-build | Remove build artifacts | ++------------------+-------------------------------------+ +| clean-mypy | Remove mypy artifacts | ++------------------+-------------------------------------+ +| clean-pyc | Remove Python artifacts | ++------------------+-------------------------------------+ +| clean-test | Remove test artifacts | ++------------------+-------------------------------------+ +| docs | Generate Sphinx documentation | ++------------------+-------------------------------------+ +| format | Run several formatters | ++------------------+-------------------------------------+ +| lint | Run several linters after format | ++------------------+-------------------------------------+ +| ql2.proto | Download and convert protobuf file | ++------------------+-------------------------------------+ +| test | Run all tests with coverage | ++------------------+-------------------------------------+ +| test-unit | Run unit tests with coverage | ++------------------+-------------------------------------+ +| test-integration | Run integration tests with coverage | ++------------------+-------------------------------------+ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..92efdc83 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = python -msphinx +SPHINXPROJ = rethinkdb +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/authors.rst b/docs/authors.rst new file mode 100644 index 00000000..e122f914 --- /dev/null +++ b/docs/authors.rst @@ -0,0 +1 @@ +.. include:: ../AUTHORS.rst diff --git a/docs/conf.py b/docs/conf.py new file mode 100755 index 00000000..4a38e705 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# rethinkdb documentation build configuration file, created by +# sphinx-quickstart on Fri Jun 9 13:47:02 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. +# +# If extensions (or modules to document with autodoc) are in another +# directory, add these directories to sys.path here. If the directory is +# relative to the documentation root, use os.path.abspath to make it +# absolute, like shown here. + +import os +import sys +from datetime import date + +sys.path.insert(0, os.path.abspath("..")) + + +# -- General configuration --------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx_rtd_theme"] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = ".rst" + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = "RethinkDB Python Client" +copyright = f"{date.today().year}, RethinkDB" +author = "RethinkDB" + +# The version info for the project you're documenting, acts as replacement +# for |version| and |release|, also used in various other places throughout +# the built documents. +# +# The short X.Y version. +version = "latest" +# The full version, including alpha/beta/rc tags. +release = "latest" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Theme options are theme-specific and customize the look and feel of a +# theme further. For a list of options available for each theme, see the +# documentation. 
+# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + + +# -- Options for HTMLHelp output --------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = "rethinkdbdoc" + + +# -- Options for LaTeX output ------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto, manual, or own class]). +latex_documents = [ + (master_doc, "rethinkdb.tex", "rethinkdb Documentation", author, "manual") +] + + +# -- Options for manual page output ------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, "rethinkdb", "rethinkdb Documentation", [author], 1)] + + +# -- Options for Texinfo output ---------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "rethinkdb", + "rethinkdb Documentation", + author, + "rethinkdb", + "One line description of project.", + "Miscellaneous", + ) +] diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 00000000..e582053e --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1 @@ +.. include:: ../CONTRIBUTING.rst diff --git a/docs/history.rst b/docs/history.rst new file mode 100644 index 00000000..565b0521 --- /dev/null +++ b/docs/history.rst @@ -0,0 +1 @@ +.. include:: ../CHANGELOG.rst diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..bdb65ec4 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,25 @@ +======================================== +Documentation of RethinkDB Python client +======================================== + +.. warning:: + + This documentation is intended for RethinkDB's Python client + developers. This documentation should not live on a public site + to not confuse any of our users. + +.. toctree:: + :maxdepth: 2 + + readme + modules + contributing + vulnerabilities + authors + history + +Indices and tables +================== +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..b6f0a539 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=python -msphinx +) +set SOURCEDIR=. +set BUILDDIR=_build +set SPHINXPROJ=rethinkdb + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The Sphinx module was not found. Make sure you have Sphinx installed, + echo.then set the SPHINXBUILD environment variable to point to the full + echo.path of the 'sphinx-build' executable. Alternatively you may add the + echo.Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd diff --git a/docs/modules.rst b/docs/modules.rst new file mode 100644 index 00000000..b07efe57 --- /dev/null +++ b/docs/modules.rst @@ -0,0 +1,7 @@ +rethinkdb +========= + +.. toctree:: + :maxdepth: 4 + + rethinkdb diff --git a/docs/readme.rst b/docs/readme.rst new file mode 100644 index 00000000..72a33558 --- /dev/null +++ b/docs/readme.rst @@ -0,0 +1 @@ +.. include:: ../README.rst diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..1b9eea8a --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,545 @@ +alabaster==0.7.12; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \ + --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 +astroid==2.11.6; python_full_version >= "3.6.2" \ + --hash=sha256:ba33a82a9a9c06a5ceed98180c5aab16e29c285b828d94696bf32d6015ea82a9 \ + --hash=sha256:4f933d0bf5e408b03a6feb5d23793740c27e07340605f236496cd6ce552043d6 +atomicwrites==1.4.0; python_version >= "3.7" and python_full_version < "3.0.0" and sys_platform == "win32" or sys_platform == "win32" and python_version >= "3.7" and python_full_version >= "3.4.0" \ + --hash=sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197 \ + --hash=sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a +attrs==21.4.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" \ + --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ + --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd +babel==2.10.3; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb \ + --hash=sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51 +bandit==1.7.4; python_version >= "3.7" \ + --hash=sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a \ + --hash=sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2 +black==22.3.0; python_full_version >= "3.6.2" \ + --hash=sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09 \ + --hash=sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb \ + --hash=sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a \ + --hash=sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968 \ + --hash=sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d \ + --hash=sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce \ + --hash=sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82 \ + --hash=sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b \ + --hash=sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015 \ + --hash=sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b \ + --hash=sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a \ + --hash=sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163 \ + 
--hash=sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464 \ + --hash=sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0 \ + --hash=sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176 \ + --hash=sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0 \ + --hash=sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20 \ + --hash=sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a \ + --hash=sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad \ + --hash=sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21 \ + --hash=sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265 \ + --hash=sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72 \ + --hash=sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79 +certifi==2022.6.15; python_version >= "3.7" and python_version < "4" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6") \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d +charset-normalizer==2.0.12; python_version >= "3.7" and python_version < "4" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6") and python_full_version >= "3.5.0" \ + --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ + --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df +click==8.1.3; python_version >= "3.7" and python_full_version >= "3.6.2" \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 \ + --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e +colorama==0.4.5; platform_system == "Windows" and python_version >= "3.7" and python_full_version >= "3.6.2" and sys_platform == "win32" and (python_version >= "3.7" and python_full_version < "3.0.0" and sys_platform == "win32" or sys_platform == "win32" and python_version >= "3.7" and python_full_version >= "3.5.0") and (python_version >= "3.6" and python_full_version < "3.0.0" and sys_platform == "win32" or python_full_version >= "3.5.0" and python_version >= "3.6" and sys_platform == "win32") \ + --hash=sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da \ + --hash=sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4 +coverage==6.4.1; python_version >= "3.7" \ + --hash=sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b \ + --hash=sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068 \ + --hash=sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4 \ + --hash=sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84 \ + --hash=sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad \ + --hash=sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc \ + --hash=sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749 \ + --hash=sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4 \ + --hash=sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df \ + --hash=sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6 \ + --hash=sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46 \ + 
--hash=sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982 \ + --hash=sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4 \ + --hash=sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb \ + --hash=sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b \ + --hash=sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3 \ + --hash=sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6 \ + --hash=sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e \ + --hash=sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28 \ + --hash=sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54 \ + --hash=sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9 \ + --hash=sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13 \ + --hash=sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9 \ + --hash=sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605 \ + --hash=sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d \ + --hash=sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428 \ + --hash=sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83 \ + --hash=sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b \ + --hash=sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c \ + --hash=sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df \ + --hash=sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d \ + --hash=sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4 \ + --hash=sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3 \ + --hash=sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3 \ + --hash=sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8 \ + --hash=sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72 \ + --hash=sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264 \ + --hash=sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9 \ + --hash=sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397 \ + --hash=sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815 \ + --hash=sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c +dill==0.3.5.1; python_full_version >= "3.7.0" \ + --hash=sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302 \ + --hash=sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86 +docutils==0.17.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" \ + --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61 \ + --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 +exceptiongroup==1.0.0rc8; python_version < "3.11" and python_version >= "3.7" \ + --hash=sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035 \ + --hash=sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a +flake8==4.0.1; python_version >= "3.6" \ + --hash=sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d \ + --hash=sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d +gitdb==4.0.9; python_version >= "3.7" \ + 
--hash=sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd \ + --hash=sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa +gitpython==3.1.27; python_version >= "3.7" \ + --hash=sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d \ + --hash=sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704 +hypothesis==6.47.3; python_version >= "3.7" \ + --hash=sha256:148198eacaa52aabd5b598e989d6906311a0580af6f99e76e3ace8d60762779a \ + --hash=sha256:3c6d99cd7540a64c37d88f86a7509c0ca29cda37a82a957340fba11388dfb9ce +idna==3.3; python_version >= "3.7" and python_version < "4" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6") \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +imagesize==1.3.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c \ + --hash=sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d +importlib-metadata==4.2.0; python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.6.2" \ + --hash=sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b \ + --hash=sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31 +iniconfig==1.1.1; python_version >= "3.7" \ + --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ + --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 +isort==5.10.1; python_full_version >= "3.6.2" and python_version < "4.0" \ + --hash=sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7 \ + --hash=sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951 +jinja2==3.1.2; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.7" \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 +lazy-object-proxy==1.7.1; python_version >= "3.6" and python_full_version >= "3.6.2" \ + --hash=sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4 \ + --hash=sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b \ + --hash=sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36 \ + --hash=sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb \ + --hash=sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443 \ + --hash=sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b \ + --hash=sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9 \ + --hash=sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd \ + --hash=sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442 \ + --hash=sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c \ + --hash=sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44 \ + --hash=sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1 \ + --hash=sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc \ + --hash=sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb \ + 
--hash=sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35 \ + --hash=sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0 \ + --hash=sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6 \ + --hash=sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c \ + --hash=sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42 \ + --hash=sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029 \ + --hash=sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69 \ + --hash=sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28 \ + --hash=sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a \ + --hash=sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e \ + --hash=sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38 \ + --hash=sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7 \ + --hash=sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a \ + --hash=sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55 \ + --hash=sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148 \ + --hash=sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de \ + --hash=sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad \ + --hash=sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1 \ + --hash=sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8 \ + --hash=sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09 \ + --hash=sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f \ + --hash=sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61 \ + --hash=sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84 +markupsafe==2.1.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.7" \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + 
--hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b +mccabe==0.6.1; python_version >= "3.6" and python_full_version >= "3.6.2" \ + --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f +mypy-extensions==0.4.3; python_full_version >= "3.6.2" and python_version >= "3.6" \ + --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ + --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 +mypy==0.950; python_version >= "3.6" \ + --hash=sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b \ + --hash=sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0 \ + --hash=sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22 \ + --hash=sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb \ + --hash=sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334 \ + --hash=sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f \ + --hash=sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc \ + --hash=sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2 \ + --hash=sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed \ + --hash=sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075 \ + --hash=sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b \ + --hash=sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d \ + --hash=sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a \ + --hash=sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605 \ + 
--hash=sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2 \ + --hash=sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff \ + --hash=sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8 \ + --hash=sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038 \ + --hash=sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2 \ + --hash=sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519 \ + --hash=sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef \ + --hash=sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb \ + --hash=sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de +packaging==21.3; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.7" \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb +pathspec==0.9.0; python_full_version >= "3.6.2" \ + --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \ + --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1 +pbr==5.9.0; python_version >= "3.7" \ + --hash=sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a \ + --hash=sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308 +platformdirs==2.5.2; python_version >= "3.7" and python_full_version >= "3.6.2" \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +pluggy==1.0.0; python_version >= "3.7" \ + --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 \ + --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 +py==1.11.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 +pycodestyle==2.8.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" \ + --hash=sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20 \ + --hash=sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f +pydantic==1.9.1; python_full_version >= "3.6.1" \ + --hash=sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193 \ + --hash=sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11 \ + --hash=sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310 \ + --hash=sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131 \ + --hash=sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580 \ + --hash=sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd \ + --hash=sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd \ + --hash=sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761 \ + --hash=sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918 \ + --hash=sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74 \ + --hash=sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a \ + 
--hash=sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166 \ + --hash=sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b \ + --hash=sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892 \ + --hash=sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e \ + --hash=sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608 \ + --hash=sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537 \ + --hash=sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380 \ + --hash=sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728 \ + --hash=sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a \ + --hash=sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1 \ + --hash=sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195 \ + --hash=sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b \ + --hash=sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49 \ + --hash=sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6 \ + --hash=sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0 \ + --hash=sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6 \ + --hash=sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810 \ + --hash=sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f \ + --hash=sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee \ + --hash=sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761 \ + --hash=sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd \ + --hash=sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1 \ + --hash=sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58 \ + --hash=sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a +pyflakes==2.4.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e \ + --hash=sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c +pygments==2.12.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519 \ + --hash=sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb +pylint==2.13.9; python_full_version >= "3.6.2" \ + --hash=sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526 \ + --hash=sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731 +pyparsing==3.0.9; python_full_version >= "3.6.8" and python_version >= "3.7" \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb +pytest==7.1.2; python_version >= "3.7" \ + --hash=sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c \ + --hash=sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45 +pytz==2022.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c \ + 
--hash=sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7 +pyyaml==6.0; python_version >= "3.7" \ + --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ + --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ + --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ + --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ + --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 \ + --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ + --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ + --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ + --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ + --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ + --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ + --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \ + --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ + --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ + --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ + --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \ + --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \ + --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ + --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ + --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \ + --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \ + --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ + --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ + --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ + --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \ + --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \ + --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ + --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ + --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ + --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \ + --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ + --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \ + --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 +requests==2.28.0; python_version >= "3.7" and python_version < "4" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6") \ + --hash=sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f \ + --hash=sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b +smmap==5.0.0; python_version >= "3.7" \ + --hash=sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94 \ + --hash=sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936 +snowballstemmer==2.2.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and 
python_version >= "3.6" \ + --hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a \ + --hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 +sortedcontainers==2.4.0; python_version >= "3.7" \ + --hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 \ + --hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 +sphinx-rtd-theme==1.0.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.4.0") \ + --hash=sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8 \ + --hash=sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c +sphinx==4.3.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851 \ + --hash=sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c +sphinxcontrib-applehelp==1.0.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58 \ + --hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a +sphinxcontrib-devhelp==1.0.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4 \ + --hash=sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e +sphinxcontrib-htmlhelp==2.0.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2 \ + --hash=sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07 +sphinxcontrib-jsmath==1.0.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 \ + --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 +sphinxcontrib-qthelp==1.0.3; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \ + --hash=sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6 +sphinxcontrib-serializinghtml==1.1.5; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \ + --hash=sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952 \ + --hash=sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd +stevedore==3.5.0; python_version >= "3.7" \ + --hash=sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c \ + --hash=sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335 +tomli==2.0.1; python_version < "3.11" and python_full_version >= "3.6.2" and python_version >= "3.7" \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +typed-ast==1.5.4; python_version < "3.8" and implementation_name == "cpython" and python_full_version >= "3.6.2" and python_version >= "3.6" \ + 
--hash=sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4 \ + --hash=sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62 \ + --hash=sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac \ + --hash=sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe \ + --hash=sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72 \ + --hash=sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec \ + --hash=sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47 \ + --hash=sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6 \ + --hash=sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1 \ + --hash=sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6 \ + --hash=sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66 \ + --hash=sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c \ + --hash=sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2 \ + --hash=sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d \ + --hash=sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f \ + --hash=sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc \ + --hash=sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6 \ + --hash=sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e \ + --hash=sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35 \ + --hash=sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97 \ + --hash=sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3 \ + --hash=sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72 \ + --hash=sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1 \ + --hash=sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2 +typing-extensions==4.2.0; python_version >= "3.7" and python_full_version >= "3.6.2" and python_version < "3.8" \ + --hash=sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708 \ + --hash=sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376 +ujson==5.3.0; python_version >= "3.7" \ + --hash=sha256:a933b3a238a48162c382e0ac338b97663d044b0485021b6670565a81e7b7ec98 \ + --hash=sha256:612015c6e5a9bf041b89f1eaa8ab8682469b3a745a00c7c95bbbee8080f6b346 \ + --hash=sha256:a720b6eff73415249a3dd02e2b1b337de31bb9fa8220bd572dffba23066e538c \ + --hash=sha256:c1408ea1704017289c3023928065233b90953aae3e1d7d06d6d6db667e9fe159 \ + --hash=sha256:5192505798a5734a85c763eff11e6f6072d3595c337b52f72922b4e22fe66e2e \ + --hash=sha256:bad1471ccfa8d100a0bc513c6db587c38de99384f2aa54eec1016a131d63d3d9 \ + --hash=sha256:b926f2f7a266db8f2c46498f0c2c9fcc7e53c8e0fa8bff7f08ad9c044723a2ec \ + --hash=sha256:ed9809bc36292e0d3632d50aae497b5827c1a2e07158f7d4d5c53e8e8662bf66 \ + --hash=sha256:522b1d60872bb6368c14ac538adb55ca9d6c39a7a962832819ef1aafb3446ff5 \ + --hash=sha256:a609bb1cdda9748e6a8363039926dee5ea2bcc073412279615560b967f92a524 \ + --hash=sha256:7455fc3d69315149b95fd011c01496a5e9442c9e7c4d202bed87c5c2e449ed05 \ + --hash=sha256:865225a85e4ce48754d0036fdc0eb796b4aaf4f1e928f0efb9b4e1c081647a4c \ + --hash=sha256:d553f31bceda492c2bda37f48873820d28f07608ae14409c5e9d6c3aa6694840 \ + --hash=sha256:a014531468b78c031aa04e5ca8b64385a6edb48a2e66ebf11093213c678fc383 \ + --hash=sha256:b3e6431812d8008dce7b2546b1276f649f6c9aa44617762ebd3529a25092816c \ + 
--hash=sha256:089965f964d17905c48cdca88b982d525165e549b438ac86f194c6a9d852fd69 \ + --hash=sha256:ca5eced4ae4ba1e2c9539fca6451694d31e0243de2acfcd6965e2b6e159ba29b \ + --hash=sha256:a4fe193050b519ace09f7d053def30b99deadf650c18a8a874ea0f6c9a2992bc \ + --hash=sha256:e7961c493a982c03cffc9ce4dc2b23bed1375352296f946cc36ddeb5145fa62c \ + --hash=sha256:34592a3c9370745b093ebca60aee6d32f8e7abe3d5c12d54c7dba0b2f81cd863 \ + --hash=sha256:510c3705b29bc3753ec9e6073b99000160320c1cf6e035884295401acb474dfa \ + --hash=sha256:034c07399dff35385ecc53caf9b1f12b3e203834de27b723daeb2cbb3e02ee7f \ + --hash=sha256:5a87e1c05f1efc23c67bfa26be79f12c1f59f71a586b396068d5cf7eb78a2635 \ + --hash=sha256:972c1850cc52e57ccdea70e3c069e2da5c6090e3ee18d167dff2618a8d7dd127 \ + --hash=sha256:d45e86101a5cddd295d5870b02244fc87ecd9b8936f440acbd2bb30b4c1fe23c \ + --hash=sha256:decd32e8d7f934dde484e43431f60b069e87bb30a3a7e186cb6bd69caa0418f3 \ + --hash=sha256:8c734982d6560356c173817576a1f3fa074a2d2b993e63bffa69105ae9ec144b \ + --hash=sha256:563b7ed1e789f763410c49e6fab51d61982eb94088b25338e65b89ad20b6b107 \ + --hash=sha256:8a2cbb044bc6e6764b9a089a2079432b8bd576dbff5faa808b562a8f3c97452b \ + --hash=sha256:6c5d19fbdd29d5080926c863ba89591a2d3dbf592ea35b456cb2996004433d11 \ + --hash=sha256:4dc79db757b0dfa23a111a4573827a6ef57de65dbe8cdb202e45cf9ddf06aad5 \ + --hash=sha256:5700a179abacbdc8609737e595a598b7f107cd68615ded3f922f4c0d4b6009d6 \ + --hash=sha256:287dea79473ce4941598c45dc34f9f692d48d7863b451541c5ce960ab54465fb \ + --hash=sha256:151faa9085c10351a04aea959a2bc25dfa2e21af26d9b614a221d045b7923ea4 \ + --hash=sha256:285082924747958aa69e1dc2146c01db6b0921a0bb04b595beefe7fcffaffaf9 \ + --hash=sha256:8dd74570fe59c738d4dc12d44eb89538b0b01fae9dda6cfe3ff3f6934877cf35 \ + --hash=sha256:6aba1e39ffdd83ec14832ea25bbb18266fea46bc69b8c0acbd996495826c0e6f \ + --hash=sha256:1358621686ddfda55171fc98c171bf5b1a80ce4d444134b70e1e449925fa014f \ + --hash=sha256:d1fab398734634f4b412512ed230d45522fc9f3dd9ca169f579474a491f662aa \ + --hash=sha256:d4830c8df958c45c16dfc43c8353403efd7f1a8e39b91a7e0e848d55b7fa8b48 \ + --hash=sha256:48bed7c1f95484644a2cc658efff4d1e75b8c806f6ef2b5c815f59e1cbe0d039 \ + --hash=sha256:2db7cbe415d7329b9bff029a83851d1077836ec728fe1c32be34c9c3a5017ab2 \ + --hash=sha256:73636001055667bbcc6a73b232da1d272f68a49a1f192efbe99e99ddf8ef1d21 \ + --hash=sha256:47bf966e1041ae8e568d7e8eb421d72d0521c30c28306b76c256832553e316c6 \ + --hash=sha256:66f857d8b8d7ea44e3fd5f2b7e471334f24b735423729771f5a7a7f69ab645ed \ + --hash=sha256:7d2cb50aa526032b8812975c3832058763ee50e1dc3a1302431ed9d0922c3a1b \ + --hash=sha256:f615ee181b813c8f50a57d55354d0c0304a0be066962efdbef6f44517b26e3b2 \ + --hash=sha256:c5696c99a7dd567566c18490e8e346b2657967feb1e3c2004e91dbb253db0894 \ + --hash=sha256:a68d5a8a46712ffe86db8ae1b4311714db534725521c71fd4c9e1cd062dae9a4 \ + --hash=sha256:ab938777b3ac0372231ee654a7f6a13787e587b1ca268d8aa7e6fb6846e477d0 +urllib3==1.26.9; python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "4" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6") or python_full_version >= "3.5.0" and python_version < "4" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6") \ + --hash=sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14 \ + --hash=sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e +wrapt==1.14.1; python_full_version >= "3.6.2" 
\ + --hash=sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3 \ + --hash=sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef \ + --hash=sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28 \ + --hash=sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59 \ + --hash=sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87 \ + --hash=sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1 \ + --hash=sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b \ + --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462 \ + --hash=sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1 \ + --hash=sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320 \ + --hash=sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2 \ + --hash=sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4 \ + --hash=sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069 \ + --hash=sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310 \ + --hash=sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f \ + --hash=sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656 \ + --hash=sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c \ + --hash=sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8 \ + --hash=sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164 \ + --hash=sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907 \ + --hash=sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3 \ + --hash=sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3 \ + --hash=sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d \ + --hash=sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7 \ + --hash=sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00 \ + --hash=sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4 \ + --hash=sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1 \ + --hash=sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1 \ + --hash=sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff \ + --hash=sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d \ + --hash=sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1 \ + --hash=sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569 \ + --hash=sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed \ + --hash=sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471 \ + --hash=sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248 \ + --hash=sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68 \ + --hash=sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d \ + --hash=sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77 \ + --hash=sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7 \ + --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015 \ + --hash=sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a \ + --hash=sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853 \ + 
--hash=sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c \ + --hash=sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456 \ + --hash=sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f \ + --hash=sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc \ + --hash=sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1 \ + --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af \ + --hash=sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b \ + --hash=sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0 \ + --hash=sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57 \ + --hash=sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5 \ + --hash=sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d \ + --hash=sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383 \ + --hash=sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7 \ + --hash=sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86 \ + --hash=sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735 \ + --hash=sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b \ + --hash=sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3 \ + --hash=sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3 \ + --hash=sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe \ + --hash=sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5 \ + --hash=sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb \ + --hash=sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d +zipp==3.8.0; python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.6.2" \ + --hash=sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099 \ + --hash=sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad diff --git a/docs/rethinkdb.rst b/docs/rethinkdb.rst new file mode 100644 index 00000000..0e762f20 --- /dev/null +++ b/docs/rethinkdb.rst @@ -0,0 +1,85 @@ +rethinkdb package +================= + +Submodules +---------- + +rethinkdb.ast module +-------------------- + +.. automodule:: rethinkdb.ast + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.encoder module +------------------------ + +.. automodule:: rethinkdb.encoder + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.errors module +----------------------- + +.. automodule:: rethinkdb.errors + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.handshake module +-------------------------- + +.. automodule:: rethinkdb.handshake + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.net module +-------------------- + +.. automodule:: rethinkdb.net + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.ql2\_pb2 module +------------------------- + +.. automodule:: rethinkdb.ql2_pb2 + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.query module +---------------------- + +.. automodule:: rethinkdb.query + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.repl module +--------------------- + +.. automodule:: rethinkdb.repl + :members: + :undoc-members: + :show-inheritance: + +rethinkdb.utilities module +-------------------------- + +.. 
automodule:: rethinkdb.utilities
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: rethinkdb
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/vulnerabilities.rst b/docs/vulnerabilities.rst
new file mode 100644
index 00000000..5f5ef27e
--- /dev/null
+++ b/docs/vulnerabilities.rst
@@ -0,0 +1,52 @@
+===============
+Vulnerabilities
+===============
+
+.. note::
+   Important! If you find a vulnerability or security issue in one of
+   the libraries we use, or anywhere else in the code, please contact us
+   via e-mail at security@rethinkdb.com. Please do not use this channel
+   for support.
+
+Reporting vulnerabilities
+-------------------------
+
+What is a vulnerability?
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Vulnerability is a cyber-security term that refers to a flaw in a system
+that can leave it open to attack. The vulnerability may also refer to any
+type of weakness in a computer system itself, in a set of procedures, or
+in anything that leaves information security exposed to a threat.
+- by techopedia_
+
+.. _techopedia: https://www.techopedia.com/definition/13484/vulnerability
+
+In case you find a vulnerability
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you find a vulnerability or security issue in one of the libraries
+we use, or anywhere else in the code, please do not publish it; instead,
+contact us via e-mail at security@rethinkdb.com. We will take the
+necessary steps to fix the issue. We handle vulnerabilities privately.
+
+To make report processing easier, please consider the following:
+
+* Use a clear and expressive subject
+* Include a short, clear, and direct description, including all relevant
+  details
+* Include OWASP links, CVE references, or links to other public advisories
+  and standards
+* Add steps on how to reproduce the issue
+* Describe your environment
+* Attach screenshots if applicable
+
+.. note::
+
+   This article_ is a pretty good resource on how to report
+   vulnerabilities.
+
+.. _article:
+
+In case you have any further questions regarding vulnerability reporting,
+feel free to open an issue_ on GitHub.
+
+.. _issue: https://github.com/rethinkdb/rethinkdb-python/issues
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..f8b1844b
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1 @@
+[mypy]
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 00000000..a14ba594
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1388 @@
+[[package]]
+name = "alabaster"
+version = "0.7.12"
+description = "A configurable sidebar-enabled Sphinx theme"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "astroid"
+version = "2.11.6"
+description = "An abstract syntax tree for Python with inference support."
+category = "dev"
+optional = false
+python-versions = ">=3.6.2"
+
+[package.dependencies]
+lazy-object-proxy = ">=1.4.0"
+typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
+typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
+wrapt = ">=1.11,<2"
+
+[[package]]
+name = "atomicwrites"
+version = "1.4.0"
+description = "Atomic file writes."
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "babel" +version = "2.10.3" +description = "Internationalization utilities" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz = ">=2015.7" + +[[package]] +name = "bandit" +version = "1.7.4" +description = "Security oriented static analyser for python code." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +stevedore = ">=1.20.0" + +[package.extras] +test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] +toml = ["toml"] +yaml = ["pyyaml"] + +[[package]] +name = "black" +version = "22.3.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2022.6.15" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.5" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "coverage" +version = "6.4.1" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dill" +version = "0.3.5.1" +description = "serialize all of python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "exceptiongroup" +version = "1.0.0rc8" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.27" +description = "GitPython is a python library used to interact with Git repositories" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +gitdb = ">=4.0.1,<5" +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} + +[[package]] +name = "hypothesis" +version = "6.47.3" +description = "A library for property-based testing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +sortedcontainers = ">=2.1.0,<3.0.0" + +[package.extras] +all = ["black (>=19.10b0)", "click (>=7.0)", "django (>=2.2)", "dpcontracts (>=0.4)", "lark-parser (>=0.6.5)", "libcst (>=0.3.16)", "numpy (>=1.9.0)", "pandas (>=0.25)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "importlib-metadata (>=3.6)", "backports.zoneinfo (>=0.2.1)", "tzdata (>=2022.1)"] +cli = ["click (>=7.0)", "black (>=19.10b0)", "rich (>=9.0.0)"] +codemods = ["libcst (>=0.3.16)"] +dateutil = ["python-dateutil (>=1.4)"] +django = ["django (>=2.2)"] +dpcontracts = ["dpcontracts (>=0.4)"] +ghostwriter = ["black (>=19.10b0)"] +lark = ["lark-parser (>=0.6.5)"] +numpy = ["numpy (>=1.9.0)"] +pandas = ["pandas (>=0.25)"] +pytest = ["pytest (>=4.6)"] +pytz = ["pytz (>=2014.1)"] +redis = ["redis (>=3.0.0)"] +zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2022.1)"] + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.3.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + 
+[[package]] +name = "importlib-metadata" +version = "4.2.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "lazy-object-proxy" +version = "1.7.1" +description = "A fast and thorough lazy object proxy." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mypy" +version = "0.950" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "pbr" +version = "5.9.0" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydantic" +version = "1.9.1" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +typing-extensions = ">=3.7.4.3" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pygments" +version = "2.12.0" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pylint" +version = "2.13.9" +description = "python code static checker" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +astroid = ">=2.11.5,<=2.12.0-dev0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +dill = ">=0.2" +isort = ">=4.2.5,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +testutil = ["gitpython (>3)"] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pytest" +version = "7.1.2" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytz" +version = "2022.1" +description = "World timezone definitions, modern and historical" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.28.0" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2.0.0,<2.1.0" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sphinx" +version = "4.3.2" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.920)", "docutils-stubs", "types-typed-ast", "types-pkg-resources", "types-requests"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +description = "Read the Docs theme for Sphinx" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[package.dependencies] +docutils = "<0.18" +sphinx = ">=1.6" + +[package.extras] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["pytest", "flake8", "mypy"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "stevedore" +version = "3.5.0" +description = "Manage dynamic plugins for Python applications" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typed-ast" +version = "1.5.4" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typing-extensions" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "ujson" +version = "5.3.0" +description = "Ultra fast JSON encoder and decoder for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.9" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wrapt" +version = "1.14.1" +description = "Module for decorators, wrappers and monkey patching." 
+category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "zipp" +version = "3.8.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + +[extras] +all = [] + +[metadata] +lock-version = "1.1" +python-versions = "^3.7" +content-hash = "1188177f9b4a69cea9a4091b4eae8da377cf07890872f8f04fb231af2acc56cb" + +[metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] +astroid = [ + {file = "astroid-2.11.6-py3-none-any.whl", hash = "sha256:ba33a82a9a9c06a5ceed98180c5aab16e29c285b828d94696bf32d6015ea82a9"}, + {file = "astroid-2.11.6.tar.gz", hash = "sha256:4f933d0bf5e408b03a6feb5d23793740c27e07340605f236496cd6ce552043d6"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +babel = [ + {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, + {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +] +bandit = [ + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, +] +black = [ + {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, + {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, + {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, + {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, + {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, + {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, + {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, + {file 
= "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, + {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, + {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, + {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, + {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, + {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, + {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, + {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, + {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, + {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, + {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, +] +certifi = [ + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] +coverage = [ + {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, + {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, + {file = 
"coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, + {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, + {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, + {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, + {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, + {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, + {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, + {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, + {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, + {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, + {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, + {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, +] +dill = [ + {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"}, + {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, +] +docutils = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] +exceptiongroup = [ + {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, + {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, +] +flake8 = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = 
"sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] +gitdb = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] +gitpython = [ + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, +] +hypothesis = [ + {file = "hypothesis-6.47.3-py3-none-any.whl", hash = "sha256:148198eacaa52aabd5b598e989d6906311a0580af6f99e76e3ace8d60762779a"}, + {file = "hypothesis-6.47.3.tar.gz", hash = "sha256:3c6d99cd7540a64c37d88f86a7509c0ca29cda37a82a957340fba11388dfb9ce"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +imagesize = [ + {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, + {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, + {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +lazy-object-proxy = [ + {file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"}, + {file = 
"lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, + {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = 
"MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +mypy = [ + {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"}, + {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"}, + {file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"}, + {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"}, + {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"}, + {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"}, + {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"}, + {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"}, + {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"}, + {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"}, + {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = "sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"}, + {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"}, + {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"}, + {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"}, + {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"}, + {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"}, + {file = 
"mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"}, + {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"}, + {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"}, + {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"}, + {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"}, + {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"}, + {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +pbr = [ + {file = "pbr-5.9.0-py2.py3-none-any.whl", hash = "sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a"}, + {file = "pbr-5.9.0.tar.gz", hash = "sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pycodestyle = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] +pydantic = [ + {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, + {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, + {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, + {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, + {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, + {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, + {file = 
"pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, + {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, + {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, + {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, +] +pyflakes = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] +pygments = [ + {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, + {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, +] +pylint = [ + {file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"}, + {file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pytest = [ + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, +] +pytz = [ + {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, + {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = 
"PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +requests = [ + {file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"}, + {file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"}, +] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +sortedcontainers = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] +sphinx = [ + {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, + {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, +] +sphinx-rtd-theme = [ + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = 
"sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] +stevedore = [ + {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, + {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +typed-ast = [ + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] +typing-extensions = [ + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, +] +ujson = [ + {file = "ujson-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a933b3a238a48162c382e0ac338b97663d044b0485021b6670565a81e7b7ec98"}, + {file = "ujson-5.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:612015c6e5a9bf041b89f1eaa8ab8682469b3a745a00c7c95bbbee8080f6b346"}, + {file = "ujson-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a720b6eff73415249a3dd02e2b1b337de31bb9fa8220bd572dffba23066e538c"}, + {file = "ujson-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1408ea1704017289c3023928065233b90953aae3e1d7d06d6d6db667e9fe159"}, + {file = "ujson-5.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5192505798a5734a85c763eff11e6f6072d3595c337b52f72922b4e22fe66e2e"}, + {file = "ujson-5.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bad1471ccfa8d100a0bc513c6db587c38de99384f2aa54eec1016a131d63d3d9"}, + {file = "ujson-5.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b926f2f7a266db8f2c46498f0c2c9fcc7e53c8e0fa8bff7f08ad9c044723a2ec"}, + {file = "ujson-5.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed9809bc36292e0d3632d50aae497b5827c1a2e07158f7d4d5c53e8e8662bf66"}, + {file = "ujson-5.3.0-cp310-cp310-win32.whl", hash = 
"sha256:522b1d60872bb6368c14ac538adb55ca9d6c39a7a962832819ef1aafb3446ff5"}, + {file = "ujson-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a609bb1cdda9748e6a8363039926dee5ea2bcc073412279615560b967f92a524"}, + {file = "ujson-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7455fc3d69315149b95fd011c01496a5e9442c9e7c4d202bed87c5c2e449ed05"}, + {file = "ujson-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:865225a85e4ce48754d0036fdc0eb796b4aaf4f1e928f0efb9b4e1c081647a4c"}, + {file = "ujson-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d553f31bceda492c2bda37f48873820d28f07608ae14409c5e9d6c3aa6694840"}, + {file = "ujson-5.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a014531468b78c031aa04e5ca8b64385a6edb48a2e66ebf11093213c678fc383"}, + {file = "ujson-5.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b3e6431812d8008dce7b2546b1276f649f6c9aa44617762ebd3529a25092816c"}, + {file = "ujson-5.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:089965f964d17905c48cdca88b982d525165e549b438ac86f194c6a9d852fd69"}, + {file = "ujson-5.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ca5eced4ae4ba1e2c9539fca6451694d31e0243de2acfcd6965e2b6e159ba29b"}, + {file = "ujson-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:a4fe193050b519ace09f7d053def30b99deadf650c18a8a874ea0f6c9a2992bc"}, + {file = "ujson-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e7961c493a982c03cffc9ce4dc2b23bed1375352296f946cc36ddeb5145fa62c"}, + {file = "ujson-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:34592a3c9370745b093ebca60aee6d32f8e7abe3d5c12d54c7dba0b2f81cd863"}, + {file = "ujson-5.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:510c3705b29bc3753ec9e6073b99000160320c1cf6e035884295401acb474dfa"}, + {file = "ujson-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:034c07399dff35385ecc53caf9b1f12b3e203834de27b723daeb2cbb3e02ee7f"}, + {file = "ujson-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a87e1c05f1efc23c67bfa26be79f12c1f59f71a586b396068d5cf7eb78a2635"}, + {file = "ujson-5.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:972c1850cc52e57ccdea70e3c069e2da5c6090e3ee18d167dff2618a8d7dd127"}, + {file = "ujson-5.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d45e86101a5cddd295d5870b02244fc87ecd9b8936f440acbd2bb30b4c1fe23c"}, + {file = "ujson-5.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:decd32e8d7f934dde484e43431f60b069e87bb30a3a7e186cb6bd69caa0418f3"}, + {file = "ujson-5.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c734982d6560356c173817576a1f3fa074a2d2b993e63bffa69105ae9ec144b"}, + {file = "ujson-5.3.0-cp38-cp38-win32.whl", hash = "sha256:563b7ed1e789f763410c49e6fab51d61982eb94088b25338e65b89ad20b6b107"}, + {file = "ujson-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:8a2cbb044bc6e6764b9a089a2079432b8bd576dbff5faa808b562a8f3c97452b"}, + {file = "ujson-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c5d19fbdd29d5080926c863ba89591a2d3dbf592ea35b456cb2996004433d11"}, + {file = "ujson-5.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4dc79db757b0dfa23a111a4573827a6ef57de65dbe8cdb202e45cf9ddf06aad5"}, + {file = "ujson-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5700a179abacbdc8609737e595a598b7f107cd68615ded3f922f4c0d4b6009d6"}, + {file = 
"ujson-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:287dea79473ce4941598c45dc34f9f692d48d7863b451541c5ce960ab54465fb"}, + {file = "ujson-5.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:151faa9085c10351a04aea959a2bc25dfa2e21af26d9b614a221d045b7923ea4"}, + {file = "ujson-5.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:285082924747958aa69e1dc2146c01db6b0921a0bb04b595beefe7fcffaffaf9"}, + {file = "ujson-5.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dd74570fe59c738d4dc12d44eb89538b0b01fae9dda6cfe3ff3f6934877cf35"}, + {file = "ujson-5.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6aba1e39ffdd83ec14832ea25bbb18266fea46bc69b8c0acbd996495826c0e6f"}, + {file = "ujson-5.3.0-cp39-cp39-win32.whl", hash = "sha256:1358621686ddfda55171fc98c171bf5b1a80ce4d444134b70e1e449925fa014f"}, + {file = "ujson-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d1fab398734634f4b412512ed230d45522fc9f3dd9ca169f579474a491f662aa"}, + {file = "ujson-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d4830c8df958c45c16dfc43c8353403efd7f1a8e39b91a7e0e848d55b7fa8b48"}, + {file = "ujson-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48bed7c1f95484644a2cc658efff4d1e75b8c806f6ef2b5c815f59e1cbe0d039"}, + {file = "ujson-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2db7cbe415d7329b9bff029a83851d1077836ec728fe1c32be34c9c3a5017ab2"}, + {file = "ujson-5.3.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73636001055667bbcc6a73b232da1d272f68a49a1f192efbe99e99ddf8ef1d21"}, + {file = "ujson-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:47bf966e1041ae8e568d7e8eb421d72d0521c30c28306b76c256832553e316c6"}, + {file = "ujson-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:66f857d8b8d7ea44e3fd5f2b7e471334f24b735423729771f5a7a7f69ab645ed"}, + {file = "ujson-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d2cb50aa526032b8812975c3832058763ee50e1dc3a1302431ed9d0922c3a1b"}, + {file = "ujson-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f615ee181b813c8f50a57d55354d0c0304a0be066962efdbef6f44517b26e3b2"}, + {file = "ujson-5.3.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5696c99a7dd567566c18490e8e346b2657967feb1e3c2004e91dbb253db0894"}, + {file = "ujson-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a68d5a8a46712ffe86db8ae1b4311714db534725521c71fd4c9e1cd062dae9a4"}, + {file = "ujson-5.3.0.tar.gz", hash = "sha256:ab938777b3ac0372231ee654a7f6a13787e587b1ca268d8aa7e6fb6846e477d0"}, +] +urllib3 = [ + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, +] +wrapt = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = 
"wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] +zipp = [ + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, +] diff --git a/pylintrc b/pylintrc new file mode 100644 index 00000000..4c23e244 --- /dev/null +++ b/pylintrc @@ -0,0 +1,503 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=0 + +# Control the amount of potential inferred values when inferring a single +# object. 
This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=bad-continuation, + missing-module-docstring, + W0511 + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=colorized + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[LOGGING] + +# Format style used to check logging format string. `old` means using % +# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. 
+logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + TODO + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. 
+allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. 
+function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + _, + db + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=yes + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[STRING] + +# This flag controls whether the implicit-str-concat-in-sequence should +# generate a warning on implicit string concatenation in sequences defined over +# several lines. +check-str-concat-over-line-jumps=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. 
+exclude-protected=_asdict,
+                  _fields,
+                  _replace,
+                  _source,
+                  _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method.
+max-args=8
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=10
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=10
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=5
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "BaseException, Exception".
+overgeneral-exceptions=BaseException,
+                       Exception
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..0b42e79d
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,78 @@
+[tool.poetry]
+name = "rethinkdb"
+version = "2.5.0"
+license = "Apache-2.0"
+description = "Python client for RethinkDB."
+documentation = "https://rethinkdb.com/api/python/"
+repository = "https://github.com/rethinkdb/rethinkdb-python/"
+readme = "README.rst"
+keywords = ["database", "rethinkdb", "client"]
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Environment :: Console",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: Apache Software License",
+    "Natural Language :: English",
+    "Operating System :: MacOS",
+    "Operating System :: Microsoft",
+    "Operating System :: POSIX",
+    "Operating System :: Unix",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.7",
+    "Programming Language :: Python :: 3.8",
+    "Topic :: Software Development",
+    "Topic :: Database",
+    "Topic :: Database :: Front-Ends",
+    "Typing :: Typed",
+
+]
+authors = [
+    "RethinkDB ",
+]
+maintainers = [
+    "RethinkDB ",
+]
+
+[tool.poetry.urls]
+"Bug Tracker" = "https://github.com/rethinkdb/rethinkdb-python/issues/"
+
+# this should be shipped with the C++ code not the python client
+# [tool.poetry.scripts]
+# rethinkdb-import = 'rethinkdb.main:app'
+
+[tool.poetry.dependencies]
+pydantic = "^1.9"
+python = "^3.7"
+ujson = "^5.2.0"
+
+[tool.poetry.dev-dependencies]
+bandit = "^1.7"
+black = "^22.3"
+coverage = "^6.3"
+flake8 = "^4.0"
+hypothesis = "^6.46.3"
+mypy = "^0.950"
+pylint = "^2.13"
+pytest = "^7.1"
+sphinx_rtd_theme = "^1.0"
+
+[tool.poetry.extras]
+# Here comes the Trio, Twisted, etc extras
+all = []
+
+[tool.black]
+target-version = ['py38']
+
+[tool.isort]
+atomic = true
+case_sensitive = true
+combine_star = true
+force_sort_within_sections = true
+include_trailing_comma = true
+order_by_type = true
+line_length = 88 # Same as Black's line length
+multi_line_output = 3
+
+[build-system]
+requires = ["poetry>=1.1.13"]
+build-backend = "poetry.masonry.api"
diff --git a/pytest.ini b/pytest.ini
index 602e59d0..5c244bef 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,8 +1,7 @@
 [pytest]
-python_files = test_*.py
+python_files=test_*.py
+python_classes=*TestCase
+python_functions=test_*
 markers =
-    unit: Run unit tests
-    integration: Run integration tests
-    trio: Run trio related tests
-    tornado: Run tornado related tests
-    asyncio: Run asyncio related tests
\ No newline at end of file
+    unit: Select only unit tests
+    integration: Select only integration tests
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index b35831ca..00000000
--- a/requirements.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-async-generator==1.10; python_version>="3.6"
-codacy-coverage==1.3.11
-mock==3.0.5
-pytest-cov==2.10.1
-pytest-tornasync==0.6.0.post2; python_version >= '3.5'
-pytest-trio==0.6.0; python_version>="3.6"
-pytest==4.6.6; python_version<"3.5"
-pytest==6.1.2; python_version>="3.5"
-six==1.15.0
-tornado==5.1.1; python_version<"3.6"
-tornado==6.0.4; python_version>="3.6"
-trio==0.16.0; python_version>="3.6"
-outcome==1.1.0; python_version>="3.6"
-outcome==1.0.1; python_version<="3.5"
-attrs==20.3.0; python_version>="3.5"
diff --git a/rethinkdb/__init__.py b/rethinkdb/__init__.py
index 70b1661a..801db479 100644
--- a/rethinkdb/__init__.py
+++ b/rethinkdb/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2018 RethinkDB
+# Copyright 2022 RethinkDB
 #
 # Licensed under the Apache License, Version 2.0 (the 'License');
 # you may not use this file except in compliance with the License.
@@ -11,40 +11,31 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
+# This file incorporates work covered by the following copyright:
+# Copyright 2010-2016 RethinkDB, all rights reserved.
+
+import warnings
 
-from rethinkdb import errors, version
+from rethinkdb import errors  # , version
 
-# The builtins here defends against re-importing something obscuring `object`.
-try:
-    import __builtin__ as builtins  # Python 2
-except ImportError:
-    import builtins  # Python 3
+__all__ = ["r", "RethinkDB"]
+__version__ = "2.5.0"
 
-__all__ = ["RethinkDB"] + errors.__all__
-__version__ = version.VERSION
 
+class RethinkDB:
+    """
+    RethinkDB serves as an entrypoint for queries.
+    It constructs the connection handlers and event loops, re-exports internal modules for easier
+    use, and sets the event loop.
+    """
 
-class RethinkDB(builtins.object):
     def __init__(self):
-        super(RethinkDB, self).__init__()
-
-        from rethinkdb import (
-            _dump,
-            _export,
-            _import,
-            _index_rebuild,
-            _restore,
-            ast,
-            query,
-            net,
-        )
-
-        self._dump = _dump
-        self._export = _export
-        self._import = _import
-        self._index_rebuild = _index_rebuild
-        self._restore = _restore
+        super().__init__()
+
+        # pylint: disable=import-outside-toplevel
+        from rethinkdb import ast, net, query
 
         # Re-export internal modules for backward compatibility
         self.ast = ast
@@ -53,40 +44,50 @@ def __init__(self):
         self.query = query
 
         net.Connection._r = self
+        self.connection_type = None
+
         # Dynamically assign every re-exported internal module's function to self
         for module in (self.net, self.query, self.ast, self.errors):
             for function_name in module.__all__:
                 setattr(self, function_name, getattr(module, function_name))
 
+        # Ensure the `make_connection` function is not overridden accidentally
+        self.make_connection = self.net.make_connection
         self.set_loop_type(None)
 
-    def set_loop_type(self, library=None):
+    def set_loop_type(self, library=None) -> None:
+        """
+        Set event loop type for the requested library.
+ """ + if library == "asyncio": - from rethinkdb.asyncio_net import net_asyncio - self.connection_type = net_asyncio.Connection + warnings.warn(f"{library} is not yet supported, using the default one") + library = None if library == "gevent": - from rethinkdb.gevent_net import net_gevent - self.connection_type = net_gevent.Connection + warnings.warn(f"{library} is not yet supported, using the default one") + library = None if library == "tornado": - from rethinkdb.tornado_net import net_tornado - self.connection_type = net_tornado.Connection + warnings.warn(f"{library} is not yet supported, using the default one") + library = None if library == "trio": - from rethinkdb.trio_net import net_trio - self.connection_type = net_trio.Connection + warnings.warn(f"{library} is not yet supported, using the default one") + library = None if library == "twisted": - from rethinkdb.twisted_net import net_twisted - self.connection_type = net_twisted.Connection + warnings.warn(f"{library} is not yet supported, using the default one") + library = None if library is None or self.connection_type is None: self.connection_type = self.net.DefaultConnection - return - def connect(self, *args, **kwargs): + """ + Make a connection to the database. + """ + return self.make_connection(self.connection_type, *args, **kwargs) diff --git a/rethinkdb/__main__.py b/rethinkdb/__main__.py deleted file mode 100644 index fb0c670f..00000000 --- a/rethinkdb/__main__.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 RethinkDB -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This file incorporates work covered by the following copyright: -# Copyright 2010-2016 RethinkDB, all rights reserved. - -"""Dispatcher for interactive functions such as repl and backup""" - -import code -import sys -import traceback - -from rethinkdb import errors, net, utils_common - - -def startInterpreter(argv=None, prog=None): - repl_variables = {"r": net.Connection._r, "rethinkdb": net.Connection._r} - banner = "The RethinkDB driver has been imported as `r`." - - # -- get host/port setup - - # - parse command line - parser = utils_common.CommonOptionsParser( - prog=prog, - description="An interactive Python shell (repl) with the RethinkDB driver imported", - ) - options, args = parser.parse_args( - argv if argv is not None else sys.argv[1:], connect=False - ) - - if args: - parser.error( - "No positional arguments supported. 
Unrecognized option(s): %s" % args - ) - - # -- open connection - - try: - repl_variables["conn"] = options.retryQuery.conn() - repl_variables["conn"].repl() - banner += """ - A connection to %s:%d has been established as `conn` - and can be used by calling `run()` on a query without any arguments.""" % ( - options.hostname, - options.driver_port, - ) - except errors.ReqlDriverError as e: - banner += "\nWarning: %s" % str(e) - if options.debug: - banner += "\n" + traceback.format_exc() - - # -- start interpreter - - code.interact(banner=banner + "\n==========", local=repl_variables) - - -if __name__ == "__main__": - if __package__ is None: - __package__ = "rethinkdb" - - # -- figure out which mode we are in - modes = ["dump", "export", "import", "index_rebuild", "repl", "restore"] - - if len(sys.argv) < 2 or sys.argv[1] not in modes: - sys.exit( - "ERROR: Must be called with one of the following verbs: %s" - % ", ".join(modes) - ) - - verb = sys.argv[1] - prog = "python -m rethinkdb" - if sys.version_info < (2, 7) or ( - sys.version_info >= (3, 0) and sys.version_info < (3, 4) - ): - prog += ".__main__" # Python versions 2.6, 3.0, 3.1 and 3.3 do not support running packages - prog += " " + verb - argv = sys.argv[2:] - - if verb == "dump": - from . import _dump - - exit(_dump.main(argv, prog=prog)) - elif verb == "export": - from . import _export - - exit(_export.main(argv, prog=prog)) - elif verb == "import": - from . import _import - - exit(_import.main(argv, prog=prog)) - elif verb == "index_rebuild": - from . import _index_rebuild - - exit(_index_rebuild.main(argv, prog=prog)) - elif verb == "repl": - startInterpreter(argv, prog=prog) - elif verb == "restore": - from . import _restore - - exit(_restore.main(argv, prog=prog)) diff --git a/rethinkdb/_dump.py b/rethinkdb/_dump.py deleted file mode 100755 index ec8a714b..00000000 --- a/rethinkdb/_dump.py +++ /dev/null @@ -1,260 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 RethinkDB -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This file incorporates work covered by the following copyright: -# Copyright 2010-2016 RethinkDB, all rights reserved. - - -"""`rethinkdb-dump` creates an archive of data from a RethinkDB cluster""" - -from __future__ import print_function - -import datetime -import os -import platform -import shutil -import sys -import tarfile -import tempfile -import time -import traceback - -from rethinkdb import _export, utils_common -from rethinkdb.logger import default_logger - -usage = ( - "rethinkdb dump [-c HOST:PORT] [-p] [--password-file FILENAME] [--tls-cert FILENAME] [-f FILE] " - "[--clients NUM] [-e (DB | DB.TABLE)]..." -) -help_epilog = """ -EXAMPLES: -rethinkdb dump -c mnemosyne:39500 - Archive all data from a cluster running on host 'mnemosyne' with a client port at 39500. - -rethinkdb dump -e test -f rdb_dump.tar.gz - Archive only the 'test' database from a local cluster into a named file. 
- -rethinkdb dump -c hades -e test.subscribers -p - Archive a specific table from a cluster running on host 'hades' which requires a password.""" - - -def parse_options(argv, prog=None): - parser = utils_common.CommonOptionsParser( - usage=usage, epilog=help_epilog, prog=prog - ) - - parser.add_option( - "-f", - "--file", - dest="out_file", - metavar="FILE", - default=None, - help="file to write archive to (defaults to rethinkdb_dump_DATE_TIME.tar.gz);\nif FILE is -, use standard " - "output (note that intermediate files will still be written to the --temp-dir directory)", - ) - parser.add_option( - "-e", - "--export", - dest="db_tables", - metavar="DB|DB.TABLE", - default=[], - type="db_table", - help="limit dump to the given database or table (may be specified multiple times)", - action="append", - ) - - parser.add_option( - "--temp-dir", - dest="temp_dir", - metavar="directory", - default=None, - help="the directory to use for intermediary results", - ) - parser.add_option( - "--overwrite-file", - dest="overwrite", - default=False, - help="overwrite -f/--file if it exists", - action="store_true", - ) - parser.add_option( - "--clients", - dest="clients", - metavar="NUM", - default=3, - help="number of tables to export simultaneously (default: 3)", - type="pos_int", - ) - parser.add_option( - "--read-outdated", - dest="outdated", - default=False, - help="use outdated read mode", - action="store_true", - ) - - options, args = parser.parse_args(argv) - - # Check validity of arguments - if len(args) != 0: - raise parser.error( - "No positional arguments supported. Unrecognized option(s): %s" % args - ) - - # Add dump name - if platform.system() == "Windows" or platform.system().lower().startswith("cygwin"): - options.dump_name = "rethinkdb_dump_%s" % datetime.datetime.today().strftime( - "%Y-%m-%dT%H-%M-%S" - ) # no colons in name - else: - options.dump_name = "rethinkdb_dump_%s" % datetime.datetime.today().strftime( - "%Y-%m-%dT%H:%M:%S" - ) - - # Verify valid output file - if options.out_file == "-": - options.out_file = sys.stdout - options.quiet = True - elif options.out_file is None: - options.out_file = os.path.realpath("%s.tar.gz" % options.dump_name) - else: - options.out_file = os.path.realpath(options.out_file) - - if options.out_file is not sys.stdout: - if os.path.exists(options.out_file) and not options.overwrite: - parser.error("Output file already exists: %s" % options.out_file) - if os.path.exists(options.out_file) and not os.path.isfile(options.out_file): - parser.error( - "There is a non-file at the -f/--file location: %s" % options.out_file - ) - - # Verify valid client count - if options.clients < 1: - raise RuntimeError( - "Error: invalid number of clients (%d), must be greater than zero" - % options.clients - ) - - # Make sure the temporary directory exists and is accessible - if options.temp_dir is not None: - if not os.path.exists(options.temp_dir): - try: - os.makedirs(options.temp_dir) - except OSError: - parser.error( - "Could not create temporary directory: %s" % options.temp_dir - ) - if not os.path.isdir(options.temp_dir): - parser.error( - "Temporary directory doesn't exist or is not a directory: %s" - % options.temp_dir - ) - if not os.access(options.temp_dir, os.W_OK): - parser.error("Temporary directory inaccessible: %s" % options.temp_dir) - - return options - - -def main(argv=None, prog=None): - options = parse_options(argv or sys.argv[1:], prog=prog) - try: - if not options.quiet: - # Print a warning about the capabilities of dump, so no one is confused 
(hopefully) - print( - """\ - NOTE: 'rethinkdb-dump' saves data, secondary indexes, and write hooks, but does *not* save - cluster metadata. You will need to recreate your cluster setup yourself after - you run 'rethinkdb-restore'.""" - ) - - try: - start_time = time.time() - archive = None - - # -- _export options - need to be kep in-sync with _export - - options.directory = os.path.realpath(tempfile.mkdtemp(dir=options.temp_dir)) - options.fields = None - options.delimiter = None - options.format = "json" - - # -- export to a directory - - if not options.quiet: - print(" Exporting to temporary directory...") - - try: - _export.run(options) - except Exception as exc: - default_logger.exception(exc) - - if options.debug: - sys.stderr.write("\n%s\n" % traceback.format_exc()) - - raise Exception("Error: export failed, %s" % exc) - - # -- zip directory - - if not options.quiet: - print(" Zipping export directory...") - - try: - if hasattr(options.out_file, "read"): - archive = tarfile.open(fileobj=options.out_file, mode="w:gz") - else: - archive = tarfile.open(name=options.out_file, mode="w:gz") - for curr, _, files in os.walk(os.path.realpath(options.directory)): - for data_file in files: - full_path = os.path.join(options.directory, curr, data_file) - archive_path = os.path.join( - options.dump_name, - os.path.relpath(full_path, options.directory), - ) - archive.add(full_path, arcname=archive_path) - os.unlink(full_path) - finally: - if archive: - archive.close() - - # -- - - if not options.quiet: - print( - "Done (%.2f seconds): %s" - % ( - time.time() - start_time, - options.out_file.name - if hasattr(options.out_file, "name") - else options.out_file, - ) - ) - except KeyboardInterrupt: - time.sleep(0.2) - raise RuntimeError("Interrupted") - finally: - if os.path.exists(options.directory): - shutil.rmtree(options.directory) - - except Exception as ex: - if options.debug: - traceback.print_exc() - print(ex, file=sys.stderr) - return 1 - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/rethinkdb/_export.py b/rethinkdb/_export.py deleted file mode 100755 index 01bae2f4..00000000 --- a/rethinkdb/_export.py +++ /dev/null @@ -1,676 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 RethinkDB -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This file incorporates work covered by the following copyright: -# Copyright 2010-2016 RethinkDB, all rights reserved. - -from __future__ import print_function - -import csv -import ctypes -import datetime -import json -import multiprocessing -import numbers -import optparse -import os -import platform -import signal -import sys -import tempfile -import time -import traceback -from multiprocessing.queues import SimpleQueue - -import six - -from rethinkdb import errors, query, utils_common -from rethinkdb.logger import default_logger - -try: - unicode -except NameError: - unicode = str - - -usage = """rethinkdb export [-c HOST:PORT] [-p] [--password-file FILENAME] [--tls-cert filename] [-d DIR] - [-e (DB | DB.TABLE)]... 
- [--format (csv | json | ndjson)] [--fields FIELD,FIELD...] [--delimiter CHARACTER] - [--clients NUM]""" -help_description = ( - "`rethinkdb export` exports data from a RethinkDB cluster into a directory" -) -help_epilog = """ -EXAMPLES: -rethinkdb export -c mnemosyne:39500 - Export all data from a cluster running on host 'mnemosyne' with a client port at 39500. - -rethinkdb export -e test -d rdb_export - Export only the 'test' database on a local cluster into a named directory. - -rethinkdb export -c hades -e test.subscribers -p - Export a specific table from a cluster running on host 'hades' which requires a password. - -rethinkdb export --format csv -e test.history --fields time,message --delimiter ';' - Export a specific table from a local cluster in CSV format with the fields 'time' and 'message', - using a semicolon as field delimiter (rather than a comma). - -rethinkdb export --fields id,value -e test.data - Export a specific table from a local cluster in JSON format with only the fields 'id' and 'value'. -""" - - -def parse_options(argv, prog=None): - if platform.system() == "Windows" or platform.system().lower().startswith("cygwin"): - # no colons in name - default_dir = "rethinkdb_export_%s" % datetime.datetime.today().strftime( - "%Y-%m-%dT%H-%M-%S" - ) - else: - # " - default_dir = "rethinkdb_export_%s" % datetime.datetime.today().strftime( - "%Y-%m-%dT%H:%M:%S" - ) - - parser = utils_common.CommonOptionsParser( - usage=usage, description=help_description, epilog=help_epilog, prog=prog - ) - - parser.add_option( - "-d", - "--directory", - dest="directory", - metavar="DIRECTORY", - default=default_dir, - help="directory to output to (default: rethinkdb_export_DATE_TIME)", - type="new_file", - ) - parser.add_option( - "-e", - "--export", - dest="db_tables", - metavar="DB|DB.TABLE", - default=[], - help="limit dump to the given database or table (may be specified multiple times)", - action="append", - type="db_table", - ) - parser.add_option( - "--fields", - dest="fields", - metavar=",...", - default=None, - help="export only specified fields (required for CSV format)", - ) - parser.add_option( - "--format", - dest="format", - metavar="json|csv|ndjson", - default="json", - help="format to write (defaults to json. ndjson is newline delimited json.)", - type="choice", - choices=["json", "csv", "ndjson"], - ) - parser.add_option( - "--clients", - dest="clients", - metavar="NUM", - default=3, - help="number of tables to export simultaneously (default: 3)", - type="pos_int", - ) - parser.add_option( - "--read-outdated", - dest="outdated", - default=False, - help="use outdated read mode", - action="store_true", - ) - - csvGroup = optparse.OptionGroup(parser, "CSV options") - csvGroup.add_option( - "--delimiter", - dest="delimiter", - metavar="CHARACTER", - default=None, - help="character to be used as field delimiter, or '\\t' for tab (default: ',')", - ) - parser.add_option_group(csvGroup) - - options, args = parser.parse_args(argv) - - # -- Check validity of arguments - - if len(args) != 0: - parser.error( - "No positional arguments supported. 
Unrecognized option(s): %s" % args - ) - - if options.fields: - if len(options.db_tables) != 1 or options.db_tables[0].table is None: - parser.error( - "The --fields option can only be used when exporting a single table" - ) - options.fields = options.fields.split(",") - - # - format specific validation - - if options.format == "csv": - if options.fields is None: - parser.error("CSV files require the '--fields' option to be specified.") - - if options.delimiter is None: - options.delimiter = "," - elif options.delimiter == "\\t": - options.delimiter = "\t" - elif len(options.delimiter) != 1: - parser.error( - "Specify exactly one character for the --delimiter option: %s" - % options.delimiter - ) - else: - if options.delimiter: - parser.error("--delimiter option is only valid for CSV file formats") - - # - - - return options - - -def json_writer(filename, fields, task_queue, error_queue, format): - try: - with open(filename, "w") as out: - first = True - if format != "ndjson": - out.write("[") - item = task_queue.get() - while not isinstance(item, StopIteration): - row = item[0] - if fields is not None: - for item in list(row.keys()): - if item not in fields: - del row[item] - if first: - if format == "ndjson": - out.write(json.dumps(row)) - else: - out.write("\n" + json.dumps(row)) - first = False - elif format == "ndjson": - out.write("\n" + json.dumps(row)) - else: - out.write(",\n" + json.dumps(row)) - - item = task_queue.get() - if format != "ndjson": - out.write("\n]\n") - except BaseException: - ex_type, ex_class, tb = sys.exc_info() - error_queue.put((ex_type, ex_class, traceback.extract_tb(tb))) - - # Read until the exit task so the readers do not hang on pushing onto the queue - while not isinstance(task_queue.get(), StopIteration): - pass - - -def csv_writer(filename, fields, delimiter, task_queue, error_queue): - try: - with open(filename, "w") as out: - out_writer = csv.writer(out, delimiter=delimiter) - out_writer.writerow(fields) - - item = task_queue.get() - while not isinstance(item, StopIteration): - row = item[0] - info = [] - # If the data is a simple type, just write it directly, otherwise, write it as json - for field in fields: - if field not in row: - info.append(None) - elif isinstance(row[field], numbers.Number): - info.append(str(row[field])) - elif isinstance(row[field], str): - info.append(row[field]) - elif isinstance(row[field], unicode): - info.append(row[field].encode("utf-8")) - else: - if str == unicode: - info.append(json.dumps(row[field])) - else: - info.append(json.dumps(row[field]).encode("utf-8")) - out_writer.writerow(info) - item = task_queue.get() - except BaseException: - ex_type, ex_class, tb = sys.exc_info() - error_queue.put((ex_type, ex_class, traceback.extract_tb(tb))) - - # Read until the exit task so the readers do not hang on pushing onto the queue - while not isinstance(task_queue.get(), StopIteration): - pass - - -def export_table( - db, - table, - directory, - options, - error_queue, - progress_info, - sindex_counter, - hook_counter, - exit_event, -): - signal.signal( - signal.SIGINT, signal.SIG_DFL - ) # prevent signal handlers from being set in child processes - - writer = None - - has_write_hooks = utils_common.check_minimum_version(options, "2.3.7", False) - - try: - # -- get table info - - table_info = options.retryQuery( - "table info: %s.%s" % (db, table), query.db(db).table(table).info() - ) - - # Rather than just the index names, store all index information - table_info["indexes"] = options.retryQuery( - "table index data 
%s.%s" % (db, table), - query.db(db).table(table).index_status(), - run_options={"binary_format": "raw"}, - ) - - sindex_counter.value += len(table_info["indexes"]) - - if has_write_hooks: - table_info["write_hook"] = options.retryQuery( - "table write hook data %s.%s" % (db, table), - query.db(db).table(table).get_write_hook(), - run_options={"binary_format": "raw"}, - ) - - if table_info["write_hook"] is not None: - hook_counter.value += 1 - - with open(os.path.join(directory, db, table + ".info"), "w") as info_file: - info_file.write(json.dumps(table_info) + "\n") - with sindex_counter.get_lock(): - sindex_counter.value += len(table_info["indexes"]) - # -- start the writer - if six.PY3: - ctx = multiprocessing.get_context(multiprocessing.get_start_method()) - task_queue = SimpleQueue(ctx=ctx) - else: - task_queue = SimpleQueue() - - writer = None - if options.format == "json": - filename = directory + "/%s/%s.json" % (db, table) - writer = multiprocessing.Process( - target=json_writer, - args=( - filename, - options.fields, - task_queue, - error_queue, - options.format, - ), - ) - elif options.format == "csv": - filename = directory + "/%s/%s.csv" % (db, table) - writer = multiprocessing.Process( - target=csv_writer, - args=( - filename, - options.fields, - options.delimiter, - task_queue, - error_queue, - ), - ) - elif options.format == "ndjson": - filename = directory + "/%s/%s.ndjson" % (db, table) - writer = multiprocessing.Process( - target=json_writer, - args=( - filename, - options.fields, - task_queue, - error_queue, - options.format, - ), - ) - else: - raise RuntimeError("unknown format type: %s" % options.format) - writer.start() - - # -- read in the data source - - # - - - lastPrimaryKey = None - read_rows = 0 - run_options = {"time_format": "raw", "binary_format": "raw"} - if options.outdated: - run_options["read_mode"] = "outdated" - cursor = options.retryQuery( - "inital cursor for %s.%s" % (db, table), - query.db(db).table(table).order_by(index=table_info["primary_key"]), - run_options=run_options, - ) - while not exit_event.is_set(): - try: - for row in cursor: - # bail on exit - if exit_event.is_set(): - break - - # add to the output queue - task_queue.put([row]) - lastPrimaryKey = row[table_info["primary_key"]] - read_rows += 1 - - # Update the progress every 20 rows - if read_rows % 20 == 0: - progress_info[0].value = read_rows - - else: - # Export is done - since we used estimates earlier, update the actual table size - progress_info[0].value = read_rows - progress_info[1].value = read_rows - break - - except (errors.ReqlTimeoutError, errors.ReqlDriverError): - # connection problem, re-setup the cursor - try: - cursor.close() - except errors.ReqlError as exc: - default_logger.exception(exc) - - cursor = options.retryQuery( - "backup cursor for %s.%s" % (db, table), - query.db(db) - .table(table) - .between(lastPrimaryKey, query.maxval, left_bound="open") - .order_by(index=table_info["primary_key"]), - run_options=run_options, - ) - - except (errors.ReqlError, errors.ReqlDriverError) as ex: - error_queue.put( - ( - RuntimeError, - RuntimeError(ex.message), - traceback.extract_tb(sys.exc_info()[2]), - ) - ) - except BaseException: - ex_type, ex_class, tb = sys.exc_info() - error_queue.put((ex_type, ex_class, traceback.extract_tb(tb))) - finally: - if writer and writer.is_alive(): - task_queue.put(StopIteration()) - writer.join() - - -def abort_export(signum, frame, exit_event, interrupt_event): - interrupt_event.set() - exit_event.set() - - -# We sum up the row count 
from all tables for total percentage completion -# This is because table exports can be staggered when there are not enough clients -# to export all of them at once. As a result, the progress bar will not necessarily -# move at the same rate for different tables. - - -def update_progress(progress_info, options): - rows_done = 0 - total_rows = 1 - for current, max_count in progress_info: - curr_val = current.value - max_val = max_count.value - if curr_val < 0: - # There is a table that hasn't finished counting yet, we can't report progress - rows_done = 0 - break - else: - rows_done += curr_val - total_rows += max_val - - if not options.quiet: - utils_common.print_progress(float(rows_done) / total_rows, indent=4) - - -def run_clients(options, workingDir, db_table_set): - # Spawn one client for each db.table, up to options.clients at a time - exit_event = multiprocessing.Event() - processes = [] - if six.PY3: - ctx = multiprocessing.get_context(multiprocessing.get_start_method()) - error_queue = SimpleQueue(ctx=ctx) - else: - error_queue = SimpleQueue() - interrupt_event = multiprocessing.Event() - sindex_counter = multiprocessing.Value(ctypes.c_longlong, 0) - hook_counter = multiprocessing.Value(ctypes.c_longlong, 0) - - signal.signal( - signal.SIGINT, lambda a, b: abort_export(a, b, exit_event, interrupt_event) - ) - errors = [] - - try: - progress_info = [] - arg_lists = [] - for db, table in db_table_set: - - tableSize = int( - options.retryQuery( - "count", - query.db(db).table(table).info()["doc_count_estimates"].sum(), - ) - ) - - progress_info.append( - ( - multiprocessing.Value(ctypes.c_longlong, 0), - multiprocessing.Value(ctypes.c_longlong, tableSize), - ) - ) - arg_lists.append( - ( - db, - table, - workingDir, - options, - error_queue, - progress_info[-1], - sindex_counter, - hook_counter, - exit_event, - ) - ) - - # Wait for all tables to finish - while processes or arg_lists: - time.sleep(0.1) - - while not error_queue.empty(): - exit_event.set() # Stop immediately if an error occurs - errors.append(error_queue.get()) - - processes = [process for process in processes if process.is_alive()] - - if len(processes) < options.clients and len(arg_lists) > 0: - new_process = multiprocessing.Process( - target=export_table, args=arg_lists.pop(0) - ) - new_process.start() - processes.append(new_process) - - update_progress(progress_info, options) - - # If we were successful, make sure 100% progress is reported - # (rows could have been deleted which would result in being done at less than 100%) - if len(errors) == 0 and not interrupt_event.is_set() and not options.quiet: - utils_common.print_progress(1.0, indent=4) - - # Continue past the progress output line and print total rows processed - def plural(num, text, plural_text): - return "%d %s" % (num, text if num == 1 else plural_text) - - if not options.quiet: - print( - "\n %s exported from %s, with %s, and %s" - % ( - plural( - sum([max(0, info[0].value) for info in progress_info]), - "row", - "rows", - ), - plural(len(db_table_set), "table", "tables"), - plural( - sindex_counter.value, "secondary index", "secondary indexes" - ), - plural(hook_counter.value, "hook function", "hook functions"), - ) - ) - finally: - signal.signal(signal.SIGINT, signal.SIG_DFL) - - if interrupt_event.is_set(): - raise RuntimeError("Interrupted") - - if len(errors) != 0: - # multiprocessing queues don't handle tracebacks, so they've already been stringified in the queue - for error in errors: - print("%s" % error[1], file=sys.stderr) - if options.debug: 
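
[Editor's sketch] As an aside on the scheduling loop in run_clients above: it implements a bounded process pool by hand. Here is a condensed, hypothetical sketch of that pattern, with at most `clients` exporter processes alive at once, finished workers reaped on each pass, and new ones started while argument lists remain. The names run_bounded and target are invented; the real loop additionally drains an error queue and updates the progress bar on every iteration.

    import multiprocessing
    import time

    def run_bounded(arg_lists, clients, target):
        processes = []
        while processes or arg_lists:
            time.sleep(0.1)
            # keep only the workers that are still running
            processes = [p for p in processes if p.is_alive()]
            # top the pool back up to `clients` while work remains
            while len(processes) < clients and arg_lists:
                worker = multiprocessing.Process(target=target, args=arg_lists.pop(0))
                worker.start()
                processes.append(worker)
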
- print( - "%s traceback: %s" % (error[0].__name__, error[2]), file=sys.stderr - ) - raise RuntimeError("Errors occurred during export") - - -def run(options): - # Make sure this isn't a pre-`reql_admin` cluster - which could result in data loss - # if the user has a database named 'rethinkdb' - utils_common.check_minimum_version(options, "1.6") - - # get the complete list of tables - db_table_set = set() - all_tables = [ - utils_common.DbTable(x["db"], x["name"]) - for x in options.retryQuery( - "list tables", - query.db("rethinkdb").table("table_config").pluck(["db", "name"]), - ) - ] - if not options.db_tables: - db_table_set = all_tables # default to all tables - else: - all_databases = options.retryQuery( - "list dbs", query.db_list().filter(query.row.ne("rethinkdb")) - ) - for db_table in options.db_tables: - db, table = db_table - - if db == "rethinkdb": - raise AssertionError("Can not export tables from the system database") - - if db not in all_databases: - raise RuntimeError("Error: Database '%s' not found" % db) - - if ( - table is None - ): # This is just a db name, implicitly selecting all tables in that db - db_table_set.update(set([x for x in all_tables if x.db == db])) - else: - if utils_common.DbTable(db, table) not in all_tables: - raise RuntimeError("Error: Table not found: '%s.%s'" % (db, table)) - db_table_set.add(db_table) - - # Determine the actual number of client processes we'll have - options.clients = min(options.clients, len(db_table_set)) - - # create the working directory and its structure - parent_dir = os.path.dirname(options.directory) - if not os.path.exists(parent_dir): - if os.path.isdir(parent_dir): - raise RuntimeError( - "Output parent directory is not a directory: %s" % parent_dir - ) - try: - os.makedirs(parent_dir) - except OSError as e: - raise optparse.OptionValueError( - "Unable to create parent directory for %s: %s" - % (parent_dir, e.strerror) - ) - working_dir = tempfile.mkdtemp( - prefix=os.path.basename(options.directory) + "_partial_", - dir=os.path.dirname(options.directory), - ) - try: - for db in set([database for database, _ in db_table_set]): - os.makedirs(os.path.join(working_dir, str(db))) - except OSError as e: - raise RuntimeError( - "Failed to create temporary directory (%s): %s" % (e.filename, e.strerror) - ) - - # Run the export - run_clients(options, working_dir, db_table_set) - - # Move the temporary directory structure over to the original output directory - try: - if os.path.isdir(options.directory): - os.rmdir( - options.directory - ) # an empty directory is created here when using _dump - elif os.path.exists(options.directory): - raise Exception( - "There was a file at the output location: %s" % options.directory - ) - os.rename(working_dir, options.directory) - except OSError as e: - raise RuntimeError( - "Failed to move temporary directory to output directory (%s): %s" - % (options.directory, e.strerror) - ) - - -def main(argv=None, prog=None): - options = parse_options(argv or sys.argv[1:], prog=prog) - - start_time = time.time() - try: - run(options) - except Exception as ex: - if options.debug: - traceback.print_exc() - print(ex, file=sys.stderr) - return 1 - if not options.quiet: - print(" Done (%.2f seconds)" % (time.time() - start_time)) - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/rethinkdb/_import.py b/rethinkdb/_import.py deleted file mode 100755 index 0ce90bfc..00000000 --- a/rethinkdb/_import.py +++ /dev/null @@ -1,1728 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 RethinkDB 
-# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This file incorporates work covered by the following copyright: -# Copyright 2010-2016 RethinkDB, all rights reserved. - -"""`rethinkdb import` loads data into a RethinkDB cluster""" - -from __future__ import print_function - -import codecs -import collections -import csv -import ctypes -import json -import multiprocessing -import optparse -import os -import signal -import sys -import time -import traceback -from multiprocessing.queues import Queue, SimpleQueue - -import six - -from rethinkdb import ast, errors, query, utils_common -from rethinkdb.logger import default_logger - -try: - unicode -except NameError: - unicode = str - -try: - from Queue import Empty, Full -except ImportError: - from queue import Empty, Full - - -# json parameters -JSON_READ_CHUNK_SIZE = 128 * 1024 -JSON_MAX_BUFFER_SIZE = 128 * 1024 * 1024 -MAX_NESTING_DEPTH = 100 - -Error = collections.namedtuple("Error", ["message", "traceback", "file"]) - - -class SourceFile(object): - format = None # set by subclasses - - name = None - - db = None - table = None - primary_key = None - indexes = None - write_hook = None - source_options = None - - start_time = None - end_time = None - - query_runner = None - - _source = None # open filehandle for the source - - # - internal synchronization variables - - _bytes_size = None - _bytes_read = None # -1 until started - - _total_rows = None # -1 until known - _rows_read = None - _rows_written = None - - def __init__( - self, - source, - db, - table, - query_runner, - primary_key=None, - indexes=None, - write_hook=None, - source_options=None, - ): - - if self.format is None: - raise AssertionError( - "{class_name} must have a format".format( - class_name=self.__class__.__name__ - ) - ) - - if self.db == "rethinkdb": - raise AssertionError("Can not import tables into the system database") - - # query_runner - if not isinstance(query_runner, utils_common.RetryQuery): - raise AssertionError("Query runner is not instance of RetryQuery") - - self.query_runner = query_runner - - # reporting information - self._bytes_size = multiprocessing.Value(ctypes.c_longlong, -1) - self._bytes_read = multiprocessing.Value(ctypes.c_longlong, -1) - - self._total_rows = multiprocessing.Value(ctypes.c_longlong, -1) - self._rows_read = multiprocessing.Value(ctypes.c_longlong, 0) - self._rows_written = multiprocessing.Value(ctypes.c_longlong, 0) - - # source - if hasattr(source, "read"): - if unicode != str or "b" in source.mode: - # Python2.x or binary file, assume utf-8 encoding - self._source = codecs.getreader("utf-8")(source) - else: - # assume that it has the right encoding on it - self._source = source - else: - try: - self._source = codecs.open(source, mode="r", encoding="utf-8") - except IOError as exc: - default_logger.exception(exc) - raise ValueError( - 'Unable to open source file "%s": %s' % (str(source), str(exc)) - ) - - if ( - hasattr(self._source, "name") - and self._source.name - and os.path.isfile(self._source.name) - ): - 
self._bytes_size.value = os.path.getsize(source) - if self._bytes_size.value == 0: - raise ValueError("Source is zero-length: %s" % source) - - # table info - self.db = db - self.table = table - self.primary_key = primary_key - self.indexes = indexes or [] - self.write_hook = write_hook or [] - - # options - self.source_options = source_options or { - "create_args": {"primary_key": self.primary_key} - } - - # name - if hasattr(self._source, "name") and self._source.name: - self.name = os.path.basename(self._source.name) - else: - self.name = "%s.%s" % (self.db, self.table) - - def __hash__(self): - return hash((self.db, self.table)) - - def get_line(self): - """Returns a single line from the file""" - raise NotImplementedError( - "This needs to be implemented on the %s subclass" % self.format - ) - - # - bytes - @property - def bytes_size(self): - return self._bytes_size.value - - @bytes_size.setter - def bytes_size(self, value): - self._bytes_size.value = value - - @property - def bytes_read(self): - return self._bytes_read.value - - @bytes_read.setter - def bytes_read(self, value): - self._bytes_read.value = value - - # - rows - @property - def total_rows(self): - return self._total_rows.value - - @total_rows.setter - def total_rows(self, value): - self._total_rows.value = value - - @property - def rows_read(self): - return self._rows_read.value - - @rows_read.setter - def rows_read(self, value): - self._rows_read.value = value - - @property - def rows_written(self): - return self._rows_written.value - - def add_rows_written(self, increment): # we have multiple writers to coordinate - with self._rows_written.get_lock(): - self._rows_written.value += increment - - # - percent done - @property - def percent_done(self): - """return a float between 0 and 1 for a reasonable guess of percentage complete""" - # assume that reading takes 50% of the time and writing the other 50% - completed = 0.0 # of 2.0 - - # - add read percentage - if ( - self._bytes_size.value <= 0 - or self._bytes_size.value <= self._bytes_read.value - ): - completed += 1.0 - elif self._bytes_read.value < 0 and self._total_rows.value >= 0: - # done by rows read - if self._rows_read > 0: - completed += float(self._rows_read) / float(self._total_rows.value) - else: - # done by bytes read - if self._bytes_read.value > 0: - completed += float(self._bytes_read.value) / float( - self._bytes_size.value - ) - - # - add written percentage - if self._rows_read.value or self._rows_written.value: - total_rows = float(self._total_rows.value) - if total_rows == 0: - completed += 1.0 - elif total_rows < 0: - # a guesstimate - per_row_size = float(self._bytes_read.value) / float( - self._rows_read.value - ) - total_rows = float(self._rows_read.value) + ( - float(self._bytes_size.value - self._bytes_read.value) - / per_row_size - ) - completed += float(self._rows_written.value) / total_rows - else: - # accurate count - completed += float(self._rows_written.value) / total_rows - - # - return the value - return completed * 0.5 - - def setup_table(self): - """Ensure that the db, table, and indexes exist and are correct""" - - # - ensure the table exists and is ready - self.query_runner( - "create table: %s.%s" % (self.db, self.table), - ast.expr([self.table]) - .set_difference(query.db(self.db).table_list()) - .for_each( - query.db(self.db).table_create( - query.row, - **self.source_options["create_args"] - if "create_args" in self.source_options - else {} - ) - ), - ) - - self.query_runner( - "wait for %s.%s" % (self.db, self.table), - 
query.db(self.db).table(self.table).wait(timeout=30), - ) - - # - ensure that the primary key on the table is correct - primary_key = self.query_runner( - "primary key %s.%s" % (self.db, self.table), - query.db(self.db).table(self.table).info()["primary_key"], - ) - if self.primary_key is None: - self.primary_key = primary_key - elif primary_key != self.primary_key: - raise RuntimeError( - "Error: table %s.%s primary key was `%s` rather than the expected: %s" - % (self.db, self.table, primary_key, self.primary_key) - ) - - def restore_indexes(self, warning_queue): - # recreate secondary indexes - dropping existing on the assumption they are wrong - if self.indexes: - existing_indexes = self.query_runner( - "indexes from: %s.%s" % (self.db, self.table), - query.db(self.db).table(self.table).index_list(), - ) - try: - created_indexes = [] - for index in self.indexes: - if index["index"] in existing_indexes: # drop existing versions - self.query_runner( - "drop index: %s.%s:%s" - % (self.db, self.table, index["index"]), - query.db(self.db) - .table(self.table) - .index_drop(index["index"]), - ) - self.query_runner( - "create index: %s.%s:%s" - % (self.db, self.table, index["index"]), - query.db(self.db) - .table(self.table) - .index_create(index["index"], index["function"]), - ) - created_indexes.append(index["index"]) - - # wait for all of the created indexes to build - self.query_runner( - "waiting for indexes on %s.%s" % (self.db, self.table), - query.db(self.db) - .table(self.table) - .index_wait(query.args(created_indexes)), - ) - except RuntimeError: - exception_type, exception_class, trcback = sys.exc_info() - warning_queue.put( - ( - exception_type, - exception_class, - traceback.extract_tb(trcback), - self._source.name, - ) - ) - - if self.write_hook: - self.query_runner( - "Write hook from: %s.%s" % (self.db, self.table), - query.db(self.db).table(self.table).get_write_hook(), - ) - try: - self.query_runner( - "drop hook: %s.%s" % (self.db, self.table), - query.db(self.db).table(self.table).set_write_hook(None), - ) - self.query_runner( - "create hook: %s.%s:%s" % (self.db, self.table, self.write_hook), - query.db(self.db) - .table(self.table) - .set_write_hook(self.write_hook["function"]), - ) - except RuntimeError: - exception_type, exception_class, trcback = sys.exc_info() - warning_queue.put( - ( - exception_type, - exception_class, - traceback.extract_tb(trcback), - self._source.name, - ) - ) - - def batches(self, batch_size=None, warning_queue=None): - - # setup table - self.setup_table() - - # default batch_size - if batch_size is None: - batch_size = utils_common.default_batch_size - else: - batch_size = int(batch_size) - - if batch_size <= 0: - raise AssertionError("Batch size can not be less than one") - - # setup - self.setup_file(warning_queue=warning_queue) - - # - yield batches - - batch = [] - try: - need_more_data = False - while True: - if need_more_data: - self.fill_buffer() - need_more_data = False - - while len(batch) < batch_size: - try: - row = self.get_line() - # ToDo: validate the line - batch.append(row) - except NeedMoreData: - need_more_data = True - break - else: - yield batch - batch = [] - - except StopIteration as e: - # yield any final batch - if batch: - yield batch - - # - check the end of the file - - self.teardown() - - # - rebuild indexes - if self.indexes: - self.restore_indexes(warning_queue) - - def setup_file(self, warning_queue=None): - raise NotImplementedError("Subclasses need to implement this") - - def teardown(self): - pass - - def 
read_to_queue( - self, - work_queue, - exit_event, - error_queue, - warning_queue, - timing_queue, - fields=None, - ignore_signals=True, - batch_size=None, - ): - if ( - ignore_signals - ): # ToDo: work out when we are in a worker process automatically - signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should ignore these - - if batch_size is None: - batch_size = utils_common.default_batch_size - - self.start_time = time.time() - try: - timePoint = time.time() - for batch in self.batches(warning_queue=warning_queue): - timing_queue.put(("reader_work", time.time() - timePoint)) - timePoint = time.time() - - # apply the fields filter - if fields: - for row in batch: - for key in [x for x in row.keys() if x not in fields]: - del row[key] - - while not exit_event.is_set(): - try: - work_queue.put((self.db, self.table, batch), timeout=0.1) - self._rows_read.value += len(batch) - break - except Full: - pass - else: - break - timing_queue.put(("reader_wait", time.time() - timePoint)) - timePoint = time.time() - - # - report relevant errors - except Exception as exc: - default_logger.exception(exc) - error_queue.put(Error(str(exc), traceback.format_exc(), self.name)) - exit_event.set() - raise - finally: - self.end_time = time.time() - - -class NeedMoreData(Exception): - pass - - -class JsonSourceFile(SourceFile): - format = "json" - - decoder = json.JSONDecoder() - json_array = None - found_first = False - - _buffer_size = JSON_READ_CHUNK_SIZE - _buffer_str = None - _buffer_pos = None - _buffer_end = None - - def fill_buffer(self): - if self._buffer_str is None: - self._buffer_str = "" - self._buffer_pos = 0 - self._buffer_end = 0 - elif self._buffer_pos == 0: - # double the buffer under the assumption that the documents are too large to fit - if self._buffer_size == JSON_MAX_BUFFER_SIZE: - raise Exception( - "Error: JSON max buffer size exceeded on file %s (from position %d). Use '--max-document-size' to " - "extend your buffer." 
% (self.name, self.bytes_processed) - ) - self._buffer_size = min(self._buffer_size * 2, JSON_MAX_BUFFER_SIZE) - - # add more data - read_target = self._buffer_size - self._buffer_end + self._buffer_pos - - if read_target < 1: - raise AssertionError("Can not set the read target and full the buffer") - - new_chunk = self._source.read(read_target) - - if len(new_chunk) == 0: - raise StopIteration() # file ended - - self._buffer_str = self._buffer_str[self._buffer_pos :] + new_chunk - self._bytes_read.value += len(new_chunk) - - # reset markers - self._buffer_pos = 0 - self._buffer_end = len(self._buffer_str) - 1 - - def get_line(self): - """Return a line from the current _buffer_str, or raise NeedMoreData trying""" - - # advance over any whitespace - self._buffer_pos = json.decoder.WHITESPACE.match( - self._buffer_str, self._buffer_pos - ).end() - if self._buffer_pos >= self._buffer_end: - raise NeedMoreData() - - # read over a comma if we are not the first item in a json_array - if ( - self.json_array - and self.found_first - and self._buffer_str[self._buffer_pos] == "," - ): - self._buffer_pos += 1 - if self._buffer_pos >= self._buffer_end: - raise NeedMoreData() - - # advance over any post-comma whitespace - self._buffer_pos = json.decoder.WHITESPACE.match( - self._buffer_str, self._buffer_pos - ).end() - if self._buffer_pos >= self._buffer_end: - raise NeedMoreData() - - # parse and return an object - try: - row, self._buffer_pos = self.decoder.raw_decode( - self._buffer_str, idx=self._buffer_pos - ) - self.found_first = True - return row - except (ValueError, IndexError): - raise NeedMoreData() - - def setup_file(self, warning_queue=None): - # - move to the first record - - # advance through any leading whitespace - while True: - self.fill_buffer() - self._buffer_pos = json.decoder.WHITESPACE.match(self._buffer_str, 0).end() - if self._buffer_pos == 0: - break - - # check the first character - try: - if self._buffer_str[0] == "[": - self.json_array = True - self._buffer_pos = 1 - elif self._buffer_str[0] == "{": - self.json_array = False - else: - raise ValueError( - "Error: JSON format not recognized - file does not begin with an object or array" - ) - except IndexError: - raise ValueError("Error: JSON file was empty of content") - - def teardown(self): - - # - check the end of the file - # note: fill_buffer should have guaranteed that we have only the data in the end - - # advance through any leading whitespace - self._buffer_pos = json.decoder.WHITESPACE.match( - self._buffer_str, self._buffer_pos - ).end() - - # check the end of the array if we have it - if self.json_array: - if self._buffer_str[self._buffer_pos] != "]": - snippit = self._buffer_str[self._buffer_pos :] - extra = ( - "" - if len(snippit) <= 100 - else " and %d more characters" % (len(snippit) - 100) - ) - raise ValueError( - "Error: JSON array did not end cleanly, rather with: <<%s>>%s" - % (snippit[:100], extra) - ) - self._buffer_pos += 1 - - # advance through any trailing whitespace - self._buffer_pos = json.decoder.WHITESPACE.match( - self._buffer_str, self._buffer_pos - ).end() - snippit = self._buffer_str[self._buffer_pos :] - if len(snippit) > 0: - extra = ( - "" - if len(snippit) <= 100 - else " and %d more characters" % (len(snippit) - 100) - ) - raise ValueError( - "Error: extra data after JSON data: <<%s>>%s" % (snippit[:100], extra) - ) - - -class CsvSourceFile(SourceFile): - format = "csv" - - no_header_row = False - custom_header = None - - _reader = None # instance of csv.reader - _columns = None # 
name of the columns - - def __init__(self, *args, **kwargs): - if "source_options" in kwargs and isinstance(kwargs["source_options"], dict): - if "no_header_row" in kwargs["source_options"]: - self.no_header_row = kwargs["source_options"]["no_header_row"] - if "custom_header" in kwargs["source_options"]: - self.custom_header = kwargs["source_options"]["custom_header"] - - super(CsvSourceFile, self).__init__(*args, **kwargs) - - def byte_counter(self): - """Generator for getting a byte count on a file being used""" - - for line in self._source: - self._bytes_read.value += len(line) - if unicode != str: - yield line.encode( - "utf-8" - ) # Python2.x csv module does not really handle unicode - else: - yield line - - def setup_file(self, warning_queue=None): - # - setup csv.reader with a byte counter wrapper - - self._reader = csv.reader(self.byte_counter()) - - # - get the header information for column names - - if not self.no_header_row: - self._columns = next(self._reader) - - # field names may override fields from the header - if self.custom_header is not None: - if not self.no_header_row: - warning_queue.put( - "Ignoring header row on %s: %s" % (self.name, str(self._columns)) - ) - self._columns = self.custom_header - elif self.no_header_row: - raise ValueError("Error: No field name information available") - - def get_line(self): - raw_row = next(self._reader) - if len(self._columns) != len(raw_row): - raise Exception( - "Error: '%s' line %d has an inconsistent number of columns: %s" - % (self.name, self._reader.line_num, str(raw_row)) - ) - - row = {} - for key, value in zip( - self._columns, raw_row - ): # note: we import all csv fields as strings - # treat empty fields as no entry rather than empty string - if value == "": - continue - row[key] = value if str == unicode else unicode(value, encoding="utf-8") - - return row - - -# == - - -usage = """rethinkdb import -d DIR [-c HOST:PORT] [--tls-cert FILENAME] [-p] [--password-file FILENAME] - [--force] [-i (DB | DB.TABLE)] [--clients NUM] - [--shards NUM_SHARDS] [--replicas NUM_REPLICAS] - rethinkdb import -f FILE --table DB.TABLE [-c HOST:PORT] [--tls-cert FILENAME] [-p] [--password-file FILENAME] - [--force] [--clients NUM] [--format (csv | json)] [--pkey PRIMARY_KEY] - [--shards NUM_SHARDS] [--replicas NUM_REPLICAS] - [--delimiter CHARACTER] [--custom-header FIELD,FIELD... [--no-header]]""" - -help_epilog = """ -EXAMPLES: - -rethinkdb import -d rdb_export -c mnemosyne:39500 --clients 128 - Import data into a cluster running on host 'mnemosyne' with a client port at 39500, - using 128 client connections and the named export directory. - -rethinkdb import -f site_history.csv --format csv --table test.history --pkey count - Import data into a local cluster and the table 'history' in the 'test' database, - using the named CSV file, and using the 'count' field as the primary key. - -rethinkdb import -d rdb_export -c hades -p -i test - Import data into a cluster running on host 'hades' which requires a password, - using only the database 'test' from the named export directory. - -rethinkdb import -f subscriber_info.json --fields id,name,hashtag --force - Import data into a local cluster using the named JSON file, and only the fields - 'id', 'name', and 'hashtag', overwriting any existing rows with the same primary key. 
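
[Editor's sketch] A note on how CsvSourceFile.get_line above turns a raw row into a document: every CSV value is imported as a string, and empty fields are skipped rather than imported as "". A minimal, self-contained sketch of that conversion follows; rows_as_dicts is an invented name, and the real class also counts bytes read and supports a custom header.

    import csv
    import io

    def rows_as_dicts(source_text, columns=None):
        reader = csv.reader(io.StringIO(source_text))
        columns = columns or next(reader)  # header row unless a custom one is supplied
        for raw_row in reader:
            if len(raw_row) != len(columns):
                raise ValueError("inconsistent number of columns: %s" % raw_row)
            # empty fields are treated as missing, matching the importer above
            yield {key: value for key, value in zip(columns, raw_row) if value != ""}
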
- -rethinkdb import -f user_data.csv --delimiter ';' --no-header --custom-header id,name,number - Import data into a local cluster using the named CSV file with no header and instead - use the fields 'id', 'name', and 'number', the delimiter is a semicolon (rather than - a comma). -""" - - -def parse_options(argv, prog=None): - parser = utils_common.CommonOptionsParser( - usage=usage, epilog=help_epilog, prog=prog - ) - - parser.add_option( - "--clients", - dest="clients", - metavar="CLIENTS", - default=8, - help="client connections to use (default: 8)", - type="pos_int", - ) - parser.add_option( - "--hard-durability", - dest="durability", - action="store_const", - default="soft", - help="use hard durability writes (slower, uses less memory)", - const="hard", - ) - parser.add_option( - "--force", - dest="force", - action="store_true", - default=False, - help="import even if a table already exists, overwriting duplicate primary keys", - ) - - parser.add_option( - "--batch-size", - dest="batch_size", - default=utils_common.default_batch_size, - help=optparse.SUPPRESS_HELP, - type="pos_int", - ) - - # Replication settings - replication_options_group = optparse.OptionGroup(parser, "Replication Options") - replication_options_group.add_option( - "--shards", - dest="create_args", - metavar="SHARDS", - help="shards to setup on created tables (default: 1)", - type="pos_int", - action="add_key", - ) - replication_options_group.add_option( - "--replicas", - dest="create_args", - metavar="REPLICAS", - help="replicas to setup on created tables (default: 1)", - type="pos_int", - action="add_key", - ) - parser.add_option_group(replication_options_group) - - # Directory import options - dir_import_group = optparse.OptionGroup(parser, "Directory Import Options") - dir_import_group.add_option( - "-d", - "--directory", - dest="directory", - metavar="DIRECTORY", - default=None, - help="directory to import data from", - ) - dir_import_group.add_option( - "-i", - "--import", - dest="db_tables", - metavar="DB|DB.TABLE", - default=[], - help="restore only the given database or table (may be specified multiple times)", - action="append", - type="db_table", - ) - dir_import_group.add_option( - "--no-secondary-indexes", - dest="indexes", - action="store_false", - default=None, - help="do not create secondary indexes", - ) - parser.add_option_group(dir_import_group) - - # File import options - file_import_group = optparse.OptionGroup(parser, "File Import Options") - file_import_group.add_option( - "-f", - "--file", - dest="file", - metavar="FILE", - default=None, - help="file to import data from", - type="file", - ) - file_import_group.add_option( - "--table", - dest="import_table", - metavar="DB.TABLE", - default=None, - help="table to import the data into", - ) - file_import_group.add_option( - "--fields", - dest="fields", - metavar="FIELD,...", - default=None, - help="limit which fields to use when importing one table", - ) - file_import_group.add_option( - "--format", - dest="format", - metavar="json|csv", - default=None, - help="format of the file (default: json, accepts newline delimited json)", - type="choice", - choices=["json", "csv"], - ) - file_import_group.add_option( - "--pkey", - dest="create_args", - metavar="PRIMARY_KEY", - default=None, - help="field to use as the primary key in the table", - action="add_key", - ) - parser.add_option_group(file_import_group) - - # CSV import options - csv_import_group = optparse.OptionGroup(parser, "CSV Options") - csv_import_group.add_option( - "--delimiter", - 
dest="delimiter", - metavar="CHARACTER", - default=None, - help="character separating fields, or '\\t' for tab", - ) - csv_import_group.add_option( - "--no-header", - dest="no_header", - action="store_true", - default=None, - help="do not read in a header of field names", - ) - csv_import_group.add_option( - "--custom-header", - dest="custom_header", - metavar="FIELD,...", - default=None, - help="header to use (overriding file header), must be specified if --no-header", - ) - parser.add_option_group(csv_import_group) - - # JSON import options - json_options_group = optparse.OptionGroup(parser, "JSON Options") - json_options_group.add_option( - "--max-document-size", - dest="max_document_size", - metavar="MAX_SIZE", - default=0, - help="maximum allowed size (bytes) for a single JSON document (default: 128MiB)", - type="pos_int", - ) - json_options_group.add_option( - "--max-nesting-depth", - dest="max_nesting_depth", - metavar="MAX_DEPTH", - default=0, - help="maximum depth of the JSON documents (default: 100)", - type="pos_int", - ) - parser.add_option_group(json_options_group) - - options, args = parser.parse_args(argv) - - # Check validity of arguments - - if len(args) != 0: - raise parser.error( - "No positional arguments supported. Unrecognized option(s): %s" % args - ) - - # - create_args - if options.create_args is None: - options.create_args = {} - - # - options based on file/directory import - - if options.directory and options.file: - parser.error("-f/--file and -d/--directory can not be used together") - - elif options.directory: - if not os.path.exists(options.directory): - parser.error("-d/--directory does not exist: %s" % options.directory) - if not os.path.isdir(options.directory): - parser.error("-d/--directory is not a directory: %s" % options.directory) - options.directory = os.path.realpath(options.directory) - - # disallow invalid options - if options.import_table: - parser.error("--table option is not valid when importing a directory") - if options.fields: - parser.error("--fields option is not valid when importing a directory") - if options.format: - parser.error("--format option is not valid when importing a directory") - if options.create_args: - parser.error("--pkey option is not valid when importing a directory") - - if options.delimiter: - parser.error("--delimiter option is not valid when importing a directory") - if options.no_header: - parser.error("--no-header option is not valid when importing a directory") - if options.custom_header: - parser.error( - "table create options are not valid when importing a directory: %s" - % ", ".join( - [x.lower().replace("_", " ") for x in options.custom_header.keys()] - ) - ) - - # check valid options - if not os.path.isdir(options.directory): - parser.error("Directory to import does not exist: %s" % options.directory) - - if options.fields and ( - len(options.db_tables) > 1 or options.db_tables[0].table is None - ): - parser.error( - "--fields option can only be used when importing a single table" - ) - - elif options.file: - if not os.path.exists(options.file): - parser.error("-f/--file does not exist: %s" % options.file) - if not os.path.isfile(options.file): - parser.error("-f/--file is not a file: %s" % options.file) - options.file = os.path.realpath(options.file) - - # format - if options.format is None: - options.format = os.path.splitext(options.file)[1].lstrip(".") - - # import_table - if options.import_table: - res = utils_common._tableNameRegex.match(options.import_table) - if res and res.group("table"): - 
options.import_table = utils_common.DbTable( - res.group("db"), res.group("table") - ) - else: - parser.error("Invalid --table option: %s" % options.import_table) - else: - parser.error("A value is required for --table when importing from a file") - - # fields - options.fields = options.fields.split(",") if options.fields else None - - # disallow invalid options - if options.db_tables: - parser.error("-i/--import can only be used when importing a directory") - if options.indexes: - parser.error( - "--no-secondary-indexes can only be used when importing a directory" - ) - - if options.format == "csv": - # disallow invalid options - if options.max_document_size: - parser.error( - "--max_document_size only affects importing JSON documents" - ) - - # delimiter - if options.delimiter is None: - options.delimiter = "," - elif options.delimiter == "\\t": - options.delimiter = "\t" - elif len(options.delimiter) != 1: - parser.error( - "Specify exactly one character for the --delimiter option: %s" - % options.delimiter - ) - - # no_header - if options.no_header is None: - options.no_header = False - elif options.custom_header is None: - parser.error("--custom-header is required if --no-header is specified") - - # custom_header - if options.custom_header: - options.custom_header = options.custom_header.split(",") - - elif options.format == "json": - # disallow invalid options - if options.delimiter is not None: - parser.error("--delimiter option is not valid for json files") - if options.no_header: - parser.error("--no-header option is not valid for json files") - if options.custom_header is not None: - parser.error("--custom-header option is not valid for json files") - - # default options - options.format = "json" - - if options.max_document_size > 0: - global JSON_MAX_BUFFER_SIZE - JSON_MAX_BUFFER_SIZE = options.max_document_size - - options.file = os.path.abspath(options.file) - - else: - parser.error("Unrecognized file format: %s" % options.format) - - else: - parser.error("Either -f/--file or -d/--directory is required") - - # -- - - # max_nesting_depth - if options.max_nesting_depth > 0: - global MAX_NESTING_DEPTH - MAX_NESTING_DEPTH = options.max_nesting_depth - - # -- - - return options - - -# This is run for each client requested, and accepts tasks from the reader processes - - -def table_writer( - tables, options, work_queue, error_queue, warning_queue, exit_event, timing_queue -): - signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should ignore these - db = table = batch = None - - try: - conflict_action = "replace" if options.force else "error" - timePoint = time.time() - while not exit_event.is_set(): - # get a batch - try: - db, table, batch = work_queue.get(timeout=0.1) - except Empty: - continue - timing_queue.put(("writer_wait", time.time() - timePoint)) - timePoint = time.time() - - # shut down when appropriate - if isinstance(batch, StopIteration): - return - - # find the table we are working on - table_info = tables[(db, table)] - tbl = query.db(db).table(table) - - # write the batch to the database - try: - res = options.retryQuery( - "write batch to %s.%s" % (db, table), - tbl.insert( - ast.expr(batch, nesting_depth=MAX_NESTING_DEPTH), - durability=options.durability, - conflict=conflict_action, - ), - ) - - if res["errors"] > 0: - raise RuntimeError( - "Error when importing into table '%s.%s': %s" - % (db, table, res["first_error"]) - ) - modified = res["inserted"] + res["replaced"] + res["unchanged"] - if modified != len(batch): - raise RuntimeError( - "The 
inserted/replaced/unchanged number did not match when importing into table '%s.%s': %s" - % (db, table, res["first_error"]) - ) - - table_info.add_rows_written(modified) - - except errors.ReqlError: - # the error might have been caused by a comm or temporary error causing a partial batch write - - for row in batch: - if table_info.primary_key not in row: - raise RuntimeError( - "Connection error while importing. Current row does not have the specified primary key " - "(%s), so cannot guarantee absence of duplicates" - % table_info.primary_key - ) - res = None - if conflict_action == "replace": - res = options.retryQuery( - "write row to %s.%s" % (db, table), - tbl.insert( - ast.expr(row, nesting_depth=MAX_NESTING_DEPTH), - durability=options.durability, - conflict=conflict_action, - ignore_write_hook=True, - ), - ) - else: - existingRow = options.retryQuery( - "read row from %s.%s" % (db, table), - tbl.get(row[table_info.primary_key]), - ) - if not existingRow: - res = options.retryQuery( - "write row to %s.%s" % (db, table), - tbl.insert( - ast.expr(row, nesting_depth=MAX_NESTING_DEPTH), - durability=options.durability, - conflict=conflict_action, - ignore_write_hook=True, - ), - ) - elif existingRow != row: - raise RuntimeError( - "Duplicate primary key `%s`:\n%s\n%s" - % (table_info.primary_key, str(row), str(existingRow)) - ) - - if res["errors"] > 0: - raise RuntimeError( - "Error when importing into table '%s.%s': %s" - % (db, table, res["first_error"]) - ) - if res["inserted"] + res["replaced"] + res["unchanged"] != 1: - raise RuntimeError( - "The inserted/replaced/unchanged number was not 1 when inserting on '%s.%s': %s" - % (db, table, res) - ) - table_info.add_rows_written(1) - timing_queue.put(("writer_work", time.time() - timePoint)) - timePoint = time.time() - - except Exception as e: - error_queue.put(Error(str(e), traceback.format_exc(), "%s.%s" % (db, table))) - exit_event.set() - - -def update_progress(tables, debug, exit_event, sleep=0.2): - signal.signal(signal.SIGINT, signal.SIG_IGN) # workers should not get these - - # give weights to each of the tables based on file size - totalSize = sum([x.bytes_size for x in tables]) - for table in tables: - table.weight = float(table.bytes_size) / totalSize - - lastComplete = None - startTime = time.time() - readWrites = collections.deque(maxlen=5) # (time, read, write) - readWrites.append((startTime, 0, 0)) - readRate = None - writeRate = None - while True: - try: - if exit_event.is_set(): - break - complete = read = write = 0 - currentTime = time.time() - for table in tables: - complete += table.percent_done * table.weight - if debug: - read += table.rows_read - write += table.rows_written - readWrites.append((currentTime, read, write)) - if complete != lastComplete: - timeDelta = readWrites[-1][0] - readWrites[0][0] - if debug and len(readWrites) > 1 and timeDelta > 0: - readRate = max( - (readWrites[-1][1] - readWrites[0][1]) / timeDelta, 0 - ) - writeRate = max( - (readWrites[-1][2] - readWrites[0][2]) / timeDelta, 0 - ) - utils_common.print_progress( - complete, indent=2, read=readRate, write=writeRate - ) - lastComplete = complete - time.sleep(sleep) - except KeyboardInterrupt: - break - except Exception as e: - if debug: - print(e) - traceback.print_exc() - - -def import_tables(options, sources, files_ignored=None): - # Make sure this isn't a pre-`reql_admin` cluster - which could result in data loss - # if the user has a database named 'rethinkdb' - utils_common.check_minimum_version(options, "1.6") - - start_time = 
time.time() - - tables = dict(((x.db, x.table), x) for x in sources) # (db, table) => table - - if six.PY3: - ctx = multiprocessing.get_context(multiprocessing.get_start_method()) - error_queue = SimpleQueue(ctx=ctx) - warning_queue = SimpleQueue(ctx=ctx) - timing_queue = SimpleQueue(ctx=ctx) - else: - error_queue = SimpleQueue() - warning_queue = SimpleQueue() - timing_queue = SimpleQueue() - - max_queue_size = options.clients * 3 - work_queue = multiprocessing.Manager().Queue(max_queue_size) - - exit_event = multiprocessing.Event() - interrupt_event = multiprocessing.Event() - - errors = [] - warnings = [] - timing_sums = {} - - pools = [] - progress_bar = None - progress_bar_sleep = 0.2 - - # - setup KeyboardInterupt handler - signal.signal(signal.SIGINT, lambda a, b: utils_common.abort(pools, exit_event)) - - # - queue draining - def drain_queues(): - # error_queue - while not error_queue.empty(): - errors.append(error_queue.get()) - - # warning_queue - while not warning_queue.empty(): - warnings.append(warning_queue.get()) - - # timing_queue - while not timing_queue.empty(): - key, value = timing_queue.get() - if key not in timing_sums: - timing_sums[key] = value - else: - timing_sums[key] += value - - # - setup dbs and tables - - # create missing dbs - needed_dbs = set([x.db for x in sources]) - if "rethinkdb" in needed_dbs: - raise RuntimeError( - "Error: Cannot import tables into the system database: 'rethinkdb'" - ) - options.retryQuery( - "ensure dbs: %s" % ", ".join(needed_dbs), - ast.expr(needed_dbs) - .set_difference(query.db_list()) - .for_each(query.db_create(query.row)), - ) - - # check for existing tables, or if --force is enabled ones with mis-matched primary keys - existing_tables = dict( - [ - ((x["db"], x["name"]), x["primary_key"]) - for x in options.retryQuery( - "list tables", - query.db("rethinkdb") - .table("table_config") - .pluck(["db", "name", "primary_key"]), - ) - ] - ) - already_exist = [] - for source in sources: - if (source.db, source.table) in existing_tables: - if not options.force: - already_exist.append("%s.%s" % (source.db, source.table)) - elif source.primary_key is None: - source.primary_key = existing_tables[(source.db, source.table)] - elif source.primary_key != existing_tables[(source.db, source.table)]: - raise RuntimeError( - "Error: Table '%s.%s' already exists with a different primary key: %s (expected: %s)" - % ( - source.db, - source.table, - existing_tables[(source.db, source.table)], - source.primary_key, - ) - ) - - if len(already_exist) == 1: - raise RuntimeError( - "Error: Table '%s' already exists, run with --force to import into the existing table" - % already_exist[0] - ) - elif len(already_exist) > 1: - already_exist.sort() - raise RuntimeError( - "Error: The following tables already exist, run with --force to import into the existing tables:\n %s" - % "\n ".join(already_exist) - ) - - # - start the import - - try: - # - start the progress bar - if not options.quiet: - progress_bar = multiprocessing.Process( - target=update_progress, - name="progress bar", - args=(sources, options.debug, exit_event, progress_bar_sleep), - ) - progress_bar.start() - pools.append([progress_bar]) - - # - start the writers - writers = [] - pools.append(writers) - for i in range(options.clients): - writer = multiprocessing.Process( - target=table_writer, - name="table writer %d" % i, - kwargs={ - "tables": tables, - "options": options, - "work_queue": work_queue, - "error_queue": error_queue, - "warning_queue": warning_queue, - "timing_queue": 
timing_queue, - "exit_event": exit_event, - }, - ) - writers.append(writer) - writer.start() - - # - read the tables options.clients at a time - readers = [] - pools.append(readers) - file_iter = iter(sources) - try: - while not exit_event.is_set(): - # add a workers to fill up the readers pool - while len(readers) < options.clients: - table = next(file_iter) - reader = multiprocessing.Process( - target=table.read_to_queue, - name="table reader %s.%s" % (table.db, table.table), - kwargs={ - "fields": options.fields, - "batch_size": options.batch_size, - "work_queue": work_queue, - "error_queue": error_queue, - "warning_queue": warning_queue, - "timing_queue": timing_queue, - "exit_event": exit_event, - }, - ) - readers.append(reader) - reader.start() - - # drain the queues - drain_queues() - - # reap completed tasks - for reader in readers[:]: - if not reader.is_alive(): - readers.remove(reader) - if len(readers) == options.clients: - time.sleep(0.05) - except StopIteration: - pass # ran out of new tables - - # - wait for the last batch of readers to complete - while readers: - # drain the queues - drain_queues() - - # drain the work queue to prevent readers from stalling on exit - if exit_event.is_set(): - try: - while True: - work_queue.get(timeout=0.1) - except Empty: - pass - - # watch the readers - for reader in readers[:]: - try: - reader.join(0.1) - except Exception as exc: - default_logger.exception(exc) - if not reader.is_alive(): - readers.remove(reader) - - # - append enough StopIterations to signal all writers - for _ in writers: - while True: - if exit_event.is_set(): - break - try: - work_queue.put((None, None, StopIteration()), timeout=0.1) - break - except Full: - pass - - # - wait for all of the writers - for writer in writers[:]: - while writer.is_alive(): - writer.join(0.1) - writers.remove(writer) - - # - stop the progress bar - if progress_bar: - progress_bar.join(progress_bar_sleep * 2) - if not interrupt_event.is_set(): - utils_common.print_progress(1, indent=2) - if progress_bar.is_alive(): - progress_bar.terminate() - - # - drain queues - drain_queues() - - # - final reporting - if not options.quiet: - # if successful, make sure 100% progress is reported - if len(errors) == 0 and not interrupt_event.is_set(): - utils_common.print_progress(1.0, indent=2) - - # advance past the progress bar - print("") - - # report statistics - def plural(num, text): - return "%d %s%s" % (num, text, "" if num == 1 else "s") - - print( - " %s imported to %s in %.2f secs" - % ( - plural(sum(x.rows_written for x in sources), "row"), - plural(len(sources), "table"), - time.time() - start_time, - ) - ) - - # report debug statistics - if options.debug: - print("Debug timing:") - for key, value in sorted(timing_sums.items(), key=lambda x: x[0]): - print(" %s: %.2f" % (key, value)) - finally: - signal.signal(signal.SIGINT, signal.SIG_DFL) - - drain_queues() - - for error in errors: - print("%s" % error.message, file=sys.stderr) - if options.debug and error.traceback: - print(" Traceback:\n%s" % error.traceback, file=sys.stderr) - if len(error.file) == 4: - print(" In file: %s" % error.file, file=sys.stderr) - - for warning in warnings: - print("%s" % warning[1], file=sys.stderr) - if options.debug: - print( - "%s traceback: %s" % (warning[0].__name__, warning[2]), file=sys.stderr - ) - if len(warning) == 4: - print("In file: %s" % warning[3], file=sys.stderr) - - if interrupt_event.is_set(): - raise RuntimeError("Interrupted") - if errors: - raise RuntimeError("Errors occurred during 
import") - if warnings: - raise RuntimeError("Warnings occurred during import") - - -def parse_sources(options, files_ignored=None): - def parse_info_file(path): - primary_key = None - indexes = [] - write_hook = None - with open(path, "r") as info_file: - metadata = json.load(info_file) - if "primary_key" in metadata: - primary_key = metadata["primary_key"] - if "indexes" in metadata and options.indexes is not False: - indexes = metadata["indexes"] - if "write_hook" in metadata: - write_hook = metadata["write_hook"] - return primary_key, indexes, write_hook - - has_write_hooks = utils_common.check_minimum_version(options, "2.3.7", False) - - sources = set() - if files_ignored is None: - files_ignored = [] - if options.directory and options.file: - raise RuntimeError( - "Error: Both --directory and --file cannot be specified together" - ) - elif options.file: - db, table = options.import_table - path, ext = os.path.splitext(options.file) - table_type_options = None - if ext == ".json": - table_type = JsonSourceFile - elif ext == ".csv": - table_type = CsvSourceFile - table_type_options = { - "no_header_row": options.no_header, - "custom_header": options.custom_header, - } - else: - raise Exception("The table type is not recognised: %s" % ext) - - # - parse the info file if it exists - primary_key = ( - options.create_args.get("primary_key", None) - if options.create_args - else None - ) - indexes = [] - write_hook = None - info_path = path + ".info" - if (primary_key is None or options.indexes is not False) and os.path.isfile( - info_path - ): - info_primary_key, info_indexes, info_write_hook = parse_info_file(info_path) - if primary_key is None: - primary_key = info_primary_key - if options.indexes is not False: - indexes = info_indexes - if write_hook is None: - write_hook = info_write_hook - if write_hook and not has_write_hooks: - raise Exception("this RDB version doesn't support write-hooks") - - sources.add( - table_type( - source=options.file, - db=db, - table=table, - query_runner=options.retryQuery, - primary_key=primary_key, - indexes=indexes, - write_hook=write_hook, - source_options=table_type_options, - ) - ) - elif options.directory: - # Scan for all files, make sure no duplicated tables with different formats - dbs = False - files_ignored = [] - for root, dirs, files in os.walk(options.directory): - if not dbs: - files_ignored.extend([os.path.join(root, f) for f in files]) - # The first iteration through should be the top-level directory, which contains the db folders - dbs = True - - # don't recurse into folders not matching our filter - db_filter = set([db_table[0] for db_table in options.db_tables or []]) - if db_filter: - for dir_name in dirs[:]: # iterate on a copy - if dir_name not in db_filter: - dirs.remove(dir_name) - else: - if dirs: - files_ignored.extend([os.path.join(root, d) for d in dirs]) - del dirs[:] - - db = os.path.basename(root) - for filename in files: - path = os.path.join(root, filename) - table, ext = os.path.splitext(filename) - table = os.path.basename(table) - - if ext not in [".json", ".csv", ".info"]: - files_ignored.append(os.path.join(root, filename)) - elif ext == ".info": - pass # Info files are included based on the data files - elif not os.path.exists(os.path.join(root, table + ".info")): - files_ignored.append(os.path.join(root, filename)) - else: - # apply db/table filters - if options.db_tables: - for filter_db, filter_table in options.db_tables: - if db == filter_db and filter_table in (None, table): - break # either all tables in this 
db, or specific pair - else: - files_ignored.append(os.path.join(root, filename)) - continue # not a chosen db/table - - # collect the info - primary_key = None - indexes = [] - write_hook = None - info_path = os.path.join(root, table + ".info") - if not os.path.isfile(info_path): - files_ignored.append(os.path.join(root, filename)) - else: - primary_key, indexes, write_hook = parse_info_file( - info_path - ) - if write_hook and not has_write_hooks: - raise Exception( - "RDB versions below doesn't support write-hooks" - ) - - table_type = None - if ext == ".json": - table_type = JsonSourceFile - elif ext == ".csv": - table_type = CsvSourceFile - else: - raise Exception( - "The table type is not recognised: %s" % ext - ) - source = table_type( - source=path, - query_runner=options.retryQuery, - db=db, - table=table, - primary_key=primary_key, - indexes=indexes, - write_hook=write_hook, - ) - - # ensure we don't have a duplicate - if table in sources: - raise RuntimeError( - "Error: Duplicate db.table found in directory tree: %s.%s" - % (source.db, source.table) - ) - - sources.add(source) - - # Warn the user about the files that were ignored - if len(files_ignored) > 0: - print( - "Unexpected files found in the specified directory. Importing a directory expects", - file=sys.stderr, - ) - print( - " a directory from `rethinkdb export`. If you want to import individual tables", - file=sys.stderr, - ) - print( - " import them as single files. The following files were ignored:", - file=sys.stderr, - ) - for ignored_file in files_ignored: - print("%s" % str(ignored_file), file=sys.stderr) - else: - raise RuntimeError("Error: Neither --directory or --file specified") - - return sources - - -def main(argv=None, prog=None): - start_time = time.time() - - if argv is None: - argv = sys.argv[1:] - options = parse_options(argv, prog=prog) - - try: - sources = parse_sources(options) - import_tables(options, sources) - except RuntimeError as ex: - print(ex, file=sys.stderr) - if str(ex) == "Warnings occurred during import": - return 2 - return 1 - if not options.quiet: - print(" Done (%d seconds)" % (time.time() - start_time)) - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/rethinkdb/_index_rebuild.py b/rethinkdb/_index_rebuild.py deleted file mode 100755 index b12997b1..00000000 --- a/rethinkdb/_index_rebuild.py +++ /dev/null @@ -1,275 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 RethinkDB -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This file incorporates work covered by the following copyright: -# Copyright 2010-2016 RethinkDB, all rights reserved. - - -"""'rethinkdb index-rebuild' recreates outdated secondary indexes in a cluster. - This should be used after upgrading to a newer version of rethinkdb. 
There - will be a notification in the web UI if any secondary indexes are out-of-date.""" - -from __future__ import print_function - -import sys -import time -import traceback - -from rethinkdb import query, utils_common - -usage = ( - "rethinkdb index-rebuild [-c HOST:PORT] [-n NUM] [-r (DB | DB.TABLE)] [--tls-cert FILENAME] [-p] " - "[--password-file FILENAME]..." -) -help_epilog = """ -FILE: the archive file to restore data from - -EXAMPLES: -rethinkdb index-rebuild -c mnemosyne:39500 - rebuild all outdated secondary indexes from the cluster through the host 'mnemosyne', - one at a time - -rethinkdb index-rebuild -r test -r production.users -n 5 - rebuild all outdated secondary indexes from a local cluster on all tables in the - 'test' database as well as the 'production.users' table, five at a time -""" - -# Prefix used for indexes that are being rebuilt -TMP_INDEX_PREFIX = "$reql_temp_index$_" - - -def parse_options(argv, prog=None): - parser = utils_common.CommonOptionsParser( - usage=usage, epilog=help_epilog, prog=prog - ) - - parser.add_option( - "-r", - "--rebuild", - dest="db_table", - metavar="DB|DB.TABLE", - default=[], - help="databases or tables to rebuild indexes on (default: all, may be specified multiple times)", - action="append", - type="db_table", - ) - parser.add_option( - "-n", - dest="concurrent", - metavar="NUM", - default=1, - help="concurrent indexes to rebuild (default: 1)", - type="pos_int", - ) - parser.add_option( - "--force", - dest="force", - action="store_true", - default=False, - help="rebuild non-outdated indexes", - ) - - options, args = parser.parse_args(argv) - - # Check validity of arguments - if len(args) != 0: - parser.error( - "Error: No positional arguments supported. Unrecognized option '%s'" - % args[0] - ) - - return options - - -def rebuild_indexes(options): - - # flesh out options.db_table - if not options.db_table: - options.db_table = [ - utils_common.DbTable(x["db"], x["name"]) - for x in options.retryQuery( - "all tables", - query.db("rethinkdb").table("table_config").pluck(["db", "name"]), - ) - ] - else: - for db_table in options.db_table[:]: # work from a copy - if not db_table[1]: - options.db_table += [ - utils_common.DbTable(db_table[0], x) - for x in options.retryQuery( - "table list of %s" % db_table[0], - query.db(db_table[0]).table_list(), - ) - ] - del options.db_table[db_table] - - # wipe out any indexes with the TMP_INDEX_PREFIX - for db, table in options.db_table: - for index in options.retryQuery( - "list indexes on %s.%s" % (db, table), - query.db(db).table(table).index_list(), - ): - if index.startswith(TMP_INDEX_PREFIX): - options.retryQuery( - "drop index: %s.%s:%s" % (db, table, index), - query.db(index["db"]) - .table(index["table"]) - .index_drop(index["name"]), - ) - - # get the list of indexes to rebuild - indexes_to_build = [] - for db, table in options.db_table: - indexes = None - if not options.force: - indexes = options.retryQuery( - "get outdated indexes from %s.%s" % (db, table), - query.db(db) - .table(table) - .index_status() - .filter({"outdated": True}) - .get_field("index"), - ) - else: - indexes = options.retryQuery( - "get all indexes from %s.%s" % (db, table), - query.db(db).table(table).index_status().get_field("index"), - ) - for index in indexes: - indexes_to_build.append({"db": db, "table": table, "name": index}) - - # rebuild selected indexes - - total_indexes = len(indexes_to_build) - indexes_completed = 0 - progress_ratio = 0.0 - highest_progress = 0.0 - indexes_in_progress = [] - - if not 
options.quiet: - print( - "Rebuilding %d index%s: %s" - % ( - total_indexes, - "es" if total_indexes > 1 else "", - ", ".join( - ["`%(db)s.%(table)s:%(name)s`" % i for i in indexes_to_build] - ), - ) - ) - - while len(indexes_to_build) > 0 or len(indexes_in_progress) > 0: - # Make sure we're running the right number of concurrent index rebuilds - while ( - len(indexes_to_build) > 0 and len(indexes_in_progress) < options.concurrent - ): - index = indexes_to_build.pop() - indexes_in_progress.append(index) - index["temp_name"] = TMP_INDEX_PREFIX + index["name"] - index["progress"] = 0 - index["ready"] = False - - existing_indexes = dict( - (x["index"], x["function"]) - for x in options.retryQuery( - "existing indexes", - query.db(index["db"]) - .table(index["table"]) - .index_status() - .pluck("index", "function"), - ) - ) - - if index["name"] not in existing_indexes: - raise AssertionError( - "{index_name} is not part of existing indexes {indexes}".format( - index_name=index["name"], indexes=", ".join(existing_indexes) - ) - ) - - if index["temp_name"] not in existing_indexes: - options.retryQuery( - "create temp index: %(db)s.%(table)s:%(name)s" % index, - query.db(index["db"]) - .table(index["table"]) - .index_create(index["temp_name"], existing_indexes[index["name"]]), - ) - - # Report progress - highest_progress = max(highest_progress, progress_ratio) - if not options.quiet: - utils_common.print_progress(highest_progress) - - # Check the status of indexes in progress - progress_ratio = 0.0 - for index in indexes_in_progress: - status = options.retryQuery( - "progress `%(db)s.%(table)s` index `%(name)s`" % index, - query.db(index["db"]) - .table(index["table"]) - .index_status(index["temp_name"]) - .nth(0), - ) - if status["ready"]: - index["ready"] = True - options.retryQuery( - "rename `%(db)s.%(table)s` index `%(name)s`" % index, - query.db(index["db"]) - .table(index["table"]) - .index_rename(index["temp_name"], index["name"], overwrite=True), - ) - else: - progress_ratio += status.get("progress", 0) / total_indexes - - indexes_in_progress = [ - index for index in indexes_in_progress if not index["ready"] - ] - indexes_completed = ( - total_indexes - len(indexes_to_build) - len(indexes_in_progress) - ) - progress_ratio += float(indexes_completed) / total_indexes - - if len(indexes_in_progress) == options.concurrent or ( - len(indexes_in_progress) > 0 and len(indexes_to_build) == 0 - ): - # Short sleep to keep from killing the CPU - time.sleep(0.1) - - # Make sure the progress bar says we're done and get past the progress bar line - if not options.quiet: - utils_common.print_progress(1.0) - print("") - - -def main(argv=None, prog=None): - options = parse_options(argv or sys.argv[1:], prog=prog) - start_time = time.time() - try: - rebuild_indexes(options) - except Exception as ex: - if options.debug: - traceback.print_exc() - if not options.quiet: - print(ex, file=sys.stderr) - return 1 - if not options.quiet: - print("Done (%d seconds)" % (time.time() - start_time)) - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/rethinkdb/_restore.py b/rethinkdb/_restore.py deleted file mode 100755 index 178f1bdc..00000000 --- a/rethinkdb/_restore.py +++ /dev/null @@ -1,347 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018 RethinkDB -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This file incorporates work covered by the following copyright: -# Copyright 2010-2016 RethinkDB, all rights reserved. - - -"""`rethinkdb restore` loads data into a RethinkDB cluster from an archive""" - -from __future__ import print_function - -import copy -import multiprocessing -import optparse -import os -import shutil -import sys -import tarfile -import tempfile -import time -import traceback - -from rethinkdb import _import, utils_common - -usage = ( - "rethinkdb restore FILE [-c HOST:PORT] [--tls-cert FILENAME] [-p] [--password-file FILENAME] [--clients NUM] " - "[--shards NUM_SHARDS] [--replicas NUM_REPLICAS] [--force] [-i (DB | DB.TABLE)]..." -) -help_epilog = """ -FILE: - the archive file to restore data from; - if FILE is -, use standard input (note that - intermediate files will still be written to - the --temp-dir directory) - -EXAMPLES: - -rethinkdb restore rdb_dump.tar.gz -c mnemosyne:39500 - Import data into a cluster running on host 'mnemosyne' with a client port at 39500 using - the named archive file. - -rethinkdb restore rdb_dump.tar.gz -i test - Import data into a local cluster from only the 'test' database in the named archive file. - -rethinkdb restore rdb_dump.tar.gz -i test.subscribers -c hades -p - Import data into a cluster running on host 'hades' which requires a password from only - a specific table from the named archive file. - -rethinkdb restore rdb_dump.tar.gz --clients 4 --force - Import data to a local cluster from the named archive file using only 4 client connections - and overwriting any existing rows with the same primary key. 
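One more illustrative invocation (the path and numbers are hypothetical; the flags are the ones defined in parse_options below):

rethinkdb restore rdb_dump.tar.gz --temp-dir /mnt/scratch --shards 2 --replicas 2
    Import data into a local cluster from the named archive file, writing
    intermediate files to /mnt/scratch and creating each restored table with
    two shards and two replicas.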
-""" - - -def parse_options(argv, prog=None): - parser = utils_common.CommonOptionsParser( - usage=usage, epilog=help_epilog, prog=prog - ) - - parser.add_option( - "-i", - "--import", - dest="db_tables", - metavar="DB|DB.TABLE", - default=[], - help="limit restore to the given database or table (may be specified multiple times)", - action="append", - type="db_table", - ) - - parser.add_option( - "--temp-dir", - dest="temp_dir", - metavar="DIR", - default=None, - help="directory to use for intermediary results", - ) - parser.add_option( - "--clients", - dest="clients", - metavar="CLIENTS", - default=8, - help="client connections to use (default: 8)", - type="pos_int", - ) - parser.add_option( - "--hard-durability", - dest="durability", - action="store_const", - default="soft", - help="use hard durability writes (slower, uses less memory)", - const="hard", - ) - parser.add_option( - "--force", - dest="force", - action="store_true", - default=False, - help="import data even if a table already exists", - ) - parser.add_option( - "--no-secondary-indexes", - dest="indexes", - action="store_false", - default=None, - help="do not create secondary indexes for the restored tables", - ) - - parser.add_option( - "--writers-per-table", - dest="writers", - default=multiprocessing.cpu_count(), - help=optparse.SUPPRESS_HELP, - type="pos_int", - ) - parser.add_option( - "--batch-size", - dest="batch_size", - default=utils_common.default_batch_size, - help=optparse.SUPPRESS_HELP, - type="pos_int", - ) - - # Replication settings - replication_options_group = optparse.OptionGroup(parser, "Replication Options") - replication_options_group.add_option( - "--shards", - dest="create_args", - metavar="SHARDS", - help="shards to setup on created tables (default: 1)", - type="pos_int", - action="add_key", - ) - replication_options_group.add_option( - "--replicas", - dest="create_args", - metavar="REPLICAS", - help="replicas to setup on created tables (default: 1)", - type="pos_int", - action="add_key", - ) - parser.add_option_group(replication_options_group) - - options, args = parser.parse_args(argv) - - # -- Check validity of arguments - - # - archive - if len(args) == 0: - parser.error( - "Archive to import not specified. Provide an archive file created by rethinkdb-dump." 
- ) - elif len(args) != 1: - parser.error("Only one positional argument supported") - options.in_file = args[0] - if options.in_file == "-": - options.in_file = sys.stdin - else: - if not os.path.isfile(options.in_file): - parser.error("Archive file does not exist: %s" % options.in_file) - options.in_file = os.path.realpath(options.in_file) - - # - temp_dir - if options.temp_dir: - if not os.path.isdir(options.temp_dir): - parser.error( - "Temporary directory doesn't exist or is not a directory: %s" - % options.temp_dir - ) - if not os.access(options.temp_dir, os.W_OK): - parser.error("Temporary directory inaccessible: %s" % options.temp_dir) - - # - create_args - if options.create_args is None: - options.create_args = {} - - # -- - - return options - - -def do_unzip(temp_dir, options): - """extract the tarfile to the filesystem""" - - tables_to_export = set(options.db_tables) - top_level = None - files_ignored = [] - files_found = False - archive = None - tarfile_options = { - "mode": "r|*", - "fileobj" if hasattr(options.in_file, "read") else "name": options.in_file, - } - try: - archive = tarfile.open(**tarfile_options) - for tarinfo in archive: - # skip without comment anything but files - if not tarinfo.isfile(): - continue # skip everything but files - - # normalize the path - relpath = os.path.relpath( - os.path.realpath(tarinfo.name.strip().lstrip(os.sep)) - ) - - # skip things that try to jump out of the folder - if relpath.startswith(os.path.pardir): - files_ignored.append(tarinfo.name) - continue - - # skip files types other than what we use - if not os.path.splitext(relpath)[1] in (".json", ".csv", ".info"): - files_ignored.append(tarinfo.name) - continue - - # ensure this looks like our structure - try: - top, db, file_name = relpath.split(os.sep) - except ValueError: - raise RuntimeError( - "Error: Archive file has an unexpected directory structure: %s" - % tarinfo.name - ) - - if not top_level: - top_level = top - elif top != top_level: - raise RuntimeError( - "Error: Archive file has an unexpected directory structure (%s vs %s)" - % (top, top_level) - ) - - # filter out tables we are not looking for - table = os.path.splitext(file_name) - if tables_to_export and not ( - (db, table) in tables_to_export or (db, None) in tables_to_export - ): - continue # skip without comment - - # write the file out - files_found = True - dest_path = os.path.join(temp_dir, db, file_name) - - if not os.path.exists(os.path.dirname(dest_path)): - os.makedirs(os.path.dirname(dest_path)) - - with open(dest_path, "wb") as dest: - source = archive.extractfile(tarinfo) - chunk = True - while chunk: - chunk = source.read(1024 * 128) - dest.write(chunk) - source.close() - - if not os.path.isfile(dest_path): - raise AssertionError( - "Was not able to write {destination_path}".format( - destination_path=dest_path - ) - ) - - finally: - if archive: - archive.close() - - if not files_found: - raise RuntimeError("Error: Archive file had no files") - - # - send the location and ignored list back to our caller - return files_ignored - - -def do_restore(options): - # Create a temporary directory to store the extracted data - temp_dir = tempfile.mkdtemp(dir=options.temp_dir) - - try: - # - extract the archive - if not options.quiet: - print("Extracting archive file...") - start_time = time.time() - - do_unzip(temp_dir, options) - - if not options.quiet: - print(" Done (%d seconds)" % (time.time() - start_time)) - - # - default _import options - - options = copy.copy(options) - options.fields = None - 
options.directory = temp_dir - options.file = None - - sources = _import.parse_sources(options) - - # - run the import - if not options.quiet: - print("Importing from directory...") - - try: - _import.import_tables(options, sources) - except RuntimeError as ex: - if options.debug: - traceback.print_exc() - if str(ex) == "Warnings occurred during import": - raise RuntimeError( - "Warning: import did not create some secondary indexes." - ) - else: - error_string = str(ex) - if error_string.startswith("Error: "): - error_string = error_string[len("Error: ") :] - raise RuntimeError("Error: import failed: %s" % error_string) - # 'Done' message will be printed by the import script - finally: - shutil.rmtree(temp_dir) - - -def main(argv=None, prog=None): - if argv is None: - argv = sys.argv[1:] - options = parse_options(argv, prog=prog) - - try: - do_restore(options) - except RuntimeError as ex: - print(ex, file=sys.stderr) - return 1 - return 0 - - -if __name__ == "__main__": - exit(main()) diff --git a/rethinkdb/ast.py b/rethinkdb/ast.py index 7bd73b0c..5b232fd0 100644 --- a/rethinkdb/ast.py +++ b/rethinkdb/ast.py @@ -1,4 +1,4 @@ -# Copyright 2018 RethinkDB +# Copyright 2022 RethinkDB # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. @@ -15,160 +15,104 @@ # This file incorporates work covered by the following copyright: # Copyright 2010-2016 RethinkDB, all rights reserved. -__all__ = ["expr", "RqlQuery", "ReQLEncoder", "ReQLDecoder", "Repl"] +""" +AST module contains the way the queries are serialized and deserialized. +""" +# It is known and expected that the ast module will be lot longer than the +# usual module length, so we disabled it. +# pylint: disable=too-many-lines +# FIXME: do a major refactoring and re-enable docstring checks +# pylint: disable=missing-function-docstring,missing-class-docstring + +__all__ = ["expr", "ReqlQuery", "ReqlBinary", "ReqlTzinfo"] + +from abc import abstractmethod import base64 import binascii +from collections import abc import datetime -import json -import sys import threading +from typing import TYPE_CHECKING, Any, Callable, Iterable, List, Mapping, Optional +from typing import Union as TUnion from rethinkdb import ql2_pb2 -from rethinkdb.errors import (QueryPrinter, ReqlDriverCompileError, - ReqlDriverError, T) - -if sys.version_info < (3, 3): - # python < 3.3 uses collections - import collections -else: - # but collections is deprecated from python >= 3.3 - import collections.abc as collections - -P_TERM = ql2_pb2.Term.TermType - -try: - unicode -except NameError: - unicode = str - -try: - xrange -except NameError: - xrange = range - - -def dict_items(dictionary): - return list(dictionary.items()) - - -class Repl(object): - thread_data = threading.local() - repl_active = False - - @classmethod - def get(cls): - if "repl" in cls.thread_data.__dict__: - return cls.thread_data.repl - else: - return None - - @classmethod - def set(cls, conn): - cls.thread_data.repl = conn - cls.repl_active = True - - @classmethod - def clear(cls): - if "repl" in cls.thread_data.__dict__: - del cls.thread_data.repl - cls.repl_active = False +from rethinkdb.errors import QueryPrinter, ReqlDriverCompileError, ReqlDriverError +from rethinkdb.repl import Repl +from rethinkdb.utilities import EnhancedTuple +if TYPE_CHECKING: + from rethinkdb.net import Connection -# This is both an external function and one used extensively -# internally to convert coerce python values to RQL types +P_TERM = 
ql2_pb2.Term.TermType # pylint: disable=invalid-name -def expr(val, nesting_depth=20): +class ReqlQuery: # pylint: disable=too-many-public-methods """ - Convert a Python primitive into a RQL primitive value + The RethinkDB Query object which determines the operations we can request + from the server. """ - if not isinstance(nesting_depth, int): - raise ReqlDriverCompileError("Second argument to `r.expr` must be a number.") - if nesting_depth <= 0: - raise ReqlDriverCompileError("Nesting depth limit exceeded.") + def __init__(self, *args, **kwargs: dict): + self._args = [expr(e) for e in args] + self.kwargs = {k: expr(v) for k, v in kwargs.items()} + self.term_type: Optional[int] = None + self.statement: str = "" + + @abstractmethod + def compose(self, args, kwargs): + """Compose the Reql query""" + + # TODO: add return value + def run(self, connection: Optional["Connection"] = None, **kwargs: dict): + """ + Send the query to the server for execution and return the result of the + evaluation. + """ + + repl = Repl() + conn = connection or repl.get_connection() + + if conn is None: + if repl.is_repl_active: + raise ReqlDriverError( + "ReqlQuery.run must be given a connection to run on. " + "A default connection has been set with " + "`repl()` on another thread, but not this one." + ) - if isinstance(val, RqlQuery): - return val - elif isinstance(val, collections.Callable): - return Func(val) - elif isinstance(val, (datetime.datetime, datetime.date)): - if not hasattr(val, "tzinfo") or not val.tzinfo: - raise ReqlDriverCompileError( - """Cannot convert %s to ReQL time object - without timezone information. You can add timezone information with - the third party module \"pytz\" or by constructing ReQL compatible - timezone values with r.make_timezone(\"[+-]HH:MM\"). Alternatively, - use one of ReQL's bultin time constructors, r.now, r.time, - or r.iso8601. - """ - % (type(val).__name__) - ) - return ISO8601(val.isoformat()) - elif isinstance(val, RqlBinary): - return Binary(val) - elif isinstance(val, (str, unicode)): - return Datum(val) - elif isinstance(val, bytes): - return Binary(val) - elif isinstance(val, collections.Mapping): - # MakeObj doesn't take the dict as a keyword args to avoid - # conflicting with the `self` parameter. - obj = {} - for k, v in dict_items(val): - obj[k] = expr(v, nesting_depth - 1) - return MakeObj(obj) - elif isinstance(val, collections.Iterable): - val = [expr(v, nesting_depth - 1) for v in val] - return MakeArray(*val) - else: - return Datum(val) + raise ReqlDriverError("ReqlQuery.run must be given a connection to run on.") + return conn.start(self, **kwargs) -class RqlQuery(object): - # Instantiate this AST node with the given pos and opt args - def __init__(self, *args, **optargs): - self._args = [expr(e) for e in args] + def __str__(self) -> str: + """ + Return the string representation of the query. + """ + return QueryPrinter(self).query - self.optargs = {} - for key, value in dict_items(optargs): - self.optargs[key] = expr(value) - - # Send this query to the server to be executed - def run(self, c=None, **global_optargs): - if c is None: - c = Repl.get() - if c is None: - if Repl.repl_active: - raise ReqlDriverError( - "RqlQuery.run must be given a connection to run on. A default connection has been set with " - "`repl()` on another thread, but not this one." - ) - else: - raise ReqlDriverError( - "RqlQuery.run must be given a connection to run on." - ) + def __repr__(self) -> str: + """ + Return the representation string of the object. 
+ """ + return f"" - return c._start(self, **global_optargs) + def build(self) -> List[str]: + """ + Compile the query to a json-serializable object. + """ - def __str__(self): - printer = QueryPrinter(self) - return printer.print_query() + # TODO: Have a more specific typing here + res: List[Any] = [self.term_type, self._args] - def __repr__(self): - return "" % str(self) + if len(self.kwargs) > 0: + res.append(self.kwargs) - # Compile this query to a json-serializable object - def build(self): - res = [self.term_type, self._args] - if len(self.optargs) > 0: - res.append(self.optargs) return res # The following are all operators and methods that operate on - # Rql queries to build up more complex operations + # Reql queries to build up more complex operations # Comparison operators def __eq__(self, other): @@ -250,103 +194,260 @@ def __ror__(self, other): return query # Non-operator versions of the above - - def eq(self, *args): + def eq(self, *args): # pylint: disable=invalid-name + """ + Non-operator version of ``__eq__``. + """ return Eq(self, *args) - def ne(self, *args): + def ne(self, *args): # pylint: disable=invalid-name + """ + Non-operator version of ``__ne__``. + """ return Ne(self, *args) - def lt(self, *args): + def lt(self, *args): # pylint: disable=invalid-name + """ + Non-operator version of ``__lt__``. + """ return Lt(self, *args) - def le(self, *args): + def le(self, *args): # pylint: disable=invalid-name + """ + Non-operator version of ``__le__``. + """ return Le(self, *args) - def gt(self, *args): + def gt(self, *args): # pylint: disable=invalid-name + """ + Non-operator version of ``__gt__``. + """ return Gt(self, *args) - def ge(self, *args): + def ge(self, *args): # pylint: disable=invalid-name + """ + Non-operator version of ``__ge__``. + """ return Ge(self, *args) def add(self, *args): + """ + Non-operator version of ``__add__``. + """ return Add(self, *args) def sub(self, *args): + """ + Non-operator version of ``__sub__``. + """ return Sub(self, *args) def mul(self, *args): + """ + Non-operator version of ``__mul__``. + """ return Mul(self, *args) def div(self, *args): + """ + Non-operator version of ``__div__``. + """ return Div(self, *args) def mod(self, *args): + """ + Non-operator version of ``__mod__``. + """ return Mod(self, *args) def bit_and(self, *args): + """ + Bitwise AND operator. + + A bitwise AND is a binary operation that takes two equal-length binary + representations and performs the logical AND operation on each pair of + the corresponding bits, which is equivalent to multiplying them. Thus, + if both bits in the compared position are 1, the bit in the resulting + binary representation is 1 (1 x 1 = 1); otherwise, the result is + 0 (1 x 0 = 0 and 0 x 0 = 0). + """ return BitAnd(self, *args) def bit_or(self, *args): + """ + Bitwise OR operator. + + A bitwise OR is a binary operation that takes two bit patterns of equal + length and performs the logical inclusive OR operation on each pair of + corresponding bits. The result in each position is 0 if both bits are 0, + while otherwise the result is 1. + """ return BitOr(self, *args) def bit_xor(self, *args): + """ + Bitwise XOR operator. + + A bitwise XOR is a binary operation that takes two bit patterns of equal + length and performs the logical exclusive OR operation on each pair of + corresponding bits. The result in each position is 1 if only the first + bit is 1 or only the second bit is 1, but will be 0 if both are 0 or + both are 1. 
+ """ return BitXor(self, *args) def bit_not(self, *args): + """ + Bitwise NOT operator. + + A bitwise NOT, or complement, is a unary operation that performs logical + negation on each bit, forming the ones' complement of the given binary + value. Bits that are 0 become 1, and those that are 1 become 0. + """ return BitNot(self, *args) def bit_sal(self, *args): + """ + Bitwise SAL operator. + + In an arithmetic shift (also referred to as signed shift), like a + logical shift, the bits that slide off the end disappear (except for the + last, which goes into the carry flag). But in an arithmetic shift, the + spaces are filled in such a way as to preserve the sign of the number + being slid. For this reason, arithmetic shifts are better suited for + signed numbers in two's complement format. + + Note: SHL and SAL are the same, and differentiation only happens because + SAR and SHR (right shifting) have differences in their implementation. + """ return BitSal(self, *args) def bit_sar(self, *args): + """ + Bitwise SAR operator. + + In an arithmetic shift (also referred to as signed shift), like a + logical shift, the bits that slide off the end disappear (except for the + last, which goes into the carry flag). But in an arithmetic shift, the + spaces are filled in such a way as to preserve the sign of the number + being slid. For this reason, arithmetic shifts are better suited for + signed numbers in two's complement format. + """ return BitSar(self, *args) def floor(self, *args): + """ + Rounds the given value down, returning the largest integer value less + than or equal to the given value (the value's floor). + """ return Floor(self, *args) def ceil(self, *args): + """ + Rounds the given value up, returning the smallest integer value greater + than or equal to the given value (the value's ceiling). + """ return Ceil(self, *args) def round(self, *args): + """ + Rounds the given value to the nearest whole integer. + """ return Round(self, *args) def and_(self, *args): + """ + Non-operator version of ``__and__``. + """ return And(self, *args) def or_(self, *args): + """ + Non-operator version of ``__or__``. + """ return Or(self, *args) def not_(self, *args): + """ + Non-operator version of ``__not__``. + """ return Not(self, *args) # N.B. Cannot use 'in' operator because it must return a boolean def contains(self, *args): + """ + When called with values, returns True if a sequence contains all the + specified values. When called with predicate functions, returns True if + for each predicate there exists at least one element of the stream where + that predicate returns True. + """ return Contains(self, *[func_wrap(arg) for arg in args]) def has_fields(self, *args): + """ + Test if an object has one or more fields. An object has a field if it + has that key and the key has a non-null value. For instance, the object + {'a': 1, 'b': 2, 'c': null} has the fields a and b. + + When applied to a single object, has_fields returns true if the object + has the fields and false if it does not. When applied to a sequence, it + will return a new sequence (an array or stream) containing the elements + that have the specified fields. + """ return HasFields(self, *args) def with_fields(self, *args): + """ + Plucks one or more attributes from a sequence of objects, filtering out + any objects in the sequence that do not have the specified fields. + Functionally, this is identical to has_fields followed by pluck on a + sequence. 
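For instance (a sketch: the ``users`` table, its fields, and the open
connection ``conn`` are hypothetical)::

    # keep only documents that carry both fields, then project them
    r.table("users").with_fields("id", "email").run(conn)

    # equivalent, per the description above
    r.table("users").has_fields("id", "email").pluck("id", "email").run(conn)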
+ """ return WithFields(self, *args) def keys(self, *args): + """ + Return an array containing all of an object's keys. Note that the keys + will be sorted as described in Reql data types (for strings, + lexicographically). + """ return Keys(self, *args) def values(self, *args): + """ + Return an array containing all of an object's values. values() + guarantees the values will come out in the same order as keys. + """ return Values(self, *args) def changes(self, *args, **kwargs): + """ + Turn a query into a changefeed, an infinite stream of objects + representing changes to the query's results as they occur. A changefeed + may return changes to a table or an individual document (a “point” + changefeed). Commands such as filter or map may be used before the + changes command to transform or filter the output, and many commands + that operate on sequences can be chained after changes. + """ return Changes(self, *args, **kwargs) # Polymorphic object/sequence operations def pluck(self, *args): + """ + Plucks out one or more attributes from either an object or a sequence of + objects (projection). + """ return Pluck(self, *args) def without(self, *args): + """ + The opposite of pluck; takes an object or a sequence of objects, and + returns them with the specified paths removed. + """ return Without(self, *args) - def do(self, *args): + def do(self, *args): # pylint: disable=invalid-name return FunCall(self, *args) def default(self, *args): @@ -361,7 +462,7 @@ def replace(self, *args, **kwargs): def delete(self, *args, **kwargs): return Delete(self, *args, **kwargs) - # Rql type inspection + # Reql type inspection def coerce_to(self, *args): return CoerceTo(self, *args) @@ -397,24 +498,28 @@ def set_difference(self, *args): # Operator used for get attr / nth / slice. Non-operator versions below # in cases of ambiguity + # TODO + # Understand the type of index. Apparently it can be a slice or some + # other type accepted by Bracket; where that type is defined is unclear. def __getitem__(self, index): - if isinstance(index, slice): - if index.stop: - return Slice(self, index.start or 0, index.stop, bracket_operator=True) - else: - return Slice( - self, - index.start or 0, - -1, - right_bound="closed", - bracket_operator=True, - ) - else: + if not isinstance(index, slice): return Bracket(self, index, bracket_operator=True) - def __iter__(*args, **kwargs): + if index.stop: + return Slice(self, index.start or 0, index.stop, bracket_operator=True) + + return Slice( + self, + index.start or 0, + -1, + right_bound="closed", + bracket_operator=True, + ) + + def __iter__(self): raise ReqlDriverError( - "__iter__ called on an RqlQuery object.\n" + "__iter__ called on a ReqlQuery object.\n" "To iterate over the results of a query, call run first.\n" "To iterate inside a query, use map or for_each." ) @@ -428,8 +533,14 @@ def nth(self, *args): def to_json(self, *args): return ToJsonString(self, *args) + # DEPRECATE: Remove this function in the next release def to_json_string(self, *args): - return ToJsonString(self, *args) + """ + Function `to_json_string` is an alias for `to_json` and will be removed + in the future. 
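A migration sketch (same query both ways; assumes an open connection
``conn``)::

    r.expr({"a": 1}).to_json_string().run(conn)  # deprecated alias
    r.expr({"a": 1}).to_json().run(conn)         # preferred spelling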
+ """ + + return self.to_json(*args) def match(self, *args): return Match(self, *args) @@ -477,21 +588,19 @@ def map(self, *args): if len(args) > 0: # `func_wrap` only the last argument return Map(self, *(args[:-1] + (func_wrap(args[-1]),))) - else: - return Map(self) + + return Map(self) def fold(self, *args, **kwargs): if len(args) > 0: # `func_wrap` only the last argument before optional arguments - # Also `func_wrap` keyword arguments + return Fold( + self, + *(args[:-1] + (func_wrap(args[-1]),)), + **{key: func_wrap(val) for key, val in kwargs.items()}, + ) - # Nice syntax not supported by python2.6 - kwfuncargs = {} - for arg_name in kwargs: - kwfuncargs[arg_name] = func_wrap(kwargs[arg_name]) - return Fold(self, *(args[:-1] + (func_wrap(args[-1]),)), **kwfuncargs) - else: - return Fold(self) + return Fold(self) def filter(self, *args, **kwargs): return Filter(self, *[func_wrap(arg) for arg in args], **kwargs) @@ -509,15 +618,12 @@ def between(self, *args, **kwargs): def distinct(self, *args, **kwargs): return Distinct(self, *args, **kwargs) - # NB: Can't overload __len__ because Python doesn't - # allow us to return a non-integer + # Can't overload __len__ because Python doesn't allow us to return a non-integer def count(self, *args): return Count(self, *[func_wrap(arg) for arg in args]) def union(self, *args, **kwargs): - func_kwargs = {} - for key in kwargs: - func_kwargs[key] = func_wrap(kwargs[key]) + func_kwargs = {key: func_wrap(val) for key, val in kwargs.items()} return Union(self, *args, **func_kwargs) def inner_join(self, *args): @@ -561,7 +667,6 @@ def sample(self, *args): return Sample(self, *args) # Time support - def to_iso8601(self, *args): return ToISO8601(self, *args) @@ -627,117 +732,148 @@ def polygon_sub(self, *args): return PolygonSub(self, *args) -# These classes define how nodes are printed by overloading `compose` -def needs_wrap(arg): - return isinstance(arg, (Datum, MakeArray, MakeObj)) - - -class RqlBoolOperQuery(RqlQuery): - def __init__(self, *args, **optargs): +class ReqlBoolOperQuery(ReqlQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.statement_infix = "" self.infix = False - RqlQuery.__init__(self, *args, **optargs) def set_infix(self): self.infix = True - def compose(self, args, optargs): - t_args = [ - T("r.expr(", args[i], ")") if needs_wrap(self._args[i]) else args[i] - for i in xrange(len(args)) + def compose(self, args, kwargs): # pylint: disable=unused-argument + term_args = [ + EnhancedTuple("r.expr(", args[i], ")") + if needs_wrap(self._args[i]) + else args[i] + for i in range(len(args)) ] if self.infix: - return T("(", T(*t_args, intsp=[" ", self.statement_infix, " "]), ")") - else: - return T("r.", self.statement, "(", T(*t_args, intsp=", "), ")") + return EnhancedTuple( + "(", + EnhancedTuple( + *term_args, int_separator=[" ", self.statement_infix, " "] + ), + ")", + ) + return EnhancedTuple( + "r.", + self.statement, + "(", + EnhancedTuple(*term_args, int_separator=", "), + ")", + ) -class RqlBiOperQuery(RqlQuery): - def compose(self, args, optargs): - t_args = [ - T("r.expr(", args[i], ")") if needs_wrap(self._args[i]) else args[i] - for i in xrange(len(args)) + +class ReqlBiOperQuery(ReqlQuery): + """ + RethinkDB binary query operation. 
+ """ + + def compose(self, args, kwargs): # pylint: disable=unused-argument + term_args = [ + EnhancedTuple("r.expr(", args[i], ")") + if needs_wrap(self._args[i]) + else args[i] + for i in range(len(args)) ] - return T("(", T(*t_args, intsp=[" ", self.statement, " "]), ")") + return EnhancedTuple( + "(", + EnhancedTuple(*term_args, int_separator=[" ", self.statement, " "]), + ")", + ) + + +class ReqlBiCompareOperQuery(ReqlBiOperQuery): + """ + RethinkDB comparison operator query. + """ -class RqlBiCompareOperQuery(RqlBiOperQuery): - def __init__(self, *args, **optargs): - RqlBiOperQuery.__init__(self, *args, **optargs) + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) for arg in args: - try: - if arg.infix: - err = ( - "Calling '%s' on result of infix bitwise operator:\n" - "%s.\n" - "This is almost always a precedence error.\n" - "Note that `a < b | b < c` <==> `a < (b | b) < c`.\n" - "If you really want this behavior, use `.or_` or " - "`.and_` instead." - ) - raise ReqlDriverCompileError( - err % (self.statement, QueryPrinter(self).print_query()) - ) - except AttributeError: - pass # No infix attribute, so not possible to be an infix bool operator + if hasattr(arg, "infix"): + raise ReqlDriverCompileError( + f""" + Calling '{self.statement}' on result of infix bitwise operator: + {QueryPrinter(self).query}\n + This is almost always a precedence error. + Note that `a < b | b < c` <==> `a < (b | b) < c`. + If you really want this behavior, use `.or_` or `.and_` instead. + """ + ) -class RqlTopLevelQuery(RqlQuery): - def compose(self, args, optargs): - args.extend([T(key, "=", value) for key, value in dict_items(optargs)]) - return T("r.", self.statement, "(", T(*(args), intsp=", "), ")") +class ReqlTopLevelQuery(ReqlQuery): + def compose(self, args, kwargs): + args.extend([EnhancedTuple(key, "=", value) for key, value in kwargs.items()]) + return EnhancedTuple( + "r.", self.statement, "(", EnhancedTuple(*(args), int_separator=", "), ")" + ) -class RqlMethodQuery(RqlQuery): - def compose(self, args, optargs): +class ReqlMethodQuery(ReqlQuery): + def compose(self, args, kwargs): if len(args) == 0: - return T("r.", self.statement, "()") + return EnhancedTuple("r.", self.statement, "()") if needs_wrap(self._args[0]): - args[0] = T("r.expr(", args[0], ")") + args[0] = EnhancedTuple("r.expr(", args[0], ")") restargs = args[1:] - restargs.extend([T(k, "=", v) for k, v in dict_items(optargs)]) - restargs = T(*restargs, intsp=", ") + restargs.extend([EnhancedTuple(k, "=", v) for k, v in kwargs.items()]) + restargs = EnhancedTuple(*restargs, int_separator=", ") + + return EnhancedTuple(args[0], ".", self.statement, "(", restargs, ")") - return T(args[0], ".", self.statement, "(", restargs, ")") +class ReqlBracketQuery(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + self.bracket_operator = False -class RqlBracketQuery(RqlMethodQuery): - def __init__(self, *args, **optargs): - if "bracket_operator" in optargs: - self.bracket_operator = optargs["bracket_operator"] - del optargs["bracket_operator"] - else: - self.bracket_operator = False + if "bracket_operator" in kwargs: + self.bracket_operator = kwargs["bracket_operator"] + del kwargs["bracket_operator"] - RqlMethodQuery.__init__(self, *args, **optargs) + super().__init__(*args, **kwargs) - def compose(self, args, optargs): + def compose(self, args, kwargs): if self.bracket_operator: if needs_wrap(self._args[0]): - args[0] = T("r.expr(", args[0], ")") - return T(args[0], "[", T(*args[1:], intsp=[","]), "]") - 
else: - return RqlMethodQuery.compose(self, args, optargs) + args[0] = EnhancedTuple("r.expr(", args[0], ")") + return EnhancedTuple( + args[0], "[", EnhancedTuple(*args[1:], int_separator=[","]), "]" + ) + return super().compose(args, kwargs) + + +class ReqlTzinfo(datetime.tzinfo): + """ + RethinkDB timezone information. + """ -class RqlTzinfo(datetime.tzinfo): def __init__(self, offsetstr): + super().__init__() + hours, minutes = map(int, offsetstr.split(":")) self.offsetstr = offsetstr self.delta = datetime.timedelta(hours=hours, minutes=minutes) def __getinitargs__(self): + # Consciously return a tuple return (self.offsetstr,) def __copy__(self): - return RqlTzinfo(self.offsetstr) + return ReqlTzinfo(self.offsetstr) def __deepcopy__(self, memo): - return RqlTzinfo(self.offsetstr) + return ReqlTzinfo(self.offsetstr) def utcoffset(self, dt): return self.delta @@ -749,462 +885,459 @@ def dst(self, dt): return datetime.timedelta(0) -# Python only allows immutable built-in types to be hashed, such as -# for keys in a dict This means we can't use lists or dicts as keys in -# grouped data objects, so we convert them to tuples and frozensets, -# respectively. This may make it a little harder for users to work -# with converted grouped data, unless they do a simple iteration over -# the result -def recursively_make_hashable(obj): - if isinstance(obj, list): - return tuple([recursively_make_hashable(i) for i in obj]) - elif isinstance(obj, dict): - return frozenset( - [(k, recursively_make_hashable(v)) for k, v in dict_items(obj)] - ) - return obj - - -class ReQLEncoder(json.JSONEncoder): +class Datum(ReqlQuery): """ - Default JSONEncoder subclass to handle query conversion. + RethinkDB datum query. + + This class handles the conversion of Reql terminal types in both directions + Going to the server though it does not support R_ARRAY or R_OBJECT as those + are alternately handled by the MakeArray and MakeObject nodes. Why do this? + MakeArray and MakeObject are more flexible, allowing us to construct array + and object expressions from nested Reql expressions. Constructing pure + R_ARRAYs and R_OBJECTs would require verifying that at all nested levels + our arrays and objects are composed only of basic types. """ - def __init__(self): - json.JSONEncoder.__init__( - self, - ensure_ascii=False, - allow_nan=False, - check_circular=False, - separators=(",", ":"), - ) - - def default(self, obj): - if isinstance(obj, RqlQuery): - return obj.build() - return json.JSONEncoder.default(self, obj) - - -class ReQLDecoder(json.JSONDecoder): - """ - Default JSONDecoder subclass to handle pseudo-type conversion. - """ - - def __init__(self, reql_format_opts=None): - json.JSONDecoder.__init__(self, object_hook=self.convert_pseudotype) - self.reql_format_opts = reql_format_opts or {} - - def convert_time(self, obj): - if "epoch_time" not in obj: - raise ReqlDriverError( - ( - "pseudo-type TIME object %s does not " - + 'have expected field "epoch_time".' - ) - % json.dumps(obj) - ) - - if "timezone" in obj: - return datetime.datetime.fromtimestamp( - obj["epoch_time"], RqlTzinfo(obj["timezone"]) - ) - else: - return datetime.datetime.utcfromtimestamp(obj["epoch_time"]) - - @staticmethod - def convert_grouped_data(obj): - if "data" not in obj: - raise ReqlDriverError( - ( - "pseudo-type GROUPED_DATA object" - + ' %s does not have the expected field "data".' 
- ) - % json.dumps(obj) - ) - return dict([(recursively_make_hashable(k), v) for k, v in obj["data"]]) - - @staticmethod - def convert_binary(obj): - if "data" not in obj: - raise ReqlDriverError( - ( - "pseudo-type BINARY object %s does not have " - + 'the expected field "data".' - ) - % json.dumps(obj) - ) - return RqlBinary(base64.b64decode(obj["data"].encode("utf-8"))) - - def convert_pseudotype(self, obj): - reql_type = obj.get("$reql_type$") - if reql_type is not None: - if reql_type == "TIME": - time_format = self.reql_format_opts.get("time_format") - if time_format is None or time_format == "native": - # Convert to native python datetime object - return self.convert_time(obj) - elif time_format != "raw": - raise ReqlDriverError( - 'Unknown time_format run option "%s".' % time_format - ) - elif reql_type == "GROUPED_DATA": - group_format = self.reql_format_opts.get("group_format") - if group_format is None or group_format == "native": - return self.convert_grouped_data(obj) - elif group_format != "raw": - raise ReqlDriverError( - 'Unknown group_format run option "%s".' % group_format - ) - elif reql_type == "GEOMETRY": - # No special support for this. Just return the raw object - return obj - elif reql_type == "BINARY": - binary_format = self.reql_format_opts.get("binary_format") - if binary_format is None or binary_format == "native": - return self.convert_binary(obj) - elif binary_format != "raw": - raise ReqlDriverError( - 'Unknown binary_format run option "%s".' % binary_format - ) - else: - raise ReqlDriverError("Unknown pseudo-type %s" % reql_type) - # If there was no pseudotype, or the relevant format is raw, return - # the original object - return obj - - -# This class handles the conversion of RQL terminal types in both directions -# Going to the server though it does not support R_ARRAY or R_OBJECT as those -# are alternately handled by the MakeArray and MakeObject nodes. Why do this? -# MakeArray and MakeObject are more flexible, allowing us to construct array -# and object expressions from nested RQL expressions. Constructing pure -# R_ARRAYs and R_OBJECTs would require verifying that at all nested levels -# our arrays and objects are composed only of basic types. -class Datum(RqlQuery): def __init__(self, val): - super(Datum, self).__init__() + super().__init__() self.data = val def build(self): return self.data - def compose(self, args, optargs): + def compose(self, args, kwargs): # pylint: disable=unused-argument return repr(self.data) -class MakeArray(RqlQuery): - term_type = P_TERM.MAKE_ARRAY +class MakeArray(ReqlQuery): + """ + RethinkDB array composer query. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MAKE_ARRAY - def compose(self, args, optargs): - return T("[", T(*args, intsp=", "), "]") + # pylint: disable=unused-argument,no-self-use + def compose(self, args, kwargs): + return EnhancedTuple("[", EnhancedTuple(*args, int_separator=", "), "]") -class MakeObj(RqlQuery): - term_type = P_TERM.MAKE_OBJ +class MakeObj(ReqlQuery): + def __init__(self, obj_dict: dict, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MAKE_OBJ - # We cannot inherit from RqlQuery because of potential conflicts with - # the `self` parameter. This is not a problem for other RqlQuery sub- - # classes unless we add a 'self' optional argument to one of them. - # TODO: @gabor-boros Figure out is the above still an issue or not. 
- def __init__(self, obj_dict): - super(MakeObj, self).__init__() - for key, value in dict_items(obj_dict): - if not isinstance(key, (str, unicode)): + for key, value in obj_dict.items(): + if not isinstance(key, str): raise ReqlDriverCompileError("Object keys must be strings.") - self.optargs[key] = expr(value) + + self.kwargs[key] = expr(value) def build(self): - return self.optargs + return self.kwargs - def compose(self, args, optargs): - return T( + # pylint: disable=unused-argument,no-self-use + def compose(self, args, kwargs): + return EnhancedTuple( "r.expr({", - T( - *[T(repr(key), ": ", value) for key, value in dict_items(optargs)], - intsp=", " + EnhancedTuple( + *[ + EnhancedTuple(repr(key), ": ", value) + for key, value in kwargs.items() + ], + int_separator=", ", ), "})", ) -class Var(RqlQuery): - term_type = P_TERM.VAR +class Var(ReqlQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.VAR - def compose(self, args, optargs): + # pylint: disable=unused-argument,no-self-use + def compose(self, args, kwargs): return "var_" + args[0] -class JavaScript(RqlTopLevelQuery): - term_type = P_TERM.JAVASCRIPT - statement = "js" +class JavaScript(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.JAVASCRIPT + self.statement = "js" -class Http(RqlTopLevelQuery): - term_type = P_TERM.HTTP - statement = "http" +class Http(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.HTTP + self.statement = "http" -class UserError(RqlTopLevelQuery): - term_type = P_TERM.ERROR - statement = "error" +class UserError(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.ERROR + self.statement = "error" -class Random(RqlTopLevelQuery): - term_type = P_TERM.RANDOM - statement = "random" +class Random(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.RANDOM + self.statement = "random" -class Changes(RqlMethodQuery): - term_type = P_TERM.CHANGES - statement = "changes" +class Changes(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.CHANGES + self.statement = "changes" -class Default(RqlMethodQuery): - term_type = P_TERM.DEFAULT - statement = "default" +class Default(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DEFAULT + self.statement = "default" -class ImplicitVar(RqlQuery): - term_type = P_TERM.IMPLICIT_VAR +class ImplicitVar(ReqlQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.IMPLICIT_VAR def __call__(self, *args, **kwargs): raise TypeError("'r.row' is not callable, use 'r.row[...]' instead") - def compose(self, args, optargs): + # pylint: disable=unused-argument,no-self-use + def compose(self, args, kwargs): return "r.row" -class Eq(RqlBiCompareOperQuery): - term_type = P_TERM.EQ - statement = "==" +class Eq(ReqlBiCompareOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.EQ + self.statement = "==" -class Ne(RqlBiCompareOperQuery): - term_type = P_TERM.NE - statement = "!=" +class Ne(ReqlBiCompareOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.NE + 
self.statement = "!=" -class Lt(RqlBiCompareOperQuery): - term_type = P_TERM.LT - statement = "<" +class Lt(ReqlBiCompareOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.LT + self.statement = "<" -class Le(RqlBiCompareOperQuery): - term_type = P_TERM.LE - statement = "<=" +class Le(ReqlBiCompareOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.LE + self.statement = "<=" -class Gt(RqlBiCompareOperQuery): - term_type = P_TERM.GT - statement = ">" +class Gt(ReqlBiCompareOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GT + self.statement = ">" -class Ge(RqlBiCompareOperQuery): - term_type = P_TERM.GE - statement = ">=" +class Ge(ReqlBiCompareOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GE + self.statement = ">=" -class Not(RqlQuery): - term_type = P_TERM.NOT +class Not(ReqlQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.NOT - def compose(self, args, optargs): + def compose(self, args, kwargs): # pylint: disable=unused-argument if isinstance(self._args[0], Datum): - args[0] = T("r.expr(", args[0], ")") - return T("(~", args[0], ")") + args[0] = EnhancedTuple("r.expr(", args[0], ")") + + return EnhancedTuple("(~", args[0], ")") -class Add(RqlBiOperQuery): - term_type = P_TERM.ADD - statement = "+" +class Add(ReqlBiOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.ADD + self.statement = "+" -class Sub(RqlBiOperQuery): - term_type = P_TERM.SUB - statement = "-" +class Sub(ReqlBiOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SUB + self.statement = "-" -class Mul(RqlBiOperQuery): - term_type = P_TERM.MUL - statement = "*" +class Mul(ReqlBiOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MUL + self.statement = "*" -class Div(RqlBiOperQuery): - term_type = P_TERM.DIV - statement = "/" +class Div(ReqlBiOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DIV + self.statement = "/" -class Mod(RqlBiOperQuery): - term_type = P_TERM.MOD - statement = "%" +class Mod(ReqlBiOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MOD + self.statement = "%" -class BitAnd(RqlBoolOperQuery): - term_type = P_TERM.BIT_AND - statement = "bit_and" +class BitAnd(ReqlBoolOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BIT_AND + self.statement = "bit_and" -class BitOr(RqlBoolOperQuery): - term_type = P_TERM.BIT_OR - statement = "bit_or" +class BitOr(ReqlBoolOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BIT_OR + self.statement = "bit_or" -class BitXor(RqlBoolOperQuery): - term_type = P_TERM.BIT_XOR - statement = "bit_xor" +class BitXor(ReqlBoolOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BIT_XOR + self.statement = "bit_xor" -class BitNot(RqlMethodQuery): - term_type = P_TERM.BIT_NOT - statement = "bit_not" +class BitNot(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, 
**kwargs) + self.term_type = P_TERM.BIT_NOT + self.statement = "bit_not" -class BitSal(RqlBoolOperQuery): - term_type = P_TERM.BIT_SAL - statement = "bit_sal" +class BitSal(ReqlBoolOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BIT_SAL + self.statement = "bit_sal" -class BitSar(RqlBoolOperQuery): - term_type = P_TERM.BIT_SAR - statement = "bit_sar" +class BitSar(ReqlBoolOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BIT_SAR + self.statement = "bit_sar" -class Floor(RqlMethodQuery): - term_type = P_TERM.FLOOR - statement = "floor" +class Floor(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.FLOOR + self.statement = "floor" -class Ceil(RqlMethodQuery): - term_type = P_TERM.CEIL - statement = "ceil" +class Ceil(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.CEIL + self.statement = "ceil" -class Round(RqlMethodQuery): - term_type = P_TERM.ROUND - statement = "round" +class Round(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.ROUND + self.statement = "round" -class Append(RqlMethodQuery): - term_type = P_TERM.APPEND - statement = "append" +class Append(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.APPEND + self.statement = "append" -class Prepend(RqlMethodQuery): - term_type = P_TERM.PREPEND - statement = "prepend" +class Prepend(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.PREPEND + self.statement = "prepend" -class Difference(RqlMethodQuery): - term_type = P_TERM.DIFFERENCE - statement = "difference" +class Difference(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DIFFERENCE + self.statement = "difference" -class SetInsert(RqlMethodQuery): - term_type = P_TERM.SET_INSERT - statement = "set_insert" +class SetInsert(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SET_INSERT + self.statement = "set_insert" -class SetUnion(RqlMethodQuery): - term_type = P_TERM.SET_UNION - statement = "set_union" +class SetUnion(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SET_UNION + self.statement = "set_union" -class SetIntersection(RqlMethodQuery): - term_type = P_TERM.SET_INTERSECTION - statement = "set_intersection" +class SetIntersection(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SET_INTERSECTION + self.statement = "set_intersection" -class SetDifference(RqlMethodQuery): - term_type = P_TERM.SET_DIFFERENCE - statement = "set_difference" +class SetDifference(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SET_DIFFERENCE + self.statement = "set_difference" -class Slice(RqlBracketQuery): - term_type = P_TERM.SLICE - statement = "slice" +class Slice(ReqlBracketQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SLICE + self.statement = "slice" # Slice has a special bracket syntax, implemented here - def 
compose(self, args, optargs): + def compose(self, args, kwargs): if self.bracket_operator: if needs_wrap(self._args[0]): - args[0] = T("r.expr(", args[0], ")") - return T(args[0], "[", args[1], ":", args[2], "]") - else: - return RqlBracketQuery.compose(self, args, optargs) + args[0] = EnhancedTuple("r.expr(", args[0], ")") + + return EnhancedTuple(args[0], "[", args[1], ":", args[2], "]") + + return ReqlBracketQuery.compose(self, args, kwargs) -class Skip(RqlMethodQuery): - term_type = P_TERM.SKIP - statement = "skip" +class Skip(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SKIP + self.statement = "skip" -class Limit(RqlMethodQuery): - term_type = P_TERM.LIMIT - statement = "limit" +class Limit(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.LIMIT + self.statement = "limit" -class GetField(RqlBracketQuery): - term_type = P_TERM.GET_FIELD - statement = "get_field" +class GetField(ReqlBracketQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GET_FIELD + self.statement = "get_field" -class Bracket(RqlBracketQuery): - term_type = P_TERM.BRACKET - statement = "bracket" +class Bracket(ReqlBracketQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BRACKET + self.statement = "bracket" -class Contains(RqlMethodQuery): - term_type = P_TERM.CONTAINS - statement = "contains" +class Contains(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.CONTAINS + self.statement = "contains" -class HasFields(RqlMethodQuery): - term_type = P_TERM.HAS_FIELDS - statement = "has_fields" +class HasFields(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.HAS_FIELDS + self.statement = "has_fields" -class WithFields(RqlMethodQuery): - term_type = P_TERM.WITH_FIELDS - statement = "with_fields" +class WithFields(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.WITH_FIELDS + self.statement = "with_fields" -class Keys(RqlMethodQuery): - term_type = P_TERM.KEYS - statement = "keys" +class Keys(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.KEYS + self.statement = "keys" -class Values(RqlMethodQuery): - term_type = P_TERM.VALUES - statement = "values" +class Values(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.VALUES + self.statement = "values" -class Object(RqlMethodQuery): - term_type = P_TERM.OBJECT - statement = "object" +class Object(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.OBJECT + self.statement = "object" -class Pluck(RqlMethodQuery): - term_type = P_TERM.PLUCK - statement = "pluck" +class Pluck(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.PLUCK + self.statement = "pluck" -class Without(RqlMethodQuery): - term_type = P_TERM.WITHOUT - statement = "without" +class Without(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.WITHOUT + self.statement = "without" -class Merge(RqlMethodQuery): - term_type = 
P_TERM.MERGE - statement = "merge" +class Merge(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MERGE + self.statement = "merge" -class Between(RqlMethodQuery): - term_type = P_TERM.BETWEEN - statement = "between" +class Between(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BETWEEN + self.statement = "between" -class DB(RqlTopLevelQuery): - term_type = P_TERM.DB - statement = "db" +class DB(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DB + self.statement = "db" def table_list(self, *args): return TableList(self, *args) @@ -1234,33 +1367,43 @@ def table(self, *args, **kwargs): return Table(self, *args, **kwargs) -class FunCall(RqlQuery): - term_type = P_TERM.FUNCALL - +class FunCall(ReqlQuery): # This object should be constructed with arguments first, and the # function itself as the last parameter. This makes it easier for # the places where this object is constructed. The actual wire # format is function first, arguments last, so we flip them around # before passing it down to the base class constructor. - def __init__(self, *args): + def __init__(self, *args, **kwargs): if len(args) == 0: raise ReqlDriverCompileError("Expected 1 or more arguments but found 0.") + args = [func_wrap(args[-1])] + list(args[:-1]) - RqlQuery.__init__(self, *args) + super().__init__(*args, **kwargs) + self.term_type = P_TERM.FUNCALL - def compose(self, args, optargs): + def compose(self, args, kwargs): # pylint: disable=unused-argument if len(args) != 2: - return T("r.do(", T(T(*(args[1:]), intsp=", "), args[0], intsp=", "), ")") + return EnhancedTuple( + "r.do(", + EnhancedTuple( + EnhancedTuple(*(args[1:]), int_separator=", "), + args[0], + int_separator=", ", + ), + ")", + ) if isinstance(self._args[1], Datum): - args[1] = T("r.expr(", args[1], ")") + args[1] = EnhancedTuple("r.expr(", args[1], ")") - return T(args[1], ".do(", args[0], ")") + return EnhancedTuple(args[1], ".do(", args[0], ")") -class Table(RqlQuery): - term_type = P_TERM.TABLE - statement = "table" +class Table(ReqlQuery): # pylint: disable=too-many-public-methods + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TABLE + self.statement = "table" def insert(self, *args, **kwargs): return Insert(self, *[expr(arg) for arg in args], **kwargs) @@ -1280,6 +1423,7 @@ def get_write_hook(self, *args, **kwargs): def index_create(self, *args, **kwargs): if len(args) > 1: args = [args[0]] + [func_wrap(arg) for arg in args[1:]] + return IndexCreate(self, *args, **kwargs) def index_drop(self, *args): @@ -1327,425 +1471,576 @@ def get_nearest(self, *args, **kwargs): def uuid(self, *args, **kwargs): return UUID(self, *args, **kwargs) - def compose(self, args, optargs): - args.extend([T(k, "=", v) for k, v in dict_items(optargs)]) + def compose(self, args, kwargs): + args.extend([EnhancedTuple(k, "=", v) for k, v in kwargs.items()]) + if isinstance(self._args[0], DB): - return T(args[0], ".table(", T(*(args[1:]), intsp=", "), ")") - else: - return T("r.table(", T(*(args), intsp=", "), ")") + return EnhancedTuple( + args[0], ".table(", EnhancedTuple(*(args[1:]), int_separator=", "), ")" + ) + + return EnhancedTuple( + "r.table(", EnhancedTuple(*(args), int_separator=", "), ")" + ) -class Get(RqlMethodQuery): - term_type = P_TERM.GET - statement = "get" +class Get(ReqlMethodQuery): + def 
__init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GET + self.statement = "get" -class GetAll(RqlMethodQuery): - term_type = P_TERM.GET_ALL - statement = "get_all" +class GetAll(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GET_ALL + self.statement = "get_all" -class GetIntersecting(RqlMethodQuery): - term_type = P_TERM.GET_INTERSECTING - statement = "get_intersecting" +class GetIntersecting(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GET_INTERSECTING + self.statement = "get_intersecting" -class GetNearest(RqlMethodQuery): - term_type = P_TERM.GET_NEAREST - statement = "get_nearest" +class GetNearest(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GET_NEAREST + self.statement = "get_nearest" -class UUID(RqlMethodQuery): - term_type = P_TERM.UUID - statement = "uuid" +class UUID(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.UUID + self.statement = "uuid" -class Reduce(RqlMethodQuery): - term_type = P_TERM.REDUCE - statement = "reduce" +class Reduce(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.REDUCE + self.statement = "reduce" -class Sum(RqlMethodQuery): - term_type = P_TERM.SUM - statement = "sum" +class Sum(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SUM + self.statement = "sum" -class Avg(RqlMethodQuery): - term_type = P_TERM.AVG - statement = "avg" +class Avg(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.AVG + self.statement = "avg" -class Min(RqlMethodQuery): - term_type = P_TERM.MIN - statement = "min" +class Min(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MIN + self.statement = "min" -class Max(RqlMethodQuery): - term_type = P_TERM.MAX - statement = "max" +class Max(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MAX + self.statement = "max" -class Map(RqlMethodQuery): - term_type = P_TERM.MAP - statement = "map" +class Map(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MAP + self.statement = "map" -class Fold(RqlMethodQuery): - term_type = P_TERM.FOLD - statement = "fold" +class Fold(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.FOLD + self.statement = "fold" -class Filter(RqlMethodQuery): - term_type = P_TERM.FILTER - statement = "filter" +class Filter(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.FILTER + self.statement = "filter" -class ConcatMap(RqlMethodQuery): - term_type = P_TERM.CONCAT_MAP - statement = "concat_map" +class ConcatMap(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.CONCAT_MAP + self.statement = "concat_map" -class OrderBy(RqlMethodQuery): - term_type = P_TERM.ORDER_BY - statement = "order_by" +class OrderBy(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + 
super().__init__(*args, **kwargs) + self.term_type = P_TERM.ORDER_BY + self.statement = "order_by" -class Distinct(RqlMethodQuery): - term_type = P_TERM.DISTINCT - statement = "distinct" +class Distinct(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DISTINCT + self.statement = "distinct" -class Count(RqlMethodQuery): - term_type = P_TERM.COUNT - statement = "count" +class Count(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.COUNT + self.statement = "count" -class Union(RqlMethodQuery): - term_type = P_TERM.UNION - statement = "union" +class Union(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.UNION + self.statement = "union" -class Nth(RqlBracketQuery): - term_type = P_TERM.NTH - statement = "nth" +class Nth(ReqlBracketQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.NTH + self.statement = "nth" -class Match(RqlMethodQuery): - term_type = P_TERM.MATCH - statement = "match" +class Match(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MATCH + self.statement = "match" -class ToJsonString(RqlMethodQuery): - term_type = P_TERM.TO_JSON_STRING - statement = "to_json_string" +class ToJsonString(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TO_JSON_STRING + self.statement = "to_json_string" -class Split(RqlMethodQuery): - term_type = P_TERM.SPLIT - statement = "split" +class Split(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SPLIT + self.statement = "split" -class Upcase(RqlMethodQuery): - term_type = P_TERM.UPCASE - statement = "upcase" +class Upcase(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.UPCASE + self.statement = "upcase" -class Downcase(RqlMethodQuery): - term_type = P_TERM.DOWNCASE - statement = "downcase" +class Downcase(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DOWNCASE + self.statement = "downcase" -class OffsetsOf(RqlMethodQuery): - term_type = P_TERM.OFFSETS_OF - statement = "offsets_of" +class OffsetsOf(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.OFFSETS_OF + self.statement = "offsets_of" -class IsEmpty(RqlMethodQuery): - term_type = P_TERM.IS_EMPTY - statement = "is_empty" +class IsEmpty(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.IS_EMPTY + self.statement = "is_empty" -class Group(RqlMethodQuery): - term_type = P_TERM.GROUP - statement = "group" +class Group(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GROUP + self.statement = "group" -class InnerJoin(RqlMethodQuery): - term_type = P_TERM.INNER_JOIN - statement = "inner_join" +class InnerJoin(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INNER_JOIN + self.statement = "inner_join" -class OuterJoin(RqlMethodQuery): - term_type = P_TERM.OUTER_JOIN - statement = "outer_join" +class OuterJoin(ReqlMethodQuery): + 
def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.OUTER_JOIN + self.statement = "outer_join" -class EqJoin(RqlMethodQuery): - term_type = P_TERM.EQ_JOIN - statement = "eq_join" +class EqJoin(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.EQ_JOIN + self.statement = "eq_join" -class Zip(RqlMethodQuery): - term_type = P_TERM.ZIP - statement = "zip" +class Zip(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.ZIP + self.statement = "zip" -class CoerceTo(RqlMethodQuery): - term_type = P_TERM.COERCE_TO - statement = "coerce_to" +class CoerceTo(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.COERCE_TO + self.statement = "coerce_to" -class Ungroup(RqlMethodQuery): - term_type = P_TERM.UNGROUP - statement = "ungroup" +class Ungroup(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.UNGROUP + self.statement = "ungroup" -class TypeOf(RqlMethodQuery): - term_type = P_TERM.TYPE_OF - statement = "type_of" +class TypeOf(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TYPE_OF + self.statement = "type_of" -class Update(RqlMethodQuery): - term_type = P_TERM.UPDATE - statement = "update" +class Update(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.UPDATE + self.statement = "update" -class Delete(RqlMethodQuery): - term_type = P_TERM.DELETE - statement = "delete" +class Delete(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DELETE + self.statement = "delete" -class Replace(RqlMethodQuery): - term_type = P_TERM.REPLACE - statement = "replace" +class Replace(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.REPLACE + self.statement = "replace" -class Insert(RqlMethodQuery): - term_type = P_TERM.INSERT - statement = "insert" +class Insert(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INSERT + self.statement = "insert" -class DbCreate(RqlTopLevelQuery): - term_type = P_TERM.DB_CREATE - statement = "db_create" +class DbCreate(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DB_CREATE + self.statement = "db_create" -class DbDrop(RqlTopLevelQuery): - term_type = P_TERM.DB_DROP - statement = "db_drop" +class DbDrop(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DB_DROP + self.statement = "db_drop" -class DbList(RqlTopLevelQuery): - term_type = P_TERM.DB_LIST - statement = "db_list" +class DbList(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DB_LIST + self.statement = "db_list" -class TableCreate(RqlMethodQuery): - term_type = P_TERM.TABLE_CREATE - statement = "table_create" +class TableCreate(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TABLE_CREATE + self.statement = "table_create" -class TableCreateTL(RqlTopLevelQuery): - term_type = P_TERM.TABLE_CREATE 
- statement = "table_create" +class TableCreateTL(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TABLE_CREATE + self.statement = "table_create" -class TableDrop(RqlMethodQuery): - term_type = P_TERM.TABLE_DROP - statement = "table_drop" +class TableDrop(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TABLE_DROP + self.statement = "table_drop" -class TableDropTL(RqlTopLevelQuery): - term_type = P_TERM.TABLE_DROP - statement = "table_drop" +class TableDropTL(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TABLE_DROP + self.statement = "table_drop" -class TableList(RqlMethodQuery): - term_type = P_TERM.TABLE_LIST - statement = "table_list" +class TableList(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TABLE_LIST + self.statement = "table_list" -class TableListTL(RqlTopLevelQuery): - term_type = P_TERM.TABLE_LIST - statement = "table_list" +class TableListTL(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TABLE_LIST + self.statement = "table_list" -class SetWriteHook(RqlMethodQuery): - term_type = P_TERM.SET_WRITE_HOOK - statement = "set_write_hook" +class SetWriteHook(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SET_WRITE_HOOK + self.statement = "set_write_hook" -class GetWriteHook(RqlMethodQuery): - term_type = P_TERM.GET_WRITE_HOOK - statement = "get_write_hook" +class GetWriteHook(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GET_WRITE_HOOK + self.statement = "get_write_hook" -class IndexCreate(RqlMethodQuery): - term_type = P_TERM.INDEX_CREATE - statement = "index_create" +class IndexCreate(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INDEX_CREATE + self.statement = "index_create" -class IndexDrop(RqlMethodQuery): - term_type = P_TERM.INDEX_DROP - statement = "index_drop" +class IndexDrop(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INDEX_DROP + self.statement = "index_drop" -class IndexRename(RqlMethodQuery): - term_type = P_TERM.INDEX_RENAME - statement = "index_rename" +class IndexRename(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INDEX_RENAME + self.statement = "index_rename" -class IndexList(RqlMethodQuery): - term_type = P_TERM.INDEX_LIST - statement = "index_list" +class IndexList(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INDEX_LIST + self.statement = "index_list" -class IndexStatus(RqlMethodQuery): - term_type = P_TERM.INDEX_STATUS - statement = "index_status" +class IndexStatus(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INDEX_STATUS + self.statement = "index_status" -class IndexWait(RqlMethodQuery): - term_type = P_TERM.INDEX_WAIT - statement = "index_wait" +class IndexWait(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INDEX_WAIT + 
self.statement = "index_wait" -class Config(RqlMethodQuery): - term_type = P_TERM.CONFIG - statement = "config" +class Config(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.CONFIG + self.statement = "config" -class Status(RqlMethodQuery): - term_type = P_TERM.STATUS - statement = "status" +class Status(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.STATUS + self.statement = "status" -class Wait(RqlMethodQuery): - term_type = P_TERM.WAIT - statement = "wait" +class Wait(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.WAIT + self.statement = "wait" -class Reconfigure(RqlMethodQuery): - term_type = P_TERM.RECONFIGURE - statement = "reconfigure" +class Reconfigure(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.RECONFIGURE + self.statement = "reconfigure" -class Rebalance(RqlMethodQuery): - term_type = P_TERM.REBALANCE - statement = "rebalance" +class Rebalance(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.REBALANCE + self.statement = "rebalance" -class Sync(RqlMethodQuery): - term_type = P_TERM.SYNC - statement = "sync" +class Sync(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SYNC + self.statement = "sync" -class Grant(RqlMethodQuery): - term_type = P_TERM.GRANT - statement = "grant" +class Grant(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GRANT + self.statement = "grant" -class GrantTL(RqlTopLevelQuery): - term_type = P_TERM.GRANT - statement = "grant" +class GrantTL(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GRANT + self.statement = "grant" -class Branch(RqlTopLevelQuery): - term_type = P_TERM.BRANCH - statement = "branch" +class Branch(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BRANCH + self.statement = "branch" -class Or(RqlBoolOperQuery): - term_type = P_TERM.OR - statement = "or_" - st_infix = "|" +class Or(ReqlBoolOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.OR + self.statement = "or_" + self.statement_infix = "|" -class And(RqlBoolOperQuery): - term_type = P_TERM.AND - statement = "and_" - st_infix = "&" +class And(ReqlBoolOperQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.AND + self.statement = "and_" + self.statement_infix = "&" -class ForEach(RqlMethodQuery): - term_type = P_TERM.FOR_EACH - statement = "for_each" +class ForEach(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.FOR_EACH + self.statement = "for_each" -class Info(RqlMethodQuery): - term_type = P_TERM.INFO - statement = "info" +class Info(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INFO + self.statement = "info" -class InsertAt(RqlMethodQuery): - term_type = P_TERM.INSERT_AT - statement = "insert_at" +class InsertAt(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, 
**kwargs) + self.term_type = P_TERM.INSERT_AT + self.statement = "insert_at" -class SpliceAt(RqlMethodQuery): - term_type = P_TERM.SPLICE_AT - statement = "splice_at" +class SpliceAt(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SPLICE_AT + self.statement = "splice_at" -class DeleteAt(RqlMethodQuery): - term_type = P_TERM.DELETE_AT - statement = "delete_at" +class DeleteAt(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DELETE_AT + self.statement = "delete_at" -class ChangeAt(RqlMethodQuery): - term_type = P_TERM.CHANGE_AT - statement = "change_at" +class ChangeAt(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.CHANGE_AT + self.statement = "change_at" -class Sample(RqlMethodQuery): - term_type = P_TERM.SAMPLE - statement = "sample" +class Sample(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SAMPLE + self.statement = "sample" -class Json(RqlTopLevelQuery): - term_type = P_TERM.JSON - statement = "json" +class Json(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.JSON + self.statement = "json" -class Args(RqlTopLevelQuery): - term_type = P_TERM.ARGS - statement = "args" +class Args(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.ARGS + self.statement = "args" # Use this class as a wrapper to 'bytes' so we can tell the difference # in Python2 (when reusing the result of a previous query). -class RqlBinary(bytes): +class ReqlBinary(bytes): def __new__(cls, *args, **kwargs): return bytes.__new__(cls, *args, **kwargs) def __repr__(self): + ellipsis = "..." if len(self) > 6 else "" excerpt = binascii.hexlify(self[0:6]).decode("utf-8") - excerpt = " ".join([excerpt[i : i + 2] for i in xrange(0, len(excerpt), 2)]) - excerpt = ( - ", '%s%s'" % (excerpt, "..." if len(self) > 6 else "") - if len(self) > 0 - else "" - ) - return "" % ( - len(self), - "s" if len(self) != 1 else "", - excerpt, - ) + excerpt = " ".join([excerpt[i : i + 2] for i in range(0, len(excerpt), 2)]) + excerpt = f", '{excerpt}{ellipsis}'" if len(self) > 0 else "" + + plural = "s" if len(self) != 1 else "" + return f"" -class Binary(RqlTopLevelQuery): +class Binary(ReqlTopLevelQuery): # Note: this term isn't actually serialized, it should exist only # in the client - term_type = P_TERM.BINARY - statement = "binary" + def __init__(self, data, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.BINARY + self.statement = "binary" - def __init__(self, data): # We only allow 'bytes' objects to be serialized as binary # Python 2 - `bytes` is equivalent to `str`, either will be accepted # Python 3 - `unicode` is equivalent to `str`, neither will be accepted - if isinstance(data, RqlQuery): - RqlTopLevelQuery.__init__(self, data) - elif isinstance(data, unicode): + if isinstance(data, ReqlQuery): + ReqlTopLevelQuery.__init__(self, data) + elif isinstance(data, str): raise ReqlDriverCompileError( "Cannot convert a unicode string to binary, " "use `unicode.encode()` to specify the " @@ -1753,254 +2048,406 @@ def __init__(self, data): ) elif not isinstance(data, bytes): raise ReqlDriverCompileError( - ( - "Cannot convert %s to binary, convert the " - "object to a `bytes` object first." 
- ) - % type(data).__name__ + f"Cannot convert {type(data).__name__} to binary, convert the object to a `bytes` " + f"object first." ) - else: - self.base64_data = base64.b64encode(data) - # Kind of a hack to get around composing - self._args = [] - self.optargs = {} + self.base64_data = base64.b64encode(data) - def compose(self, args, optargs): + # Kind of a hack to get around composing + self._args = [] + self.kwargs = {} + + def compose(self, args, kwargs): if len(self._args) == 0: - return T("r.", self.statement, "(bytes())") - else: - return RqlTopLevelQuery.compose(self, args, optargs) + return EnhancedTuple("r.", self.statement, "(bytes())") + + return ReqlTopLevelQuery.compose(self, args, kwargs) def build(self): if len(self._args) == 0: return {"$reql_type$": "BINARY", "data": self.base64_data.decode("utf-8")} - else: - return RqlTopLevelQuery.build(self) - -class Range(RqlTopLevelQuery): - term_type = P_TERM.RANGE - statement = "range" + return ReqlTopLevelQuery.build(self) -class ToISO8601(RqlMethodQuery): - term_type = P_TERM.TO_ISO8601 - statement = "to_iso8601" +class Range(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.RANGE + self.statement = "range" -class During(RqlMethodQuery): - term_type = P_TERM.DURING - statement = "during" +class ToISO8601(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TO_ISO8601 + self.statement = "to_iso8601" -class Date(RqlMethodQuery): - term_type = P_TERM.DATE - statement = "date" +class During(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DURING + self.statement = "during" -class TimeOfDay(RqlMethodQuery): - term_type = P_TERM.TIME_OF_DAY - statement = "time_of_day" +class Date(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DATE + self.statement = "date" -class Timezone(RqlMethodQuery): - term_type = P_TERM.TIMEZONE - statement = "timezone" +class TimeOfDay(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TIME_OF_DAY + self.statement = "time_of_day" -class Year(RqlMethodQuery): - term_type = P_TERM.YEAR - statement = "year" +class Timezone(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TIMEZONE + self.statement = "timezone" -class Month(RqlMethodQuery): - term_type = P_TERM.MONTH - statement = "month" +class Year(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.YEAR + self.statement = "year" -class Day(RqlMethodQuery): - term_type = P_TERM.DAY - statement = "day" +class Month(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MONTH + self.statement = "month" -class DayOfWeek(RqlMethodQuery): - term_type = P_TERM.DAY_OF_WEEK - statement = "day_of_week" +class Day(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DAY + self.statement = "day" -class DayOfYear(RqlMethodQuery): - term_type = P_TERM.DAY_OF_YEAR - statement = "day_of_year" +class DayOfWeek(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DAY_OF_WEEK + self.statement = "day_of_week" 
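
A note on the wire format: as the `Binary.build()` hunk earlier in this file shows, binary data crosses the wire as a JSON pseudo-type object rather than raw bytes, and the decoder reverses the encoding (compare the removed `convert_binary` earlier in this patch). A minimal round-trip sketch of that format; the helper names are illustrative, not driver API:

    import base64

    def to_binary_pseudo_type(data: bytes) -> dict:
        # Mirrors Binary.build(): bytes -> {"$reql_type$": "BINARY", "data": <base64 text>}
        return {"$reql_type$": "BINARY", "data": base64.b64encode(data).decode("utf-8")}

    def from_binary_pseudo_type(obj: dict) -> bytes:
        # Mirrors the removed convert_binary(): base64 text -> bytes
        return base64.b64decode(obj["data"].encode("utf-8"))

    assert from_binary_pseudo_type(to_binary_pseudo_type(b"\x00\x01")) == b"\x00\x01"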
-class Hours(RqlMethodQuery): - term_type = P_TERM.HOURS - statement = "hours" +class DayOfYear(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DAY_OF_YEAR + self.statement = "day_of_year" -class Minutes(RqlMethodQuery): - term_type = P_TERM.MINUTES - statement = "minutes" +class Hours(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.HOURS + self.statement = "hours" -class Seconds(RqlMethodQuery): - term_type = P_TERM.SECONDS - statement = "seconds" +class Minutes(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.MINUTES + self.statement = "minutes" -class Time(RqlTopLevelQuery): - term_type = P_TERM.TIME - statement = "time" +class Seconds(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.SECONDS + self.statement = "seconds" -class ISO8601(RqlTopLevelQuery): - term_type = P_TERM.ISO8601 - statement = "iso8601" +class Time(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TIME + self.statement = "time" -class EpochTime(RqlTopLevelQuery): - term_type = P_TERM.EPOCH_TIME - statement = "epoch_time" +class ISO8601(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.ISO8601 + self.statement = "iso8601" -class Now(RqlTopLevelQuery): - term_type = P_TERM.NOW - statement = "now" +class EpochTime(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.EPOCH_TIME + self.statement = "epoch_time" -class InTimezone(RqlMethodQuery): - term_type = P_TERM.IN_TIMEZONE - statement = "in_timezone" +class Now(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.NOW + self.statement = "now" -class ToEpochTime(RqlMethodQuery): - term_type = P_TERM.TO_EPOCH_TIME - statement = "to_epoch_time" +class InTimezone(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.IN_TIMEZONE + self.statement = "in_timezone" -class GeoJson(RqlTopLevelQuery): - term_type = P_TERM.GEOJSON - statement = "geojson" +class ToEpochTime(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TO_EPOCH_TIME + self.statement = "to_epoch_time" -class ToGeoJson(RqlMethodQuery): - term_type = P_TERM.TO_GEOJSON - statement = "to_geojson" +class GeoJson(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.GEOJSON + self.statement = "geojson" -class Point(RqlTopLevelQuery): - term_type = P_TERM.POINT - statement = "point" +class ToGeoJson(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.TO_GEOJSON + self.statement = "to_geojson" -class Line(RqlTopLevelQuery): - term_type = P_TERM.LINE - statement = "line" +class Point(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.POINT + self.statement = "point" -class Polygon(RqlTopLevelQuery): - term_type = P_TERM.POLYGON - statement = "polygon" +class Line(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + 
super().__init__(*args, **kwargs) + self.term_type = P_TERM.LINE + self.statement = "line" -class Distance(RqlMethodQuery): - term_type = P_TERM.DISTANCE - statement = "distance" +class Polygon(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.POLYGON + self.statement = "polygon" -class Intersects(RqlMethodQuery): - term_type = P_TERM.INTERSECTS - statement = "intersects" +class Distance(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.DISTANCE + self.statement = "distance" -class Includes(RqlMethodQuery): - term_type = P_TERM.INCLUDES - statement = "includes" +class Intersects(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INTERSECTS + self.statement = "intersects" -class Circle(RqlTopLevelQuery): - term_type = P_TERM.CIRCLE - statement = "circle" +class Includes(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.INCLUDES + self.statement = "includes" -class Fill(RqlMethodQuery): - term_type = P_TERM.FILL - statement = "fill" +class Circle(ReqlTopLevelQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.CIRCLE + self.statement = "circle" -class PolygonSub(RqlMethodQuery): - term_type = P_TERM.POLYGON_SUB - statement = "polygon_sub" - - -# Returns True if IMPLICIT_VAR is found in the subquery -def _ivar_scan(query): - if not isinstance(query, RqlQuery): - return False - if isinstance(query, ImplicitVar): - return True - if any([_ivar_scan(arg) for arg in query._args]): - return True - if any([_ivar_scan(arg) for k, arg in dict_items(query.optargs)]): - return True - return False +class Fill(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.FILL + self.statement = "fill" -# Called on arguments that should be functions -def func_wrap(val): - val = expr(val) - if _ivar_scan(val): - return Func(lambda x: val) - return val +class PolygonSub(ReqlMethodQuery): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.POLYGON_SUB + self.statement = "polygon_sub" -class Func(RqlQuery): - term_type = P_TERM.FUNC +class Func(ReqlQuery): lock = threading.Lock() nextVarId = 1 - def __init__(self, lmbd): - super(Func, self).__init__() - vrs = [] - vrids = [] + def __init__(self, lmbd, *args, **kwargs): + super().__init__(*args, **kwargs) + self.term_type = P_TERM.FUNC + + variables = [] + variable_ids = [] + try: code = lmbd.func_code except AttributeError: code = lmbd.__code__ - for i in xrange(code.co_argcount): - Func.lock.acquire() - var_id = Func.nextVarId - Func.nextVarId += 1 - Func.lock.release() - vrs.append(Var(var_id)) - vrids.append(var_id) - - self.vrs = vrs - self._args.extend([MakeArray(*vrids), expr(lmbd(*vrs))]) - - def compose(self, args, optargs): - return T( + + for _ in range(code.co_argcount): + with Func.lock: + var_id = Func.nextVarId + Func.nextVarId += 1 + + variables.append(Var(var_id)) + variable_ids.append(var_id) + + self.variables = variables + self._args.extend([MakeArray(*variable_ids), expr(lmbd(*variables))]) + + def compose(self, args, kwargs): # pylint: disable=unused-argument + return EnhancedTuple( "lambda ", - T( - *[v.compose([v._args[0].compose(None, None)], []) for v in self.vrs], - intsp=", " + EnhancedTuple( + *[ + 
                    v.compose(
+                        # pylint: disable=protected-access
+                        [v._args[0].compose(None, None)],
+                        [],
+                    )
+                    for v in self.variables
+                ],
+                int_separator=", ",
             ),
             ": ",
             args[1],
         )


-class Asc(RqlTopLevelQuery):
-    term_type = P_TERM.ASC
-    statement = "asc"
+class Asc(ReqlTopLevelQuery):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.term_type = P_TERM.ASC
+        self.statement = "asc"
+
+
+class Desc(ReqlTopLevelQuery):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.term_type = P_TERM.DESC
+        self.statement = "desc"
+
+
+class Literal(ReqlTopLevelQuery):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.term_type = P_TERM.LITERAL
+        self.statement = "literal"
+
+
+# Returns True if IMPLICIT_VAR is found in the subquery
+def _ivar_scan(query) -> bool:
+    if not isinstance(query, ReqlQuery):
+        return False
+
+    if isinstance(query, ImplicitVar):
+        return True
+
+    # pylint: disable=protected-access,use-a-generator
+    if any([_ivar_scan(arg) for arg in query._args]):
+        return True
+
+    # pylint: disable=use-a-generator
+    if any([_ivar_scan(arg) for arg in query.kwargs.values()]):
+        return True
+
+    return False
+
+
+def needs_wrap(arg):
+    """
+    Return True if `arg` must be wrapped in an `r.expr(...)` call when
+    composed; Datum, MakeArray, and MakeObj define how nodes are printed
+    by overloading `compose`.
+    """
+
+    return isinstance(arg, (Datum, MakeArray, MakeObj))
+
+
+# pylint: disable=too-many-return-statements
+def expr(
+    val: TUnion[
+        str,
+        bytes,
+        ReqlQuery,
+        ReqlBinary,
+        datetime.date,
+        datetime.datetime,
+        Mapping,
+        Iterable,
+        Callable,
+    ],
+    nesting_depth: int = 20,
+):
+    """
+    Convert a Python primitive into a Reql primitive value.
+    """
+
+    if not isinstance(nesting_depth, int):
+        raise ReqlDriverCompileError("Second argument to `r.expr` must be a number.")
+
+    if nesting_depth <= 0:
+        raise ReqlDriverCompileError("Nesting depth limit exceeded.")
+
+    if isinstance(val, ReqlQuery):
+        return val

+    if callable(val):
+        return Func(val)

-class Desc(RqlTopLevelQuery):
-    term_type = P_TERM.DESC
-    statement = "desc"
+    if isinstance(val, str):  # TODO: Default is to return Datum - Remove?
+        return Datum(val)
+
+    if isinstance(val, (bytes, ReqlBinary)):
+        return Binary(val)

+    if isinstance(val, abc.Mapping):
+        return MakeObj({k: expr(v, nesting_depth - 1) for k, v in val.items()})

-class Literal(RqlTopLevelQuery):
-    term_type = P_TERM.LITERAL
-    statement = "literal"
+    if isinstance(val, abc.Iterable):
+        return MakeArray(*[expr(v, nesting_depth - 1) for v in val])  # type: ignore
+
+    if isinstance(val, (datetime.datetime, datetime.date)):
+        if isinstance(val, datetime.date) or not val.tzinfo:
+            raise ReqlDriverCompileError(
+                f"""
+                Cannot convert {type(val).__name__} to Reql time object
+                without timezone information. You can add timezone information with
+                the third party module \"pytz\" or by constructing Reql compatible
+                timezone values with r.make_timezone(\"[+-]HH:MM\"). Alternatively,
+                use one of Reql's builtin time constructors, r.now, r.time,
+                or r.iso8601.
+                """
+            )
+
+        return ISO8601(val.isoformat())
+
+    return Datum(val)
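
A note on the dispatch order in `expr()` above: order matters, because `str` and `bytes` are themselves iterable and so must be tested before the `abc.Mapping`/`abc.Iterable` branches, and timezone-naive datetimes are rejected outright. A standalone sketch of the same decision order; `to_term` and the tag strings are illustrative stand-ins, not driver API:

    import datetime
    from collections import abc

    def to_term(val, nesting_depth=20):
        # Same branch order as expr() above, with tags standing in for the
        # driver's term classes (Func, Datum, Binary, MakeObj, MakeArray, ...).
        if nesting_depth <= 0:
            raise ValueError("Nesting depth limit exceeded.")
        if callable(val):
            return ("FUNC", val)
        if isinstance(val, str):
            return ("DATUM", val)
        if isinstance(val, bytes):
            return ("BINARY", val)
        if isinstance(val, abc.Mapping):
            return ("MAKE_OBJ", {k: to_term(v, nesting_depth - 1) for k, v in val.items()})
        if isinstance(val, abc.Iterable):
            return ("MAKE_ARRAY", [to_term(v, nesting_depth - 1) for v in val])
        if isinstance(val, (datetime.datetime, datetime.date)):
            if isinstance(val, datetime.date) or not val.tzinfo:
                raise ValueError("timezone-naive datetimes are rejected, as in expr()")
            return ("ISO8601", val.isoformat())
        return ("DATUM", val)

    assert to_term([1, {"a": b"x"}])[0] == "MAKE_ARRAY"

+
+
+# Called on arguments that should be functions
+# TODO: expr may return different value types. Maybe use a base one?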
+def func_wrap(val: TUnion[ReqlQuery, ImplicitVar, list, dict]): + val = expr(val) + if _ivar_scan(val): + return Func(lambda x: val) + + return val diff --git a/rethinkdb/asyncio_net/__init__.py b/rethinkdb/asyncio_net/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rethinkdb/asyncio_net/net_asyncio.py b/rethinkdb/asyncio_net/net_asyncio.py deleted file mode 100644 index 781081e5..00000000 --- a/rethinkdb/asyncio_net/net_asyncio.py +++ /dev/null @@ -1,383 +0,0 @@ -# Copyright 2018 RethinkDB -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# This file incorporates work covered by the following copyright: -# Copyright 2010-2016 RethinkDB, all rights reserved. - -import asyncio -import contextlib -import socket -import ssl -import struct - -from rethinkdb import ql2_pb2 -from rethinkdb.errors import ( - ReqlAuthError, - ReqlCursorEmpty, - ReqlDriverError, - ReqlTimeoutError, - RqlCursorEmpty, -) -from rethinkdb.net import Connection as ConnectionBase -from rethinkdb.net import Cursor, Query, Response, maybe_profile - -__all__ = ["Connection"] - - -pResponse = ql2_pb2.Response.ResponseType -pQuery = ql2_pb2.Query.QueryType - - -@asyncio.coroutine -def _read_until(streamreader, delimiter): - """Naive implementation of reading until a delimiter""" - buffer = bytearray() - - while True: - c = yield from streamreader.read(1) - if c == b"": - break # EOF - buffer.append(c[0]) - if c == delimiter: - break - - return bytes(buffer) - - -def reusable_waiter(loop, timeout): - """Wait for something, with a timeout from when the waiter was created. - - This can be used in loops:: - - waiter = reusable_waiter(event_loop, 10.0) - while some_condition: - yield from waiter(some_future) - """ - if timeout is not None: - deadline = loop.time() + timeout - else: - deadline = None - - @asyncio.coroutine - def wait(future): - if deadline is not None: - new_timeout = max(deadline - loop.time(), 0) - else: - new_timeout = None - return (yield from asyncio.wait_for(future, new_timeout, loop=loop)) - - return wait - - -@contextlib.contextmanager -def translate_timeout_errors(): - try: - yield - except asyncio.TimeoutError: - raise ReqlTimeoutError() - - -# The asyncio implementation of the Cursor object: -# The `new_response` Future notifies any waiting coroutines that the can attempt -# to grab the next result. In addition, the waiting coroutine will schedule a -# timeout at the given deadline (if provided), at which point the future will be -# errored. 
-class AsyncioCursor(Cursor): - def __init__(self, *args, **kwargs): - Cursor.__init__(self, *args, **kwargs) - self.new_response = asyncio.Future() - - def __aiter__(self): - return self - - @asyncio.coroutine - def __anext__(self): - try: - return (yield from self._get_next(None)) - except ReqlCursorEmpty: - raise StopAsyncIteration - - @asyncio.coroutine - def close(self): - if self.error is None: - self.error = self._empty_error() - if self.conn.is_open(): - self.outstanding_requests += 1 - yield from self.conn._parent._stop(self) - - def _extend(self, res_buf): - Cursor._extend(self, res_buf) - self.new_response.set_result(True) - self.new_response = asyncio.Future() - - # Convenience function so users know when they've hit the end of the cursor - # without having to catch an exception - @asyncio.coroutine - def fetch_next(self, wait=True): - timeout = Cursor._wait_to_timeout(wait) - waiter = reusable_waiter(self.conn._io_loop, timeout) - while len(self.items) == 0 and self.error is None: - self._maybe_fetch_batch() - if self.error is not None: - raise self.error - with translate_timeout_errors(): - yield from waiter(asyncio.shield(self.new_response)) - # If there is a (non-empty) error to be received, we return True, so the - # user will receive it on the next `next` call. - return len(self.items) != 0 or not isinstance(self.error, RqlCursorEmpty) - - def _empty_error(self): - # We do not have RqlCursorEmpty inherit from StopIteration as that interferes - # with mechanisms to return from a coroutine. - return RqlCursorEmpty() - - @asyncio.coroutine - def _get_next(self, timeout): - waiter = reusable_waiter(self.conn._io_loop, timeout) - while len(self.items) == 0: - self._maybe_fetch_batch() - if self.error is not None: - raise self.error - with translate_timeout_errors(): - yield from waiter(asyncio.shield(self.new_response)) - return self.items.popleft() - - def _maybe_fetch_batch(self): - if ( - self.error is None - and len(self.items) < self.threshold - and self.outstanding_requests == 0 - ): - self.outstanding_requests += 1 - asyncio.ensure_future(self.conn._parent._continue(self)) - - -class ConnectionInstance(object): - _streamreader = None - _streamwriter = None - _reader_task = None - - def __init__(self, parent, io_loop=None): - self._parent = parent - self._closing = False - self._user_queries = {} - self._cursor_cache = {} - self._ready = asyncio.Future() - self._io_loop = io_loop - if self._io_loop is None: - self._io_loop = asyncio.get_event_loop() - - def client_port(self): - if self.is_open(): - return self._streamwriter.get_extra_info("sockname")[1] - - def client_address(self): - if self.is_open(): - return self._streamwriter.get_extra_info("sockname")[0] - - @asyncio.coroutine - def connect(self, timeout): - try: - ssl_context = None - if len(self._parent.ssl) > 0: - ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - if hasattr(ssl_context, "options"): - ssl_context.options |= getattr(ssl, "OP_NO_SSLv2", 0) - ssl_context.options |= getattr(ssl, "OP_NO_SSLv3", 0) - ssl_context.verify_mode = ssl.CERT_REQUIRED - ssl_context.check_hostname = True # redundant with match_hostname - ssl_context.load_verify_locations(self._parent.ssl["ca_certs"]) - - self._streamreader, self._streamwriter = yield from asyncio.open_connection( - self._parent.host, - self._parent.port, - loop=self._io_loop, - ssl=ssl_context, - ) - self._streamwriter.get_extra_info("socket").setsockopt( - socket.IPPROTO_TCP, socket.TCP_NODELAY, 1 - ) - self._streamwriter.get_extra_info("socket").setsockopt( 
- socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1 - ) - except Exception as err: - raise ReqlDriverError( - "Could not connect to %s:%s. Error: %s" - % (self._parent.host, self._parent.port, str(err)) - ) - - try: - self._parent.handshake.reset() - response = None - with translate_timeout_errors(): - while True: - request = self._parent.handshake.next_message(response) - if request is None: - break - # This may happen in the `V1_0` protocol where we send two requests as - # an optimization, then need to read each separately - if request is not "": - self._streamwriter.write(request) - - response = yield from asyncio.wait_for( - _read_until(self._streamreader, b"\0"), - timeout, - loop=self._io_loop, - ) - response = response[:-1] - except ReqlAuthError: - yield from self.close() - raise - except ReqlTimeoutError as err: - yield from self.close() - raise ReqlDriverError( - "Connection interrupted during handshake with %s:%s. Error: %s" - % (self._parent.host, self._parent.port, str(err)) - ) - except Exception as err: - yield from self.close() - raise ReqlDriverError( - "Could not connect to %s:%s. Error: %s" - % (self._parent.host, self._parent.port, str(err)) - ) - - # Start a parallel function to perform reads - # store a reference to it so it doesn't get destroyed - self._reader_task = asyncio.ensure_future(self._reader(), loop=self._io_loop) - return self._parent - - def is_open(self): - return not (self._closing or self._streamreader.at_eof()) - - @asyncio.coroutine - def close(self, noreply_wait=False, token=None, exception=None): - self._closing = True - if exception is not None: - err_message = "Connection is closed (%s)." % str(exception) - else: - err_message = "Connection is closed." - - # Cursors may remove themselves when errored, so copy a list of them - for cursor in list(self._cursor_cache.values()): - cursor._error(err_message) - - for query, future in iter(self._user_queries.values()): - if not future.done(): - future.set_exception(ReqlDriverError(err_message)) - - self._user_queries = {} - self._cursor_cache = {} - - if noreply_wait: - noreply = Query(pQuery.NOREPLY_WAIT, token, None, None) - yield from self.run_query(noreply, False) - - self._streamwriter.close() - # We must not wait for the _reader_task if we got an exception, because that - # means that we were called from it. Waiting would lead to a deadlock. - if self._reader_task and exception is None: - yield from self._reader_task - - return None - - @asyncio.coroutine - def run_query(self, query, noreply): - self._streamwriter.write(query.serialize(self._parent._get_json_encoder(query))) - if noreply: - return None - - response_future = asyncio.Future() - self._user_queries[query.token] = (query, response_future) - return (yield from response_future) - - # The _reader coroutine runs in parallel, reading responses - # off of the socket and forwarding them to the appropriate Future or Cursor. - # This is shut down as a consequence of closing the stream, or an error in the - # socket/protocol from the server. Unexpected errors in this coroutine will - # close the ConnectionInstance and be passed to any open Futures or Cursors. - @asyncio.coroutine - def _reader(self): - try: - while True: - buf = yield from self._streamreader.readexactly(12) - (token, length,) = struct.unpack(" max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. 
diff --git a/rethinkdb/backports/ssl_match_hostname/__init__.py b/rethinkdb/backports/ssl_match_hostname/__init__.py
deleted file mode 100644
--- a/rethinkdb/backports/ssl_match_hostname/__init__.py
+++ /dev/null
-def _dnsname_match(domain_name, hostname, max_wildcards=1):
-    """Matching according to RFC 6125, section 6.4.3
-
-    http://tools.ietf.org/html/rfc6125#section-6.4.3
-    """
-    pats = []
-    if not domain_name:
-        return False
-
-    parts = domain_name.split(r".")
-    leftmost = parts[0]
-    remainder = parts[1:]
-
-    wildcards = leftmost.count("*")
-    if wildcards > max_wildcards:
-        # Issue #17980: avoid denials of service by refusing more
-        # than one wildcard per fragment. A survey of established
-        # policy among SSL implementations showed it to be a
-        # reasonable choice.
-        raise CertificateError(
-            "too many wildcards in certificate DNS name: " + repr(domain_name)
-        )
-
-    # speed up common case w/o wildcards
-    if not wildcards:
-        return domain_name.lower() == hostname.lower()
-
-    # RFC 6125, section 6.4.3, subitem 1.
-    # The client SHOULD NOT attempt to match a presented identifier in which
-    # the wildcard character comprises a label other than the left-most label.
-    if leftmost == "*":
-        # When '*' is a fragment by itself, it matches a non-empty dotless
-        # fragment.
-        pats.append("[^.]+")
-    elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
-        # RFC 6125, section 6.4.3, subitem 3.
-        # The client SHOULD NOT attempt to match a presented identifier
-        # where the wildcard character is embedded within an A-label or
-        # U-label of an internationalized domain name.
-        pats.append(re.escape(leftmost))
-    else:
-        # Otherwise, '*' matches any dotless string, e.g. www*
-        pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
-
-    # add the remaining fragments, ignore any wildcards
-    for frag in remainder:
-        pats.append(re.escape(frag))
-
-    pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
-    return pat.match(hostname)
-
-
-def match_hostname(cert, hostname):
-    """Verify that *cert* (in decoded format as returned by
-    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
-    rules are followed, but IP addresses are not accepted for *hostname*.
-
-    CertificateError is raised on failure. On success, the function
-    returns nothing.
-    """
-
-    if not cert:
-        raise ValueError("empty or no certificate")
-
-    dnsnames = []
-    san = cert.get("subjectAltName", ())
-    for key, value in san:
-        if key == "DNS":
-            if _dnsname_match(value, hostname):
-                return
-            dnsnames.append(value)
-
-    if not dnsnames:
-        # The subject is only checked when there is no dNSName entry
-        # in subjectAltName
-        for sub in cert.get("subject", ()):
-            for key, value in sub:
-                # XXX according to RFC 2818, the most specific Common Name
-                # must be used.
-                if key == "commonName":
-                    if _dnsname_match(value, hostname):
-                        return
-                    dnsnames.append(value)
-
-    if len(dnsnames) > 1:
-        raise CertificateError(
-            "hostname %r doesn't match either of %s"
-            % (hostname, ", ".join(map(repr, dnsnames)))
-        )
-    elif len(dnsnames) == 1:
-        raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
-    else:
-        raise CertificateError(
-            "no appropriate commonName or subjectAltName fields were found"
-        )
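A short sketch of how this backported check behaves, using a certificate dict shaped like `SSLSocket.getpeercert()` output (the names below are made-up values):

    cert = {
        "subject": ((("commonName", "example.com"),),),
        "subjectAltName": (("DNS", "example.com"), ("DNS", "*.example.com")),
    }
    match_hostname(cert, "www.example.com")  # the wildcard SAN matches; returns None
    try:
        match_hostname(cert, "www.other.test")
    except CertificateError as err:
        print(err)  # hostname 'www.other.test' doesn't match either of ...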
diff --git a/rethinkdb/docs.py b/rethinkdb/docs.py
deleted file mode 100644
index cd39de2e..00000000
--- a/rethinkdb/docs.py
+++ /dev/null
@@ -1,730 +0,0 @@
-# -*- coding: utf-8 -*-
-# This file was generated by _scripts/gen_python.py from the rethinkdb documentation in http://github.com/rethinkdb/docs
-# hash: "186fb20ea9911710e910acfab0f4f221d59d7c04"
-
-import rethinkdb
-
-docsSource = [
-    (
-        rethinkdb.net.Connection.close,
-        b"conn.close(noreply_wait=True)\n\nClose an open connection.\n\nClosing a connection normally waits until all outstanding requests have finished and then frees any open resources associated with the connection. By passing `False` to the `noreply_wait` optional argument, the connection will be closed immediately, possibly aborting any outstanding noreply writes.\n\nA noreply query is executed by passing the `noreply` option to the [run](http://rethinkdb.com/api/python/run/) command, indicating that `run()` should not wait for the query to complete before returning. You may also explicitly wait for a noreply query to complete by using the [noreply_wait](http://rethinkdb.com/api/python/noreply_wait) command.\n\n*Example* Close an open connection, waiting for noreply writes to finish.\n\n conn.close()\n\n*Example* Close an open connection immediately.\n\n conn.close(noreply_wait=False)\n",
-    ),
-    (
-        rethinkdb.connect,
-        b"r.connect(host=\"localhost\", port=28015, db=\"test\", auth_key=\"\", timeout=20) -> connection\nr.connect(host) -> connection\n\nCreate a new connection to the database server. The keyword arguments are:\n\n- `host`: host of the RethinkDB instance. The default value is `localhost`.\n- `port`: the driver port, by default `28015`.\n- `db`: the database used if not explicitly specified in a query, by default `test`.\n- `user`: the user account to connect as (default `admin`).\n- `password`: the password for the user account to connect as (default `''`, empty).\n- `timeout`: timeout period in seconds for the connection to be opened (default `20`).\n- `ssl`: a hash of options to support SSL connections (default `None`). Currently, there is only one option available, and if the `ssl` option is specified, this key is required:\n - `ca_certs`: a path to the SSL CA certificate.\n\nIf the connection cannot be established, a `ReqlDriverError` exception will be thrown.\n\n\n\nThe RethinkDB Python driver includes support for asynchronous connections using Tornado and Twisted. Read the asynchronous connections documentation for more information.\n\n*Example* Open a connection using the default host and port, specifying the default database.\n\n conn = r.connect(db='marvel')\n\n*Example* Open a new connection to the database.\n\n conn = r.connect(host='localhost',\n port=28015,\n db='heroes')\n\n*Example* Open a new connection to the database, specifying a user/password combination for authentication.\n\n conn = r.connect(host='localhost',\n port=28015,\n db='heroes',\n user='herofinder',\n password='metropolis')\n\n*Example* Open a new connection to the database using an SSL proxy.\n\n conn = r.connect(host='localhost',\n port=28015,\n auth_key='hunter2',\n ssl={'ca_certs': '/path/to/ca.crt'})\n\n*Example* Use a `with` statement to open a connection and pass it to a block. Using this style, the connection will be automatically closed when execution reaches the end of the block.\n\n with r.connect(db='marvel') as conn:\n r.table('superheroes').run(conn)\n",
-    ),
-    (
-        rethinkdb.net.Connection.noreply_wait,
-        b"conn.noreply_wait()\n\n`noreply_wait` ensures that previous queries with the `noreply` flag have been processed\nby the server. Note that this guarantee only applies to queries run on the given connection.\n\n*Example* We have previously run queries with the `noreply` argument set to `True`. Now\nwait until the server has processed them.\n\n conn.noreply_wait()\n\n",
-    ),
-    (
-        rethinkdb,
-        b"r -> r\n\nThe top-level ReQL namespace.\n\n*Example* Setup your top-level namespace.\n\n import rethinkdb as r\n\n",
-    ),
-    (
-        rethinkdb.net.Connection.reconnect,
-        b"conn.reconnect(noreply_wait=True)\n\nClose and reopen a connection.\n\nClosing a connection normally waits until all outstanding requests have finished and then frees any open resources associated with the connection.
By passing `False` to the `noreply_wait` optional argument, the connection will be closed immediately, possibly aborting any outstanding noreply writes.\n\nA noreply query is executed by passing the `noreply` option to the [run](http://rethinkdb.com/api/python/run/) command, indicating that `run()` should not wait for the query to complete before returning. You may also explicitly wait for a noreply query to complete by using the [noreply_wait](http://rethinkdb.com/api/python/noreply_wait) command.\n\n*Example* Cancel outstanding requests/queries that are no longer needed.\n\n conn.reconnect(noreply_wait=False)\n",
-    ),
-    (
-        rethinkdb.net.Connection.repl,
-        b"conn.repl()\n\nSet the default connection to make REPL use easier. Allows calling\n`.run()` on queries without specifying a connection.\n\n__Note:__ Avoid using `repl` in application code. RethinkDB connection objects are not thread-safe, and calls to `connect` from multiple threads may change the global connection object used by `repl`. Applications should specify connections explicitly.\n\n*Example* Set the default connection for the REPL, then call\n`run()` without specifying the connection.\n\n r.connect(db='marvel').repl()\n r.table('heroes').run()\n",
-    ),
-    (
-        rethinkdb.ast.RqlQuery.run,
-        b"query.run(conn[, options]) -> cursor\nquery.run(conn[, options]) -> object\n\nRun a query on a connection, returning either a single JSON result or\na cursor, depending on the query.\n\nThe optional arguments are:\n\n- `read_mode`: One of three possible values affecting the consistency guarantee for the query (default: `'single'`).\n - `'single'` (the default) returns values that are in memory (but not necessarily written to disk) on the primary replica.\n - `'majority'` will only return values that are safely committed on disk on a majority of replicas. This requires sending a message to every replica on each read, so it is the slowest but most consistent.\n - `'outdated'` will return values that are in memory on an arbitrarily-selected replica. This is the fastest but least consistent.\n- `time_format`: what format to return times in (default: `'native'`).\n Set this to `'raw'` if you want times returned as JSON objects for exporting.\n- `profile`: whether or not to return a profile of the query's\n execution (default: `False`).\n- `durability`: possible values are `'hard'` and `'soft'`. In soft durability mode RethinkDB\nwill acknowledge the write immediately after receiving it, but before the write has\nbeen committed to disk.\n- `group_format`: what format to return `grouped_data` and `grouped_streams` in (default: `'native'`).\n Set this to `'raw'` if you want the raw pseudotype.\n- `noreply`: set to `True` to not receive the result object or cursor and return immediately.\n- `db`: the database to run this query against as a string. The default is the database specified in the `db` parameter to [connect](http://rethinkdb.com/api/python/connect/) (which defaults to `test`). The database may also be specified with the [db](http://rethinkdb.com/api/python/db/) command.\n- `array_limit`: the maximum numbers of array elements that can be returned by a query (default: 100,000). This affects all ReQL commands that return arrays. Note that it has no effect on the size of arrays being _written_ to the database; those always have an upper limit of 100,000 elements.\n- `binary_format`: what format to return binary data in (default: `'native'`).
Set this to `'raw'` if you want the raw pseudotype.\n- `min_batch_rows`: minimum number of rows to wait for before batching a result set (default: 8). This is an integer.\n- `max_batch_rows`: maximum number of rows to wait for before batching a result set (default: unlimited). This is an integer.\n- `max_batch_bytes`: maximum number of bytes to wait for before batching a result set (default: 1MB). This is an integer.\n- `max_batch_seconds`: maximum number of seconds to wait before batching a result set (default: 0.5). This is a float (not an integer) and may be specified to the microsecond.\n- `first_batch_scaledown_factor`: factor to scale the other parameters down by on the first batch (default: 4). For example, with this set to 8 and `max_batch_rows` set to 80, on the first batch `max_batch_rows` will be adjusted to 10 (80 / 8). This allows the first batch to return faster.\n\n*Example* Run a query on the connection `conn` and print out every\nrow in the result.\n\n for doc in r.table('marvel').run(conn):\n print doc\n\n*Example* If you are OK with potentially out of date data from all\nthe tables involved in this query and want potentially faster reads,\npass a flag allowing out of date data in an options object. Settings\nfor individual tables will supercede this global setting for all\ntables in the query.\n\n r.table('marvel').run(conn, read_mode='outdated')\n\n*Example* If you just want to send a write and forget about it, you\ncan set `noreply` to true in the options. In this case `run` will\nreturn immediately.\n\n r.table('marvel').run(conn, noreply=True)\n\n*Example* If you want to specify whether to wait for a write to be\nwritten to disk (overriding the table's default settings), you can set\n`durability` to `'hard'` or `'soft'` in the options.\n\n r.table('marvel')\n .insert({ 'superhero': 'Iron Man', 'superpower': 'Arc Reactor' })\n .run(conn, noreply=True, durability='soft')\n\n*Example* If you do not want a time object to be converted to a\nnative date object, you can pass a `time_format` flag to prevent it\n(valid flags are \"raw\" and \"native\"). This query returns an object\nwith two fields (`epoch_time` and `$reql_type$`) instead of a native date\nobject.\n\n r.now().run(conn, time_format=\"raw\")\n\n*Example* Specify the database to use for the query.\n\n for doc in r.table('marvel').run(conn, db='heroes'):\n print doc\n\nThis is equivalent to using the `db` command to specify the database:\n\n r.db('heroes').table('marvel').run(conn) ...\n\n*Example* Change the batching parameters for this query.\n\n r.table('marvel').run(conn, max_batch_rows=16, max_batch_bytes=2048)\n",
-    ),
-    (
-        rethinkdb.net.Connection.server,
-        b'conn.server()\n\nReturn information about the server being used by a connection.\n\nThe `server` command returns either two or three fields:\n\n* `id`: the UUID of the server the client is connected to.\n* `proxy`: a boolean indicating whether the server is a RethinkDB proxy node.\n* `name`: the server name. If `proxy` is `True`, this field will not be returned.\n\n*Example* Return server information.\n\n > conn.server()\n \n {\n "id": "404bef53-4b2c-433f-9184-bc3f7bda4a15",\n "name": "amadeus",\n "proxy": False\n }\n',
-    ),
-    (
-        rethinkdb.set_loop_type,
-        b'r.set_loop_type(string)\n\nSet an asynchronous event loop model. There are two supported models:\n\n* `"tornado"`: use the Tornado web framework. Under this model, the connect and run commands will return Tornado `Future` objects.\n* `"twisted"`: use the Twisted networking engine.
Under this model, the connect and run commands will return Twisted `Deferred` objects.\n\n*Example* Read a table\'s data using Tornado.\n\n r.set_loop_type("tornado")\n conn = r.connect(host=\'localhost\', port=28015)\n \n @gen.coroutine\n def use_cursor(conn):\n # Print every row in the table.\n cursor = yield r.table(\'test\').order_by(index="id").run(yield conn)\n while (yield cursor.fetch_next()):\n item = yield cursor.next()\n print(item)\n\nFor a longer discussion with both Tornado and Twisted examples, see the documentation article on Asynchronous connections.\n\n',
-    ),
-    (
-        rethinkdb.net.Connection.use,
-        b"conn.use(db_name)\n\nChange the default database on this connection.\n\n*Example* Change the default database so that we don't need to\nspecify the database when referencing a table.\n\n conn.use('marvel')\n r.table('heroes').run(conn) # refers to r.db('marvel').table('heroes')\n",
-    ),
-    (
-        rethinkdb.ast.Table.config,
-        b'table.config() -> selection<object>\ndatabase.config() -> selection<object>\n\nQuery (read and/or update) the configurations for individual tables or databases.\n\nThe `config` command is a shorthand way to access the `table_config` or `db_config` [System tables](http://rethinkdb.com/docs/system-tables/#configuration-tables). It will return the single row from the system that corresponds to the database or table configuration, as if [get](http://rethinkdb.com/api/python/get) had been called on the system table with the UUID of the database or table in question.\n\n*Example* Get the configuration for the `users` table.\n\n r.table(\'users\').config().run(conn)\n\n\n\nExample return:\n\n \n {\n "id": "31c92680-f70c-4a4b-a49e-b238eb12c023",\n "name": "users",\n "db": "superstuff",\n "primary_key": "id",\n "shards": [\n {\n "primary_replica": "a",\n "replicas": ["a", "b"],\n "nonvoting_replicas": []\n },\n {\n "primary_replica": "d",\n "replicas": ["c", "d"],\n "nonvoting_replicas": []\n }\n ],\n "indexes": [],\n "write_acks": "majority",\n "durability": "hard"\n }\n\n*Example* Change the write acknowledgement requirement of the `users` table.\n\n r.table(\'users\').config().update({\'write_acks\': \'single\'}).run(conn)\n',
-    ),
-    (
-        rethinkdb.grant,
-        b"r.grant(\"username\", {\"permission\": bool[, ...]}) -> object\ndb.grant(\"username\", {\"permission\": bool[, ...]}) -> object\ntable.grant(\"username\", {\"permission\": bool[, ...]}) -> object\n\nGrant or deny access permissions for a user account, globally or on a per-database or per-table basis.\n\nThere are four different permissions that can be granted to an account:\n\n* `read` allows reading the data in tables.\n* `write` allows modifying data, including inserting, replacing/updating, and deleting.\n* `connect` allows a user to open HTTP connections via the http command. This permission can only be granted in global scope.\n* `config` allows users to create/drop secondary indexes on a table and changing the cluster configuration; to create and drop tables, if granted on a database; and to create and drop databases, if granted globally.\n\nPermissions may be granted on a global scope, or granted for a specific table or database.
The scope is defined by calling `grant` on its own (e.g., `r.grant()`, on a table (`r.table().grant()`), or on a database (`r.db().grant()`).\n\nThe `grant` command returns an object of the following form:\n\n {\n \"granted\": 1,\n \"permissions_changes\": [\n {\n \"new_val\": { new permissions },\n \"old_val\": { original permissions }\n }\n ]\n\nThe `granted` field will always be `1`, and the `permissions_changes` list will have one object, describing the new permissions values and the old values they were changed from (which may be `None`).\n\nPermissions that are not defined on a local scope will be inherited from the next largest scope. For example, a write operation on a table will first check if `write` permissions are explicitly set to `True` or `False` for that table and account combination; if they are not, the `write` permissions for the database will be used if those are explicitly set; and if neither table nor database permissions are set for that account, the global `write` permissions for that account will be used.\n\n__Note:__ For all accounts other than the special, system-defined `admin` account, permissions that are not explicitly set in any scope will effectively be `False`. When you create a new user account by inserting a record into the system table, that account will have _no_ permissions until they are explicitly granted.\n\nFor a full description of permissions, read Permissions and user accounts.\n\n*Example* Grant the `chatapp` user account read and write permissions on the `users` database.\n\n > r.db('users').grant('chatapp', {'read': True, 'write': True}).run(conn)\n \n {\n \"granted\": 1,\n \"permissions_changes\": [\n {\n \"new_val\": { \"read\": true, \"write\": true },\n \"old_val\": { null }\n }\n ]\n\n*Example* Deny write permissions from the `chatapp` account for the `admin` table.\n\n r.db('users').table('admin').grant('chatapp', {'write': False}).run(conn)\n\nThis will override the `write: true` permissions granted in the first example, but for this table only. Other tables in the `users` database will inherit from the database permissions.\n\n*Example* Delete a table-level permission for the `chatapp` account.\n\n r.db('users').table('admin').grant('chatapp', {'write': None}).run(conn)\n\nBy specifying `None`, the table scope `write` permission is removed, and will again inherit from the next highest scope (database or global).\n\n*Example* Grant `chatapp` the ability to use HTTP connections.\n\n r.grant('chatapp', {'connect': True}).run(conn)\n\nThis grant can only be given on a global level.\n\n*Example* Grant a `monitor` account read-only access to all databases.\n\n r.grant('monitor', {'read': True}).run(conn)\n",
-    ),
-    (
-        rethinkdb.ast.Table.rebalance,
-        b'table.rebalance() -> object\ndatabase.rebalance() -> object\n\nRebalances the shards of a table. When called on a database, all the tables in that database will be rebalanced.\n\nThe `rebalance` command operates by measuring the distribution of primary keys within a table and picking split points that will give each shard approximately the same number of documents.
It won\'t change the number of shards within a table, or change any other configuration aspect for the table or the database.\n\nA table will lose availability temporarily after `rebalance` is called; use the [wait](http://rethinkdb.com/api/python/wait) command to wait for the table to become available again, or [status](http://rethinkdb.com/api/python/status) to check if the table is available for writing.\n\nRethinkDB automatically rebalances tables when the number of shards are increased, and as long as your documents have evenly distributed primary keys—such as the default UUIDs—it is rarely necessary to call `rebalance` manually. Cases where `rebalance` may need to be called include:\n\n* Tables with unevenly distributed primary keys, such as incrementing integers\n* Changing a table\'s primary key type\n* Increasing the number of shards on an empty table, then using non-UUID primary keys in that table\n\nThe [web UI](http://rethinkdb.com/docs/administration-tools/) (and the [info](http://rethinkdb.com/api/python/info) command) can be used to tell you when a table\'s shards need to be rebalanced.\n\nThe return value of `rebalance` is an object with two fields:\n\n* `rebalanced`: the number of tables rebalanced.\n* `status_changes`: a list of new and old table status values. Each element of the list will be an object with two fields:\n * `old_val`: The table\'s [status](http://rethinkdb.com/api/python/status) value before `rebalance` was executed. \n * `new_val`: The table\'s `status` value after `rebalance` was executed. (This value will almost always indicate the table is unavailable.)\n\nSee the [status](http://rethinkdb.com/api/python/status) command for an explanation of the objects returned in the `old_val` and `new_val` fields.\n\n*Example* Rebalance a table.\n\n r.table(\'superheroes\').rebalance().run(conn)\n\n\n\nExample return:\n\n {\n "rebalanced": 1,\n "status_changes": [\n {\n "old_val": {\n "db": "database",\n "id": "5cb35225-81b2-4cec-9eef-bfad15481265",\n "name": "superheroes",\n "shards": [\n {\n "primary_replica": "jeeves",\n "replicas": [\n {\n "server": "jeeves",\n "state": "ready"\n }\n ]\n },\n {\n "primary_replica": "jeeves",\n "replicas": [\n {\n "server": "jeeves",\n "state": "ready"\n }\n ]\n }\n ],\n "status": {\n "all_replicas_ready": True,\n "ready_for_outdated_reads": True,\n "ready_for_reads": True,\n "ready_for_writes": True\n }\n },\n "new_val": {\n "db": "database",\n "id": "5cb35225-81b2-4cec-9eef-bfad15481265",\n "name": "superheroes",\n "shards": [\n {\n "primary_replica": "jeeves",\n "replicas": [\n {\n "server": "jeeves",\n "state": "transitioning"\n }\n ]\n },\n {\n "primary_replica": "jeeves",\n "replicas": [\n {\n "server": "jeeves",\n "state": "transitioning"\n }\n ]\n }\n ],\n "status": {\n "all_replicas_ready": False,\n "ready_for_outdated_reads": False,\n "ready_for_reads": False,\n "ready_for_writes": False\n }\n }\n \n }\n ]\n }\n',
-    ),
-    (
-        rethinkdb.ast.Table.reconfigure,
-        b'table.reconfigure(shards=, replicas=[, primary_replica_tag=, dry_run=False, nonvoting_replica_tags=None]) -> object\ndatabase.reconfigure(shards=, replicas=[, primary_replica_tag=, dry_run=False, nonvoting_replica_tags=None]) -> object\ntable.reconfigure(emergency_repair=